Musicg library for clap detection is not working on Android

I am using the musicg library for clap detection, but whenever the activity starts it reports continuous clap detection without any actual clapping. I thought there might be a problem with the bit rate or frame size values (a small sanity check for this is sketched after the code below). This is my code:

RecorderThread.java

public class RecorderThread extends Thread { 

private AudioRecord audioRecord; 
private boolean isRecording; 
private int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO; 
private int audioEncoding = AudioFormat.ENCODING_PCM_16BIT; 
private int sampleRate = 44100; 
private int frameByteSize = 2048; // for 1024 fft size (16bit sample size) 
byte[] buffer; 

public RecorderThread() { 
    int recBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfiguration, audioEncoding); // need to be larger than size of a frame 
    audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, channelConfiguration, audioEncoding, recBufSize); 
    buffer = new byte[frameByteSize]; 
} 

public AudioRecord getAudioRecord() { 
    return audioRecord; 
} 

public boolean isRecording() { 
    return this.isAlive() && isRecording; 
} 

public void startRecording() { 
    try { 
     audioRecord.startRecording(); 
     isRecording = true; 
    } catch (Exception e) { 
     e.printStackTrace(); 
    } 
} 

public void stopRecording() { 
    try { 
     audioRecord.stop(); 
     isRecording = false; 
    } catch (Exception e) { 
     e.printStackTrace(); 
    } 
} 

public byte[] getFrameBytes() { 
    audioRecord.read(buffer, 0, frameByteSize); 

    // analyze sound 
    int totalAbsValue = 0; 
    short sample = 0; 
    float averageAbsValue = 0.0f; 

    for (int i = 0; i < frameByteSize; i += 2) { 
     sample = (short) ((buffer[i]) | buffer[i + 1] << 8); 
     totalAbsValue += Math.abs(sample); 
    } 
    averageAbsValue = totalAbsValue/frameByteSize/2; 

    //System.out.println(averageAbsValue); 

    // no input 
    if (averageAbsValue < 30) { 
     return null; 
    } 

    return buffer; 
} 

public void run() { 
    startRecording(); 
    } 
} 

DetectorThread.java

public class DetectorThread extends Thread { 

private RecorderThread recorder; 
private WaveHeader waveHeader; 
private ClapApi clapApi; 
private volatile Thread _thread; 

private LinkedList<Boolean> clapResultList = new LinkedList<Boolean>(); 
private int numClaps; 
private int totalClapsDetected = 0; 
private int clapCheckLength = 3; 
private int clapPassScore = 3; 

public DetectorThread(RecorderThread recorder) { 
    this.recorder = recorder; 
    AudioRecord audioRecord = recorder.getAudioRecord(); 

    int bitsPerSample = 0; 
    if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) { 
     bitsPerSample = 16; 
    } else if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT) { 
     bitsPerSample = 8; 
    } 

    int channel = 0; 
    // whistle detection only supports mono channel 
    //if (audioRecord.getChannelConfiguration() == AudioFormat.CHANNEL_CONFIGURATION_MONO) { 
     channel = 1; 
    //} 

    waveHeader = new WaveHeader(); 
    waveHeader.setChannels(channel); 
    waveHeader.setBitsPerSample(bitsPerSample); 
    waveHeader.setSampleRate(audioRecord.getSampleRate()); 
    clapApi = new ClapApi(waveHeader); 
} 

private void initBuffer() { 
    numClaps = 0; 
    clapResultList.clear(); 

    // init the first frames 
    for (int i = 0; i < clapCheckLength; i++) { 
     clapResultList.add(false); 
    } 
    // end init the first frames 
} 

public void start() { 
    _thread = new Thread(this); 
    _thread.start(); 
} 

public void stopDetection() { 
    _thread = null; 
} 

public void run() { 
    try { 
     byte[] buffer; 
     initBuffer(); 

     Thread thisThread = Thread.currentThread(); 
     while (_thread == thisThread) { 
      // detect sound 
      buffer = recorder.getFrameBytes(); 

      // audio analyst 
      if (buffer != null) { 
       // sound detected 
       MainActivity.clapsValue = numClaps; 

       // whistle detection 
       //System.out.println("*Whistle:"); 
       boolean isClap = clapApi.isClap(buffer); 
       if (clapResultList.getFirst()) { 
        numClaps--; 
       } 

       clapResultList.removeFirst(); 
       clapResultList.add(isClap); 

       if (isClap) { 
        numClaps++; 
       } 
       //System.out.println("num:" + numWhistles); 

       if (numClaps >= clapPassScore) { 
        // clear buffer 
        initBuffer(); 
        totalClapsDetected++; 
       } 
       // end whistle detection 
      } else { 
       // no sound detected 
       if (clapResultList.getFirst()) { 
        numClaps--; 
       } 
       clapResultList.removeFirst(); 
       clapResultList.add(false); 

       MainActivity.clapsValue = numClaps; 
      } 
      // end audio analyst 
     } 
    } catch (Exception e) { 
     e.printStackTrace(); 
    } 
} 

public int getTotalClapsDetected() { 
    return totalClapsDetected; 
    } 
} 

MainActivity.java

public class MainActivity extends Activity { 

public static final int DETECT_NONE = 0; 
public static final int DETECT_CLAP = 1; 
public static int selectedDetection = DETECT_NONE; 

private DetectorThread detectorThread; 
private RecorderThread recorderThread; 
private Thread detectedTextThread; 
public static int clapsValue = 0; 

@Override 
protected void onCreate(Bundle savedInstanceState) { 
    super.onCreate(savedInstanceState); 
    setContentView(R.layout.activity_main); 

    startVoiceDetection(); 
} 

@Override 
protected void onPause() { 
    super.onPause(); 
    stopVoiceDetection(); 
} 

@Override 
protected void onDestroy() { 
    super.onDestroy(); 
    android.os.Process.killProcess(android.os.Process.myPid()); 
} 

private void startVoiceDetection() { 
    selectedDetection = DETECT_CLAP; 
    recorderThread = new RecorderThread(); 
    recorderThread.start(); 
    detectorThread = new DetectorThread(recorderThread); 
    detectorThread.start(); 
    goListeningView(); 
} 

private void stopVoiceDetection() { 
    if (recorderThread != null) { 
     recorderThread.stopRecording(); 
     recorderThread = null; 
    } 
    if (detectorThread != null) { 
     detectorThread.stopDetection(); 
     detectorThread = null; 
    } 
    selectedDetection = DETECT_NONE; 
} 

private void goListeningView() { 
    if (detectedTextThread == null) { 
     detectedTextThread = new Thread() { 
      public void run() { 
       try { 
        while (recorderThread != null && detectorThread != null) { 
         runOnUiThread(new Runnable() { 
          public void run() { 
           if (detectorThread != null) { 
            Log.e("Clap", "Detected"); 
           } 
          } 
         }); 
         sleep(100); 
        } 
       } catch (Exception e) { 
        e.printStackTrace(); 
       } finally { 
        detectedTextThread = null; 
       } 
      } 
     }; 
     detectedTextThread.start(); 
    } 
    } 
} 
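
Regarding the suspected bit rate / frame size problem, a minimal sanity check is to log what the AudioRecord was actually configured with. This is only a sketch: the logRecorderConfig name, the "ClapDebug" tag and the call site are assumptions (it could, for example, be called from startVoiceDetection() right after the RecorderThread is created).

// Hypothetical debug helper: logs the recorder configuration so a bad sample rate, 
// channel or encoding value shows up immediately. 
private void logRecorderConfig(RecorderThread recorderThread) { 
    AudioRecord record = recorderThread.getAudioRecord(); 

    // A negative value here means the sample rate / channel / encoding combination is invalid. 
    int minBufSize = AudioRecord.getMinBufferSize( 
            record.getSampleRate(), 
            record.getChannelConfiguration(), 
            record.getAudioFormat()); 

    Log.d("ClapDebug", "state=" + record.getState()            // 1 == STATE_INITIALIZED 
            + ", sampleRate=" + record.getSampleRate()          // expected 44100 
            + ", minBufSize=" + minBufSize                      // must cover the 2048-byte frame 
            + ", recordingState=" + record.getRecordingState()); 
} 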

To debug this problem, you could save the buffer data to a file and then share that file or listen to it in an audio editor. Most likely the audio is getting corrupted somewhere along the way.
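
A rough sketch of that suggestion, assuming a hypothetical PcmDumper helper and a file in the app's external files directory; the resulting file can be imported into an audio editor as raw 44.1 kHz, 16-bit, mono, little-endian PCM:

import android.content.Context; 

import java.io.File; 
import java.io.FileOutputStream; 
import java.io.IOException; 

// Hypothetical helper: appends each raw PCM frame to a file for offline inspection. 
public class PcmDumper { 

    private final FileOutputStream out; 

    public PcmDumper(Context context) throws IOException { 
        // No WAV header is written; import the file as raw audio using the recorder's format. 
        out = new FileOutputStream(new File(context.getExternalFilesDir(null), "clap_debug.pcm")); 
    } 

    public void write(byte[] frame) throws IOException { 
        if (frame != null) { 
            out.write(frame); 
        } 
    } 

    public void close() throws IOException { 
        out.close(); 
    } 
} 

For example, each non-null buffer returned by recorder.getFrameBytes() in DetectorThread.run() could be passed to write(), so the file contains exactly the data the detector sees.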

OK, thanks for your valuable feedback. Do you have any other solution for this, or a way to get the event? If so, please let me know, because I need it.

Answer

Changing AudioFormat.CHANNEL_CONFIGURATION_MONO to AudioFormat.CHANNEL_IN_MONO fixed it for me.
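
For reference, applied to the RecorderThread from the question the change is just the channel constant; this is a sketch and everything else stays as posted. CHANNEL_CONFIGURATION_MONO is deprecated, while CHANNEL_IN_MONO is the input-side constant that getMinBufferSize() and the AudioRecord constructor expect.

// In RecorderThread: use the non-deprecated input channel constant. 
private int channelConfiguration = AudioFormat.CHANNEL_IN_MONO; // was CHANNEL_CONFIGURATION_MONO 
private int audioEncoding = AudioFormat.ENCODING_PCM_16BIT; 
private int sampleRate = 44100; 
private int frameByteSize = 2048; 

public RecorderThread() { 
    int recBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfiguration, audioEncoding); 
    audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, 
            channelConfiguration, audioEncoding, recBufSize); 
    buffer = new byte[frameByteSize]; 
} 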