Musicg library for clap detection is not working properly

Date: 2016-08-05 06:26:35

Tags: android voice-recognition musicg

I am using the musicg library for clap detection, but as soon as the activity starts it keeps reporting clap detections even though no one is clapping. I think there is some problem with the bit-rate or frame-size values. Here is my code:

RecorderThread.java

public class RecorderThread extends Thread {

private AudioRecord audioRecord;
private boolean isRecording;
private int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;
private int sampleRate = 44100;
private int frameByteSize = 2048; // for 1024 fft size (16bit sample size)
byte[] buffer;

public RecorderThread() {
    int recBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfiguration, audioEncoding); // need to be larger than size of a frame
    audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, channelConfiguration, audioEncoding, recBufSize);
    buffer = new byte[frameByteSize];
}

public AudioRecord getAudioRecord() {
    return audioRecord;
}

public boolean isRecording() {
    return this.isAlive() && isRecording;
}

public void startRecording() {
    try {
        audioRecord.startRecording();
        isRecording = true;
    } catch (Exception e) {
        e.printStackTrace();
    }
}

public void stopRecording() {
    try {
        audioRecord.stop();
        isRecording = false;
    } catch (Exception e) {
        e.printStackTrace();
    }
}

public byte[] getFrameBytes() {
    audioRecord.read(buffer, 0, frameByteSize);

    // analyze sound
    int totalAbsValue = 0;
    short sample = 0;
    float averageAbsValue = 0.0f;

    for (int i = 0; i < frameByteSize; i += 2) {
        sample = (short) ((buffer[i]) | buffer[i + 1] << 8);
        totalAbsValue += Math.abs(sample);
    }
    averageAbsValue = totalAbsValue / frameByteSize / 2;

    //System.out.println(averageAbsValue);

    // no input
    if (averageAbsValue < 30) {
        return null;
    }

    return buffer;
}

public void run() {
    startRecording();
}
}

DetectorThread.java

public class DetectorThread extends Thread {

private RecorderThread recorder;
private WaveHeader waveHeader;
private ClapApi clapApi;
private volatile Thread _thread;

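// clapResultList acts as a sliding window over the last clapCheckLength frame results;
// numClaps counts how many frames in the window were classified as claps, and
// totalClapsDetected is incremented once numClaps reaches clapPassScore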
private LinkedList<Boolean> clapResultList = new LinkedList<Boolean>();
private int numClaps;
private int totalClapsDetected = 0;
private int clapCheckLength = 3;
private int clapPassScore = 3;

public DetectorThread(RecorderThread recorder) {
    this.recorder = recorder;
    AudioRecord audioRecord = recorder.getAudioRecord();

    int bitsPerSample = 0;
    if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) {
        bitsPerSample = 16;
    } else if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT) {
        bitsPerSample = 8;
    }

    int channel = 0;
    // clap detection only supports a mono channel
    //if (audioRecord.getChannelConfiguration() == AudioFormat.CHANNEL_CONFIGURATION_MONO) {
        channel = 1;
    //}

    waveHeader = new WaveHeader();
    waveHeader.setChannels(channel);
    waveHeader.setBitsPerSample(bitsPerSample);
    waveHeader.setSampleRate(audioRecord.getSampleRate());
    clapApi = new ClapApi(waveHeader);
}

private void initBuffer() {
    numClaps = 0;
    clapResultList.clear();

    // init the first frames
    for (int i = 0; i < clapCheckLength; i++) {
        clapResultList.add(false);
    }
    // end init the first frames
}

public void start() {
    _thread = new Thread(this);
    _thread.start();
}

public void stopDetection() {
    _thread = null;
}

public void run() {
    try {
        byte[] buffer;
        initBuffer();

        Thread thisThread = Thread.currentThread();
        while (_thread == thisThread) {
            // detect sound
            buffer = recorder.getFrameBytes();

            // audio analyst
            if (buffer != null) {
                // sound detected
                MainActivity.clapsValue = numClaps;

                // clap detection
                //System.out.println("*Clap:");
                boolean isClap = clapApi.isClap(buffer);
                if (clapResultList.getFirst()) {
                    numClaps--;
                }

                clapResultList.removeFirst();
                clapResultList.add(isClap);

                if (isClap) {
                    numClaps++;
                }
                //System.out.println("num:" + numClaps);

                if (numClaps >= clapPassScore) {
                    // clear buffer
                    initBuffer();
                    totalClapsDetected++;
                }
                // end clap detection
            } else {
                // no sound detected
                if (clapResultList.getFirst()) {
                    numClaps--;
                }
                clapResultList.removeFirst();
                clapResultList.add(false);

                MainActivity.clapsValue = numClaps;
            }
            // end audio analyst
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

public int getTotalClapsDetected() {
    return totalClapsDetected;
}
}

MainActivity.java

public class MainActivity extends Activity {

public static final int DETECT_NONE = 0;
public static final int DETECT_CLAP = 1;
public static int selectedDetection = DETECT_NONE;

private DetectorThread detectorThread;
private RecorderThread recorderThread;
private Thread detectedTextThread;
public static int clapsValue = 0;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    startVoiceDetection();
}

@Override
protected void onPause() {
    super.onPause();
    stopVoiceDetection();
}

@Override
protected void onDestroy() {
    super.onDestroy();
    android.os.Process.killProcess(android.os.Process.myPid());
}

private void startVoiceDetection() {
    selectedDetection = DETECT_CLAP;
    recorderThread = new RecorderThread();
    recorderThread.start();
    detectorThread = new DetectorThread(recorderThread);
    detectorThread.start();
    goListeningView();
}

private void stopVoiceDetection() {
    if (recorderThread != null) {
        recorderThread.stopRecording();
        recorderThread = null;
    }
    if (detectorThread != null) {
        detectorThread.stopDetection();
        detectorThread = null;
    }
    selectedDetection = DETECT_NONE;
}

private void goListeningView() {
    if (detectedTextThread == null) {
        detectedTextThread = new Thread() {
            public void run() {
                try {
                    while (recorderThread != null && detectorThread != null) {
                        runOnUiThread(new Runnable() {
                            public void run() {
                                if (detectorThread != null) {
                                    Log.e("Clap", "Detected");
                                }
                            }
                        });
                        sleep(100);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    detectedTextThread = null;
                }
            }
        };
        detectedTextThread.start();
    }
}
}

1 Answer:

Answer 0 (score: 0):

Change AudioFormat.CHANNEL_CONFIGURATION_MONO to AudioFormat.CHANNEL_IN_MONO (the former constant is deprecated). Hope this helps.
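For reference, here is a minimal sketch of how that change would look in the question's RecorderThread constructor. Only the channel constant comes from the answer; the static-final constants are just the question's fields renamed for readability, and the check on the return value of getMinBufferSize plus the Math.max guard are my own additions, based on the documented behaviour that getMinBufferSize returns AudioRecord.ERROR or AudioRecord.ERROR_BAD_VALUE when the requested sample rate / channel / encoding combination is not supported.

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

public class RecorderThread extends Thread {

    private static final int SAMPLE_RATE = 44100;
    private static final int FRAME_BYTE_SIZE = 2048; // 1024 samples per frame, 16 bits each

    private final AudioRecord audioRecord;
    private final byte[] buffer = new byte[FRAME_BYTE_SIZE];

    public RecorderThread() {
        // CHANNEL_IN_MONO replaces the deprecated CHANNEL_CONFIGURATION_MONO
        int channelConfig = AudioFormat.CHANNEL_IN_MONO;
        int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;

        int recBufSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, channelConfig, audioEncoding);
        if (recBufSize == AudioRecord.ERROR || recBufSize == AudioRecord.ERROR_BAD_VALUE) {
            // this device does not support the requested recording parameters
            throw new IllegalStateException("Unsupported AudioRecord configuration");
        }

        // the record buffer must be at least as large as one analysis frame
        audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
                channelConfig, audioEncoding, Math.max(recBufSize, FRAME_BYTE_SIZE));
    }

    // ... startRecording(), stopRecording(), getFrameBytes() and run() stay as in the question
}

The sketch keeps the original 2048-byte frame size, so DetectorThread and the rest of the code do not need to change; failing fast on a bad buffer size also makes an unsupported configuration show up in the constructor rather than as confusing read errors later on.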