我的应用程序在预期行为大约10秒后冻结,我很难确定原因。
应用程序应该从麦克风中采样音频,处理它以检测峰值频率并在屏幕上显示结果。我有MainActivity,其中定义了一个线程来更新视图。我有两个单独实现的线程,一个用于通过AudioRecord类录制音频,一个用于处理音频信号和检测峰值频率。记录和处理似乎有效,并产生预期的结果。但是,经过一段时间(确切的时间从一次运行到下一次运行),应用程序将冻结。然后,日志显示
的重复消息W / art:暂停所有线程:x.xxxms
我/艺术:背景粘性并发标记扫描GC释放...
GC删除的对象数量可能非常高(一种情况:62119(3MB)),我想知道我在哪里创建了这个数量的对象以及如何避免它?我怀疑音频缓冲区存在问题,但不知道如何最好地诊断它。
这里有相关代码:
MainActivity.java
public class MainActivity extends AppCompatActivity {
private static final String TAG = "StringTuner";
VerticalLineDrawingView drawView; // view to draw frequency indicator
TextView textView; // view for text output
private AudioRecorderThread recorder; // thread for recording audio
private ProcessingThread processor; // thread for processing audio signal
private Thread viewUpdater; // thread for updating views
@Override
public void onDestroy() {
super.onDestroy();
recorder.releaseAudioRecord();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
Log.d(TAG, "Creating the main activity");
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
drawView = (VerticalLineDrawingView) this.findViewById(R.id.drawView_NoteIndicator);
textView = (TextView) this.findViewById(R.id.textView_TextOutput);
Log.d(TAG, "Setting up AudioRecord and processing thread");
recorder = new AudioRecorderThread();
recorder.start();
processor = new ProcessingThread(recorder);
processor.start();
// thread for updating view
if (viewUpdater == null){
viewUpdater = new Thread() {
public void run() {
try {
while (recorder != null && processor != null) {
runOnUiThread(new Runnable() {
public void run() {
if (processor != null){
textView.setText(String.valueOf(processor.getPeakFrequency()) + " Hz\n" + processor.getProcessingTime() + " ms");
drawView.reposition((int) (processor.getPosition() * drawView.getViewWidth()));
}
}
});
sleep(1);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
viewUpdater = null;
}
}
};
viewUpdater.start();
}
}
}
AudioRecorderThread.java
public class AudioRecorderThread extends Thread {
private AudioRecord audioRecord;
private boolean isRecording;
private int nSample;
private int frameByteSize;
private byte[] frame;
private byte[] buffer;
public AudioRecorderThread(){
int channel = AudioFormat.CHANNEL_IN_MONO; // channel configuration
int encoding = AudioFormat.ENCODING_PCM_16BIT; // audio encoding
int fS = getMinimumSampleRate(); // sample rate [Hz]
// get minimum buffer size for AudioRecord session
int minBuffer = AudioRecord.getMinBufferSize(fS, channel, encoding);
int src = MediaRecorder.AudioSource.MIC; // audio source
// instantiate AudioRecord session
audioRecord = new AudioRecord(src, fS, channel, encoding, minBuffer);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
throw new RuntimeException("AudioRecord session could not be initialized.");
}
nSample = getNextPowerOf2(fS); // frequency resolution below 1 Hz
frameByteSize = 2 * nSample; // 16bit -> 1 frame = 2* sample size
frame = new byte[frameByteSize];
buffer = new byte[frameByteSize]; // buffer for reading data
}
public AudioRecord getAudioRecord(){
return audioRecord;
}
public boolean isRecording(){
return this.isAlive() && isRecording;
}
public void startRecording(){
try{
audioRecord.startRecording();
isRecording = true;
} catch (Exception e) {
e.printStackTrace();
}
}
public void stopRecording(){
try{
audioRecord.stop();
isRecording = false;
} catch (Exception e) {
e.printStackTrace();
}
}
public void releaseAudioRecord(){
try{
audioRecord.release();
} catch (Exception e) {
e.printStackTrace();
}
}
public byte[] getFrame(){
audioRecord.read(buffer, 0, 800); // one sample = 2 bytes (16 bits)
System.arraycopy(frame,800,frame,0,800);
System.arraycopy(buffer,0,frame,frameByteSize-800,800);
return frame;
}
public int getFrameByteSize(){
return frameByteSize;
}
public double getFrequencySteps(){
return ((double) audioRecord.getSampleRate()) / ((double) nSample);
}
public void run() {
startRecording();
// fill buffer
audioRecord.read(buffer, 0, frameByteSize); // read 1 frame of data into buffer
System.arraycopy(buffer, 0, frame, 0, frameByteSize);
}
}
ProcessingThread.java
public class ProcessingThread extends Thread{
private AudioRecorderThread recorder;
private volatile Thread _thread;
private int bytesPerSample;
private long t_process;
private double f_peak;
private double position;
private double[] stringFrequenciesLog = new double[] {Math.log(82.4), Math.log(110), Math.log(146.8), Math.log(196), Math.log(246.9), Math.log(329.6)};
private double[] stringPositions = new double[] {2.0/9.0, 3.0/9.0, 4.0/9.0, 5.0/9.0, 6.0/9.0, 7.0/9.0};
public ProcessingThread(AudioRecorderThread recorder){
this.recorder = recorder;
AudioRecord audioRecord = recorder.getAudioRecord();
if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT){
bytesPerSample = 2;
}
else if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT){
bytesPerSample = 1;
}
}
public void start() {
_thread = new Thread(this);
_thread.start();
}
public void stopDetection(){
_thread = null;
}
public void run() {
try {
double maxValue;
long t0;
byte[] frame;
short[] sample = new short[recorder.getFrameByteSize()/2];
double[] sampleAsDouble = new double[sample.length];
double[] magnitudes;
FastFourierTransform fft = new com.dsp.FastFourierTransform();
Thread thisThread = Thread.currentThread();
while (_thread == thisThread) {
t0 = System.nanoTime();
// read recorded audio data
frame = recorder.getFrame();
// processing
if (frame != null) {
// sound detected
sample = decodeSample(frame);
for (int j=0;j<sample.length;j++) {
sampleAsDouble[j] = (double)sample[j];
}
// todo: bandpass filter the audio signal
// todo: fft the audio signal
magnitudes = fft.getMagnitudes(sampleAsDouble);
// todo: identify peak frequency
maxValue = magnitudes[0];
f_peak = 0.0;
for (int i=1; i<magnitudes.length; i++) {
if (magnitudes[i]>maxValue) {
maxValue = magnitudes[i];
f_peak = (double) i * recorder.getFrequencySteps();
}
}
frequency2position(f_peak);
t_process = System.nanoTime() - t0;
}
else{
// no sound detected
f_peak = -1;
position = -5;
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
private short[] decodeSample(byte[] buffer) {
short[] sample;
if (bytesPerSample==2) {
sample = new short[buffer.length/2];
for (int i = 0; i < buffer.length; i += 2) {
sample[i/2] = (short) ((buffer[i]) | buffer[i + 1] << 8);
}
} else {
sample = new short[buffer.length];
for (int i = 0; i < buffer.length; i++) {
sample[i] = (short) buffer[i];
}
}
return sample;
}
private void frequency2position(double f){
// check f is in valid frequency range
double f_min = 30;
double f_max = 3000;
if (f<f_min) {
position = -1;
return;
}
if (f>f_max) {
position = -2;
return;
}
double f_log = Math.log(f);
// find closest string frequency
int closestIndex = 0;
double distance = Math.abs(f_log - stringFrequenciesLog[closestIndex]);
while ((closestIndex < stringFrequenciesLog.length-1) && (Math.abs(f_log - stringFrequenciesLog[closestIndex+1])) < distance) {
distance = Math.abs(f_log - stringFrequenciesLog[closestIndex+1]);
closestIndex++;
}
double alpha = f_log/stringFrequenciesLog[closestIndex];
position = alpha * stringPositions[closestIndex];
if (position<0) {
position = -3;
return;
}
if (position>1) {
position = -4;
}
}
public double getPosition(){
return position;
}
public double getPeakFrequency(){
return f_peak;
}
public long getProcessingTime(){
return t_process/1000000; // in ms
}
}
VerticalLineDrawingView.java
public class VerticalLineDrawingView extends android.support.v7.widget.AppCompatTextView {
private int position = -1;
private int screenHeight = 0;
private int screenWidth = 0;
private Paint mLinePaint = new Paint();
public VerticalLineDrawingView(final Context ct) {
super(ct);
init();
}
public VerticalLineDrawingView(final Context ct, final AttributeSet attrs) {
super(ct, attrs);
init();
}
public VerticalLineDrawingView(final Context ct, final AttributeSet attrs, final int defStyle) {
super(ct, attrs, defStyle);
init();
}
private void init() {
mLinePaint.setColor(ResourcesCompat.getColor(getResources(), R.color.greenPrimary, null));
mLinePaint.setStrokeWidth(5);
}
@Override
protected void dispatchDraw(Canvas canv) {
if (position>0) {
canv.drawLine(position, 0, position, screenHeight, mLinePaint);
getLayout().draw(canv);
}
}
@Override
protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
screenHeight = getMeasuredHeight();
screenWidth = getMeasuredWidth();
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
screenWidth = w;
screenHeight = h;
}
public void reposition(int newpos) {
position = newpos;
invalidate();
}
public int getViewWidth() {
return screenWidth;
}
public int getPosition() {
return position;
}
}
答案 0 :(得分:0)
你正在对mainthread进行许多操作,这就是它挂起的原因,以避免使用Asynctask。