I'm having trouble with my application because of GC, and I don't have enough experience to understand what is going on. Here is a detailed account of what happens.
I'm trying to build an application that processes audio in real time by applying an STFT (quick explanation).
Basically, I take each buffer (in my case the minimum buffer size is 1148 bytes), apply a window function and obtain a matrix of frames; then I apply an FFT to each frame; finally, I can apply some gain to each frame depending on frequency and time instant. Then I walk the same path backwards to get the modified version of the buffer.
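In code, the per-buffer pipeline boils down to this (these are the exact calls from the play loop in MainActivity below):

    stft.frameBuffer(buffer);    // windowing: byte[] buffer -> matrix of frames
    stft.fourierAnalysis();      // FFT on each frame, (gain would go here), then IFFT
    stft.buildBuffer(buffer);    // overlap-add the frames back into the byte[] buffer
    at.write(buffer, 0, count);  // hand the modified buffer to the AudioTrack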
Since my sampling frequency is 8000 Hz, each buffer has to be processed in less than 1148/8000 = 144 ms. Using System.currentTimeMillis() I measured that processing a buffer usually takes 70 to 100 ms, so that part is fine.
The problem comes from the garbage collector: my memory seems to be full, as you can see from the screenshot below, and the GC activity sometimes makes my audio crackle.
The thing is, I noticed two facts:

- If I skip the FFT and IFFT and just leave the frames as they are, the GC_CONCURRENT messages do not appear. That's because the FFT produces a lot of data (arrays of complex numbers); see the allocation-counting sketch right after this list.
- All the operations are done in a separate thread, so I tried to trigger a GC manually from the DDMS perspective right after the application starts. As you can see from the code below, the onCreate method does nothing but load the layout.. yet when I trigger the GC I see that my heap is already more than 90% used! I looked at the heap dump and generated a Leak Suspects report, and most of the memory is taken up by the classes 'android.content.res.Resources' and 'android.graphics.Bitmap'.. (here is the screenshot)
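One way I could quantify how much garbage each buffer really produces is with the allocation counters in android.os.Debug; this is only a sketch around the existing loop body, not something that is in my code yet:

    // hypothetical instrumentation of one pass of the play loop
    android.os.Debug.resetThreadAllocCount();
    android.os.Debug.resetThreadAllocSize();
    android.os.Debug.startAllocCounting();

    stft.frameBuffer(buffer);
    stft.fourierAnalysis();   // suspected source of the garbage
    stft.buildBuffer(buffer);

    android.os.Debug.stopAllocCounting();
    Log.d("ALLOC", "objects: " + android.os.Debug.getThreadAllocCount()
            + " / bytes: " + android.os.Debug.getThreadAllocSize());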
So, do you have any suggestions? It seems strange to me that my memory is already 90% used at startup, and that my heap does not grow a little to accommodate my needs.
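To double-check that 90% figure from inside the app rather than from DDMS, I suppose a heap snapshot like this in onCreate would do (Runtime is a standard API, so it should be safe to drop in):

    // rough Dalvik heap snapshot; maxMemory() is the per-app heap limit
    Runtime rt = Runtime.getRuntime();
    long used = rt.totalMemory() - rt.freeMemory();
    Log.d("HEAP", "used=" + (used / 1024) + "kB"
            + " total=" + (rt.totalMemory() / 1024) + "kB"
            + " max=" + (rt.maxMemory() / 1024) + "kB");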
MainActivity.java
package com.example.fileoutjava;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.View;
public class MainActivity extends Activity {

    static final int BUFFER_FACTOR = 1;
    DataInputStream dis;
    static final int FREQ = 8000;
    static final int FRAME_LENGHT = 32;
    static final int FRAME_SHIFT = 16;
    boolean isMusicStopped = true;
    AudioTrack at;
    Thread playThread;
    long time;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
    }

    public void playMusic(View v) {
        if (at == null) {
            Log.d("PLAY MUSIC", "LAUNCHING NEW PLAYER");
            playThread = new Thread(musicPlayerThread);
            playThread.start();
        }
    }

    public void stopMusic(View v) {
        isMusicStopped = true;
        playThread = null;
    }

    Runnable musicPlayerThread = new Runnable() {
        public void run() {
            Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
            /* eg: 8000 bytes per second, 1000 bytes = 125 ms */
            InputStream is = null;
            DataInputStream dis = null;
            try {
                is = MainActivity.this.getApplicationContext().getAssets().open("test.wav");
            } catch (IOException e) {
                e.printStackTrace();
            }
            if (is != null)
                dis = new DataInputStream(is); //dis = new DataInputStream(new BufferedInputStream(is,bSize));
            isMusicStopped = false;
            int min_bSize = AudioTrack.getMinBufferSize(FREQ, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
            int bSize = min_bSize * BUFFER_FACTOR;
            STFT stft = new STFT(FRAME_SHIFT, FRAME_LENGHT, FREQ, bSize);
            at = new AudioTrack(AudioManager.STREAM_MUSIC, FREQ, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bSize, AudioTrack.MODE_STREAM);
            at.play();
            int count = 0;
            byte[] buffer = new byte[bSize];
            time = System.currentTimeMillis();
            try {
                while (!isMusicStopped && (count = dis.read(buffer, 0, bSize)) >= 0) {
                    Log.d("TIME ELAPSED", "" + (System.currentTimeMillis() - time));
                    time = System.currentTimeMillis();
                    // Windowing
                    stft.frameBuffer(buffer);
                    // Fourier transform and inverse
                    stft.fourierAnalysis();
                    // Overlap-Add
                    stft.buildBuffer(buffer);
                    at.write(buffer, 0, count);
                }
                if (at != null) {
                    at.stop();
                    at.flush();
                    at.release();
                    at = null;
                }
                if (dis != null) {
                    dis.close();
                    dis = null;
                }
                if (is != null) {
                    is.close();
                    is = null;
                }
                if (stft != null) stft = null;
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    };

    private void stop() {
        isMusicStopped = true;
        playThread = null;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    protected void onPause() {
        this.stop();
        super.onPause();
    }

    @Override
    protected void onDestroy() {
        this.stop();
        super.onDestroy();
    }
}
STFT.java
package com.example.fileoutjava;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import android.util.Log;
import com.badlogic.gdx.audio.analysis.FFT;
public class STFT {

    private int fs, fl;             //frame shift and frame length in ms
    private int n_fs, n_fl;         //frame shift and length in samples
    private int buf_len;            //length of the buffer array (bytes)
    private int data_len;           //length of the buffer array (converted to short)
    private int padded_data_len;    //put 0 padding before and after the buffer short[] data
    private float n_segs;           //number of frames that can be taken from one buffer array
    private float[][] stft_matrix;
    private float[] window;         //Hamming coefficients
    private float norm_factor = 0;
    private boolean search_norm_factor = true;
    private FFT fft;
    private int i, j, k;            //indices for loops
    private ByteBuffer bb;
    private float[] tmp_buf;
    private float[] tmp_fft;
    private float[] tmp_ifft;

    public STFT(int frame_shift, int frame_length, int freq, int buf_len) {
        fs = frame_shift;
        fl = frame_length;
        this.buf_len = buf_len;
        this.data_len = buf_len / 2;
        //compute values from ms to samples
        n_fs = (int) Math.floor(fs * freq / 1000);
        n_fl = (int) Math.floor(fl * freq / 1000);
        padded_data_len = 2 * n_fl + data_len;
        //create coefficients
        window = hamming(n_fl);
        tmp_buf = new float[padded_data_len];
        bb = ByteBuffer.allocateDirect(2);
        bb.order(ByteOrder.LITTLE_ENDIAN);
        //compute how many frames can be extracted from the buffer
        n_segs = 1 + (float) (Math.ceil((this.padded_data_len - n_fl) / n_fs));
        //data matrix: size of frame (with padding from previous frame) * number of segments
        stft_matrix = new float[n_fl][(int) n_segs];
        Log.d("STFT STATS", "BufLen:" + this.buf_len + " // Flen:" + n_fl + " // FSh:" + n_fs +
                " // Nsegs:" + n_segs);
        //Initialize the FFT object
        fft = new FFT(n_fl * 2, freq);
        //buffers for FFT data, with zero padding
        tmp_fft = new float[n_fl * 2];
        tmp_ifft = new float[n_fl];
        for (int i = 0; i < n_fl * 2; i++) {
            tmp_fft[i] = 0;
            tmp_ifft[i / 2] = 0;
        }
    }

    //frames the whole buffer into the stft matrix
    public void frameBuffer(byte[] buf) {
        //initialize tmp_buffer and add 0 padding
        for (k = 0; k < padded_data_len; k++)
            tmp_buf[k] = 0;
        //fill the short[] buffer converting from byte[] buffer
        for (i = 0; i < buf_len; i += 2) {
            bb.position(0);
            bb.put(buf[i]);
            bb.put(buf[i + 1]);
            tmp_buf[n_fl + i / 2] = (float) bb.getShort(0);
        }
        //frame the short[] buffer into the matrix using windowing
        for (j = 0; j < n_segs; j++) {
            for (int i = 0; i < n_fl; i++) {
                stft_matrix[i][j] = tmp_buf[j * n_fs + i] * window[i];
                //NORMALIZATION FACTOR RETRIEVAL: only the first time
                if (search_norm_factor && (j * n_fs + i) == 512)
                    norm_factor += window[i];
            }
        }
        if (search_norm_factor)
            norm_factor *= 1.2;
        //retrieve the norm factor only the first time
        search_norm_factor = false;
    }

    //sums all frames from the STFT matrix into one buffer
    public void buildBuffer(byte[] output) {
        //initialize tmp_buffer and add 0 padding
        for (k = 0; k < padded_data_len; k++)
            tmp_buf[k] = 0;
        //Overlap-Add
        for (j = 0; j < n_segs; j++) {
            for (i = 0; i < n_fl; i++) {
                tmp_buf[j * n_fs + i] += stft_matrix[i][j];
            }
        }
        //convert from short[] to byte[] (with normalization)
        for (i = 0; i < buf_len; i += 2) {
            bb.position(0);
            bb.putShort((short) (tmp_buf[n_fl + i / 2] / norm_factor));
            output[i] = bb.get(0);
            output[i + 1] = bb.get(1);
        }
    }

    //FFT and IFFT of the buffer
    public void fourierAnalysis() {
        for (j = 0; j < n_segs; j++) {
            for (i = 0; i < n_fl; i++) {
                tmp_fft[i] = stft_matrix[i][j];
            }
            fft.forward(tmp_fft);
            //OPERATIONS ON THE SPECTRUM ?
            fft.inverse(tmp_ifft);
            for (int i = 0; i < n_fl; i++) {
                stft_matrix[i][j] = tmp_ifft[i];
            }
        }
    }

    //utility method for Hamming coefficients
    private float[] hamming(int len) {
        float[] win = new float[len];
        for (i = 0; i < len; i++) {
            win[i] = (float) (0.54 - 0.46 * Math.cos((2 * Math.PI * i) / (len - 1)));
        }
        return win;
    }
}