Creating a Visualizer for an Android audio recording

时间:2014-06-06 08:41:22

标签: android media-player android-canvas audio-recording

So basically I have created a page with a visualizer and a button for recording audio. I want the visualizer to react to the audio while I am recording it. I think I need to get the recording's MaxAmplitude and then draw the visualization dynamically for each small time slice. Can someone help me with this, or suggest a better way to do it? With the code I have pasted below, the visualizer only runs on the recording's playback after recording has finished.
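
Roughly what I have in mind, as an untested sketch (mAmplitudeView and its updateAmplitude() method are placeholder names for whatever view ends up drawing the live wave):

// Rough sketch of the polling idea above (untested): MediaRecorder has no
// capture callback, so poll getMaxAmplitude() on a Handler while recording
// and hand each peak reading (0..32767) to the view that draws the wave.
private final Handler mAmplitudeHandler = new Handler();
private final Runnable mAmplitudePoller = new Runnable() {
    @Override
    public void run() {
        if (mRecorder != null) {
            int amplitude = mRecorder.getMaxAmplitude(); // peak since the last call
            mAmplitudeView.updateAmplitude(amplitude);   // placeholder custom view
            mAmplitudeHandler.postDelayed(this, 50);     // poll again in ~50 ms
        }
    }
};
// Start with mAmplitudeHandler.post(mAmplitudePoller) right after
// mRecorder.start(), and call removeCallbacks(mAmplitudePoller) when stopping.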

public class MainActivity extends Activity {

    private static final float VISUALIZER_HEIGHT_DIP = 200f;

    MediaPlayer mp;

    private Visualizer mVisualizer;

    private LinearLayout mLinearLayout;
    private VisualizerView mVisualizerView;
    private TextView mStatusTextView;
    private File metapath;
    private static String[] META_PATH;
    private Button play;
    private boolean mStartRecording;
    private static final String LOG_TAG = "AudioRecordTest";
    private static String mFileName = null;

    private MediaRecorder mRecorder = null;

    private MediaPlayer mPlayer = null;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        mStatusTextView = (TextView) findViewById(R.id.recording);

        if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)
                || Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED_READ_ONLY))
            metapath = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC);

        META_PATH = metapath.list(new WavFilter());

        mLinearLayout = (LinearLayout) findViewById(R.id.subscreen);

        play = (Button) findViewById(R.id.startandstop);

        mStartRecording = true;
        play.setOnClickListener(new View.OnClickListener() {

            @Override
            public void onClick(View v) {
                onRecord(mStartRecording);
                if (mStartRecording) {
                    play.setText("Stop recording");
                } else {
                    play.setText("Start recording");
                }
                mStartRecording = !mStartRecording;
            }
        });

        setupVisualizerFxAndUI();
        mVisualizer.setEnabled(true);

        mStatusTextView.setText("Playing audio...");
    }


    private void onRecord(boolean start) {
        if (start) {
            startRecording();
        } else {
            stopRecording();
        }
    }

    private void startRecording() {
        mRecorder = new MediaRecorder();
        mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mRecorder.setOutputFile(metapath.getPath() + "/myfile.3gp");
        mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);

        try {
            mRecorder.prepare();
        } catch (IOException e) {
            Log.e(LOG_TAG, "prepare() failed");
        }

        mRecorder.start();
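        // From this point on, getMaxAmplitude() can be polled (for example with
        // the Handler sketch above) to drive a visualization of the live input.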
    }

    private void stopRecording() {
        mRecorder.stop();
        mRecorder.release();
        mRecorder = null;
        mPlayer = new MediaPlayer();
        try {
            mPlayer.setDataSource(metapath.getPath() + "/myfile.3gp");
            mPlayer.prepare();
            mPlayer.start();
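            // Playback into the output mix begins here, which is why the
            // Visualizer (attached to session 0 below) only reacts after
            // recording has stopped.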
        } catch (IOException e) {
            Log.e(LOG_TAG, "prepare() failed");
        }
    }

    class WavFilter implements FilenameFilter {
        public boolean accept(File dir, String name) {
            return (name.endsWith(".wav"));
        }
    }

    private void setupVisualizerFxAndUI() {

        mVisualizerView = new VisualizerView(this);
        mVisualizerView.setLayoutParams(new ViewGroup.LayoutParams(
                600,
                (int) (VISUALIZER_HEIGHT_DIP * getResources().getDisplayMetrics().density)));

        mLinearLayout.addView(mVisualizerView);
        mLinearLayout.setGravity(Gravity.CENTER_HORIZONTAL);

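        // Visualizer(0) attaches to the global audio output mix, so it only
        // captures audio that is being played back, not what MediaRecorder
        // is currently capturing from the microphone.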
        mVisualizer = new Visualizer(0);
        mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
        mVisualizer.setDataCaptureListener(new Visualizer.OnDataCaptureListener() {
            public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes,
                    int samplingRate) {
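                // Shifts every waveform sample up by 2 before handing it to the
                // view, i.e. a small vertical offset of the drawn wave.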
                for (int i = 0; i < bytes.length; i++)
                    bytes[i] += 2;
                mVisualizerView.updateVisualizer(bytes);
            }

            public void onFftDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {}
        }, Visualizer.getMaxCaptureRate() / 2, true, false);
    }

    @Override
    protected void onPause() {
        super.onPause();

        if (isFinishing() && mp != null) {
            mVisualizer.release();
            mp.release();
            mp = null;
        }
    }

    class VisualizerView extends View {
        private byte[] mBytes;
        private float[] mPoints;
        private Rect mRect = new Rect();

        private Paint mForePaint = new Paint();

        public VisualizerView(Context context) {
            super(context);
            init();
        }

        private void init() {
            mBytes = null;

            mForePaint.setStrokeWidth(6f);  // width of the moving wave
            mForePaint.setAntiAlias(true);
            mForePaint.setColor(Color.RED); // color of the moving wave
        }

        public void updateVisualizer(byte[] bytes) {
            mBytes = bytes;
            invalidate();
        }

        @Override
        protected void onDraw(Canvas canvas) {
            super.onDraw(canvas);

            if (mBytes == null) {
                return;
            }

            if (mPoints == null || mPoints.length < mBytes.length * 4) {
                mPoints = new float[mBytes.length * 4];
            }

            mRect.set(0, 0, getWidth(), getHeight());

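            // Waveform bytes from the Visualizer are unsigned 8-bit samples;
            // adding 128 and casting back to byte re-centers each value around
            // zero before it is scaled to half the view height.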
            for (int i = 0; i < mBytes.length - 1; i++) {
                mPoints[i * 4] = mRect.width() * i / (mBytes.length - 1);
                mPoints[i * 4 + 1] = mRect.height() / 2
                        + ((byte) (mBytes[i] + 128)) * (mRect.height() / 2) / 128;
                mPoints[i * 4 + 2] = mRect.width() * (i + 1) / (mBytes.length - 1);
                mPoints[i * 4 + 3] = mRect.height() / 2
                        + ((byte) (mBytes[i + 1] + 128)) * (mRect.height() / 2) / 128;
            }
            canvas.drawColor(Color.BLACK);
            canvas.drawLines(mPoints, mForePaint);
        }
    }

}
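
From what I understand, new Visualizer(0) attaches to the global output mix, which would explain why the visualizer only reacts once playback starts in stopRecording(). If the goal were just to visualize playback of the finished file, I suppose the Visualizer could be tied to the player's own session instead; an untested sketch reusing the mPlayer and mVisualizer fields from above:

mPlayer = new MediaPlayer();
try {
    mPlayer.setDataSource(metapath.getPath() + "/myfile.3gp");
    mPlayer.prepare();
    // Attach to this player's session instead of the output mix (session 0);
    // the same setDataCaptureListener(...) setup as in setupVisualizerFxAndUI()
    // would still be needed to feed the view.
    mVisualizer = new Visualizer(mPlayer.getAudioSessionId());
    mVisualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[1]);
    mVisualizer.setEnabled(true);
    mPlayer.start();
} catch (IOException e) {
    Log.e(LOG_TAG, "prepare() failed");
}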

0 Answers:

No answers yet.