How do I use the onWaveFormDataCapture result of a Visualizer object?

Date: 2012-06-18 10:54:40

Tags: android fft android-mediaplayer waveform visualizer

I'm new to Android programming and audio visualization. I want to build a simple audio visualizer using the MediaPlayer and Visualizer classes. My problem is that I don't really know what the waveform data is. Do I have to use it to visualize the audio? I'm using the code below. The problem is that it only shows audio for the first 10-12 seconds of the file; after that, I can't capture any more data! Where am I going wrong?

public void attachVisualizer()
{
    Visualizer vis = new Visualizer(mPlayer.getAudioSessionId());

    vis.setCaptureSize(Visualizer.getCaptureSizeRange()[0]);
    vis.setDataCaptureListener(new Visualizer.OnDataCaptureListener() {
        public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {
            int sum = 0;

            for(int i = 0; i < bytes.length; i++) {
                sum += bytes[i];
            }

            if(sum > 8000) {
                // Do something which uses mPlayer.getCurrentPosition() in mathematics
            }
        }

        public void onFftDataCapture(Visualizer visualizer, byte[] fft, int samplingRate) {}
    }, Visualizer.getMaxCaptureRate(), true, false);

    vis.setEnabled(true);
}
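
From the Visualizer documentation, onWaveFormDataCapture delivers 8-bit unsigned mono PCM samples packed into a Java byte[], so each byte has to be masked with 0xff before doing arithmetic on it. A minimal sketch of a loudness check along those lines (the threshold is purely illustrative):

// Sketch: compute a rough loudness value from one waveform capture.
// Samples are unsigned 8-bit with silence at 128, so convert each signed
// Java byte back to 0..255 and measure its distance from the midpoint.
public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {
    long energy = 0;
    for (int i = 0; i < bytes.length; i++) {
        int sample = (bytes[i] & 0xff) - 128; // roughly -128..127, 0 = silence
        energy += sample * sample;
    }
    double rms = Math.sqrt((double) energy / bytes.length);
    if (rms > 10) { // purely illustrative threshold, tune for your content
        // react here, e.g. using mPlayer.getCurrentPosition()
    }
}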

Edit
Another question that occurred to me: how can I keep track of the length of time contained in a given piece of audio?
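
For reference, MediaPlayer itself reports the total length of the prepared audio in milliseconds; a minimal sketch, assuming the same mPlayer instance as above:

// Sketch: MediaPlayer reports both the total length and the current
// playback position of the prepared audio, in milliseconds.
int durationMs = mPlayer.getDuration();        // total length of the clip
int positionMs = mPlayer.getCurrentPosition(); // how far playback has progressed
float progress = positionMs / (float) durationMs;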

1 answer:

Answer 0: (score: 3)

This is how I do it:

// visualizer is a member field; session 0 attaches it to the output mix.
visualizer = new Visualizer(0);
visualizer.setEnabled(false);

// Smallest available capture size, maximum capture rate, requesting both
// waveform and FFT data from the capture listener.
visualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[0]);
visualizer.setDataCaptureListener(
        new Visualizer.OnDataCaptureListener() {

            public void onWaveFormDataCapture(Visualizer visualizer,
                    byte[] bytes, int samplingRate) {
                eqview.setVSWaveForm(bytes);
            }

            public void onFftDataCapture(Visualizer visualizer,
                    byte[] bytes, int samplingRate) {
                fftview.setVSFftData(bytes);
            }
        }, Visualizer.getMaxCaptureRate(), true, true);
visualizer.setEnabled(true);
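
Note that the framework Visualizer needs the RECORD_AUDIO permission, and creating it on audio session 0 (the output mix, as above) additionally needs MODIFY_AUDIO_SETTINGS; roughly, in AndroidManifest.xml:

<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />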

For the Visualizer view, I use this code that I found online; I didn't write it:

package app.util;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.View;

/***
* 
* 
* @author yokmama
* 
*/
public class VisualizerView extends View {

    private byte[] mBytes;

    private float[] mPoints;

    private Rect mRect = new Rect();
    //SharedPreferences prefs;
    private Paint mForePaint = new Paint();

    public VisualizerView(Context context, AttributeSet attrs) {
        super(context, attrs);
        //prefs = PreferenceManager.getDefaultSharedPreferences(context);
        init();
    }

    private void init() {
        mBytes = null;
        //int colorchosen = prefs.getInt("COLOR_PREFERENCE_KEY",
        //      Color.WHITE);
        mForePaint.setStrokeWidth(1);
        //mForePaint.setAntiAlias(true);
        mForePaint.setColor(Color.WHITE);
        //mForePaint.setMaskFilter(new BlurMaskFilter(1, Blur.INNER));

    }

    // Hand a fresh waveform capture to the view and trigger a redraw.
    public void updateVisualizer(byte[] bytes) {
        mBytes = bytes;
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);

        if (mBytes == null) {
            return;
        }

        if (mPoints == null || mPoints.length < mBytes.length * 4) {
            mPoints = new float[mBytes.length * 4];
        }

        mRect.set(0, 0, getWidth(), getHeight());

        // Each sample becomes one line segment: x values are spread evenly
        // across the width, y values are centred on the middle of the view.
        // The (byte)(value + 128) cast converts the unsigned 8-bit sample
        // (stored in a signed Java byte) into a signed amplitude around 0.
        for (int i = 0; i < mBytes.length - 1; i++) {
            mPoints[i * 4] = mRect.width() * i / (mBytes.length - 1);
            mPoints[i * 4 + 1] = mRect.height() / 2
                    + ((byte) (mBytes[i] + 128)) * (mRect.height() / 2) / 128;
            mPoints[i * 4 + 2] = mRect.width() * (i + 1) / (mBytes.length - 1);
            mPoints[i * 4 + 3] = mRect.height() / 2
                    + ((byte) (mBytes[i + 1] + 128)) * (mRect.height() / 2)
                    / 128;
        }

        canvas.drawLines(mPoints, mForePaint);
        //canvas.drawPoints(mPoints, mForePaint);
    }

}
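
To connect this view to the capture listener above: the listener in the answer calls eqview.setVSWaveForm(bytes) and fftview.setVSFftData(bytes), which are methods on the answerer's own views, while this class exposes updateVisualizer(byte[]). A minimal sketch of wiring the waveform callback to it (the field name waveformView is hypothetical):

// "waveformView" is a hypothetical field bound to a VisualizerView from the layout.
public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {
    waveformView.updateVisualizer(bytes);
}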