Android - 如何获取过滤后的音频并保存为wav文件

时间:2015-12-22 18:09:38

标签: java android audio filter signal-processing

如何获取过滤后的音频并将其输出为wav文件?

我已经使用IIR带通滤波器对音频进行了过滤。当我点击"Play"按钮时,我能听到过滤后的音频,但打开保存的.wav文件时,里面是原始音频,而不是过滤后的音频。

当前代码实现

package com.example.audio;


import ddf.minim.effects.*;
import ddf.minim.*;
import ddf.minim.analysis.*;
import ddf.minim.javasound.*;
import ddf.minim.javax.sound.sampled.*;
import ddf.minim.javax.sound.sampled.spi.*;
import ddf.minim.signals.*;
import ddf.minim.spi.*;
import ddf.minim.ugens.*;


import java.io.BufferedInputStream;

import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;

import com.varma.samples.audiorecorder.R;

import android.app.Activity;
import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.text.SpannableStringBuilder;
import android.text.style.RelativeSizeSpan;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;


public class RecorderActivity extends Activity {

    private static final int RECORDER_BPP = 16;
    private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav";
    private static final String AUDIO_RECORDER_FOLDER = "AudioRecorder";
    private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
    private static final int RECORDER_SAMPLERATE = 44100;// 44100; //18000
    private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_STEREO; //AudioFormat.CHANNEL_IN_STEREO;
    private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
    private static final int PLAY_CHANNELS = AudioFormat.CHANNEL_OUT_STEREO; //AudioFormat.CHANNEL_OUT_STEREO;
    private static final int FREQUENCY_LEFT  = 2000; //Original:18000 (16 Dec)
    private static final int FREQUENCY_RIGHT  = 2000; //Original:18000 (16 Dec)
    private static final int AMPLITUDE_LEFT = 1;
    private static final int AMPLITUDE_RIGHT = 1;
    private static final int DURATION_SECOND = 10;
    private static final int SAMPLE_RATE = 44100;
    private static final float SWEEP_RANGE = 1000.0f;
    private static final int WRITE_NON_BLOCKING = 0;

    String store;
    private AudioRecord recorder = null;
    private int bufferSize = 0;
    private Thread recordingThread = null;
    private boolean isRecording = false;

    double time;
    float[] buffer1;
    float[] buffer2;
    byte[] byteBuffer1;
    byte[] byteBuffer2;
    byte[] byteBufferFinal;
    int bufferIndex;
    short x;
    short y;
    AudioTrack audioTrack;

    Button btnPlay, btnStart, btnStop;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);

        setButtonHandlers();
        enableButtons(false);

        btnPlay = (Button) findViewById(R.id.btnPlay);
        btnStop = (Button) findViewById(R.id.btnStop);
        btnStart = (Button) findViewById(R.id.btnStart);

        bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);

        buffer1 = new float[(int) (DURATION_SECOND * SAMPLE_RATE)];
        buffer2 = new float[(int) (DURATION_SECOND * SAMPLE_RATE)];

        float f1 = 0.0f, f2 = 0.0f;

        for (int sample = 0, step = 0; sample < buffer1.length; sample++) {
            time = sample / (SAMPLE_RATE * 1.0);
            //f1 = (float)(FREQUENCY_LEFT + ((sample / (buffer1.length * 1.0)) * SWEEP_RANGE)); // frequency sweep
            //f2 = (float)(FREQUENCY_RIGHT + ((sample / (buffer1.length * 1.0)) * SWEEP_RANGE)); // frequency sweep
            f1 = FREQUENCY_LEFT; // static frequency
            f2 = FREQUENCY_RIGHT; // static frequency
            buffer1[sample] = (float) (AMPLITUDE_LEFT * Math.sin(2 * Math.PI * f1 * time));
            buffer2[sample] = (float) (AMPLITUDE_RIGHT * Math.sin(2 * Math.PI * f2 * time));
        }

        byteBuffer1 = new byte[buffer1.length * 2]; // two bytes per audio
                                                    // frame, 16 bits

        for (int i = 0, bufferIndex = 0; i < byteBuffer1.length; i++) {
            x = (short) (buffer1[bufferIndex++] * 32767.0); // [2^16 - 1]/2 =
                                                            // 32767.0
            byteBuffer1[i] = (byte) x; // low byte
            byteBuffer1[++i] = (byte) (x >>> 8); // high byte
        }

        byteBuffer2 = new byte[buffer2.length * 2];

        for (int j = 0, bufferIndex = 0; j < byteBuffer2.length; j++) {
            y = (short) (buffer2[bufferIndex++] * 32767.0);
            byteBuffer2[j] = (byte) y; // low byte
            byteBuffer2[++j] = (byte) (y >>> 8); // high byte

        }

        byteBufferFinal = new byte[byteBuffer1.length * 2];
        // LL RR LL RR LL RR
        for (int k = 0, index = 0; index < byteBufferFinal.length - 4; k = k + 2) {
            byteBufferFinal[index] = byteBuffer1[k]; // LEFT
                                                        // {0,1/4,5/8,9/12,13;...}
            byteBufferFinal[index + 1] = byteBuffer1[k + 1];
            index = index + 2;

            byteBufferFinal[index] = byteBuffer2[k]; // RIGHT
                                                        // {2,3/6,7/10,11;...}
            byteBufferFinal[index + 1] = byteBuffer2[k + 1];
            index = index + 2;
        }



        try {
            FileOutputStream ss = new FileOutputStream(Environment.getExternalStorageDirectory().getPath() + "/" + AUDIO_RECORDER_FOLDER + "/source.txt");
            ss.write(byteBufferFinal);
            ss.flush();
            ss.close();
        }
        catch (IOException ioe){
            Log.e("IO Error","Write source error.");
        }
    }

    private void setButtonHandlers() {
        ((Button) findViewById(R.id.btnStart)).setOnClickListener(startClick);
        ((Button) findViewById(R.id.btnStop)).setOnClickListener(stopClick);
        ((Button) findViewById(R.id.btnPlay)).setOnClickListener(playClick);
    }

    private void enableButton(int id, boolean isEnable) {
        ((Button) findViewById(id)).setEnabled(isEnable);
    }

    private void enableButtons(boolean isRecording) {
        enableButton(R.id.btnStart, !isRecording);
        enableButton(R.id.btnStop, isRecording);
        enableButton(R.id.btnPlay, isRecording);
    }

    private String getFilename() {
        String filepath = Environment.getExternalStorageDirectory().getPath();
        File file = new File(filepath, AUDIO_RECORDER_FOLDER);

        if (!file.exists()) {
            file.mkdirs();
        }

        MediaScannerConnection.scanFile(this, new String[]{filepath}, null, null);

        store = file.getAbsolutePath() + "/" + "AudioOriginal"          
            + AUDIO_RECORDER_FILE_EXT_WAV;
        return store;
    }

    private String getTempFilename() {
        String filepath = Environment.getExternalStorageDirectory().getPath();
        File file = new File(filepath, AUDIO_RECORDER_FOLDER);

        if (!file.exists()) {
            file.mkdirs();
        }

        File tempFile = new File(filepath, AUDIO_RECORDER_TEMP_FILE);

        if (tempFile.exists())
            tempFile.delete();

        return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE);
    }


    private void startRecording() { 
        recorder = new AudioRecord(MediaRecorder.AudioSource.CAMCORDER,
                RECORDER_SAMPLERATE, RECORDER_CHANNELS,
                RECORDER_AUDIO_ENCODING, bufferSize);

        AudioManager am = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
        am.setStreamVolume(AudioManager.STREAM_MUSIC, am.getStreamMaxVolume(AudioManager.STREAM_MUSIC), 0);

        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                (int) SAMPLE_RATE, PLAY_CHANNELS,
                AudioFormat.ENCODING_PCM_16BIT, byteBufferFinal.length,
                AudioTrack.MODE_STATIC);

        audioTrack.write(byteBufferFinal, 0, byteBufferFinal.length);
        audioTrack.play();

        audioTrack.setPlaybackRate(RECORDER_SAMPLERATE);
        recorder.startRecording();

        isRecording = true;

        recordingThread = new Thread(new Runnable() {

            @Override
            public void run() {
                try {
                    writeAudioDataToFile();
                } catch (IOException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            }
        }, "AudioRecorder Thread");

        recordingThread.start();
    }

    float[][] deinterleaveData(float[] samples, int numChannels) {
        // assert(samples.length() % numChannels == 0);
        int numFrames = samples.length / numChannels;

        float[][] result = new float[numChannels][];
        for (int ch = 0; ch < numChannels; ch++) {
            result[ch] = new float[numFrames];
            for (int i = 0; i < numFrames; i++) {
                result[ch][i] = samples[numChannels * i + ch];
            }
        }
        return result;
    }

    float[] interleaveData(float[][] data) {
          int numChannels = data.length;
          int numFrames   = data[0].length;

          float[] result = new float[numFrames];
          for (int i = 0; i < numFrames; i++) {
            for (int ch = 0; ch < numChannels; ch++) {
              result[numChannels * i + ch] = data[ch][i];
            }
          }
          return result;
        }


    private int[] byteToShort(byte[] rawdata) {
      int[] converted = new int[rawdata.length / 2];

      for (int i = 0; i < converted.length; i++) {
        // Wave file data are stored in little-endian order
        int lo = rawdata[2*i];
        int hi = rawdata[2*i+1];
        converted[i] = ((hi&0xFF)<<8) | (lo&0xFF);
      }
      return converted;
    }

    private float[] byteToFloat(byte[] audio) {
      return shortToFloat(byteToShort(audio));
    }

    /**
    * Convert int[] audio to 32 bit float format.
    * From [-32768,32768] to [-1,1] 
    * @param audio
    */
    private float[] shortToFloat(int[] audio) {
        Log.d("SHORTTOFLOAT","INSIDE SHORTTOFLOAT");
        float[] converted = new float[audio.length];

        for (int i = 0; i < converted.length; i++) {
            // [-32768,32768] -> [-1,1]
            converted[i] = audio[i] / 32768f; /* default range for Android PCM audio buffers) */
         System.out.println("Converted is " + converted[i]);
        }

        return converted;
    }

    private void writeAudioDataToFile() throws IOException {
        int read = 0;
        byte data[] = new byte[bufferSize];
        String filename = getTempFilename();
        FileOutputStream os = null;
        FileOutputStream rs = null;
        try {
            os = new FileOutputStream(filename);
            rs = new FileOutputStream(getFilename().split(".wav")[0] + ".txt");
        } catch (FileNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        if (null != os) {

           //BandPass bandpass = new BandPass(19000,2000,44100);
            while (isRecording) {
    // decode and deinterleave stereo 16-bit per sample data
            //  float[][] signals = deinterleaveData(byteToFloat(data), 2);

    // filter data samples, updating the buffers with the filtered samples.
            //  bandpass.process(signals[0], signals[1]);

                  read = recorder.read(data, 0, bufferSize);

                if (AudioRecord.ERROR_INVALID_OPERATION != read) {
                    try {
                        os.write(data);
                        rs.write(data);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }

            try {
                os.close();
                rs.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    private void stopRecording() {
        if (null != recorder) {
            isRecording = false;

            audioTrack.flush();
            audioTrack.stop();
            audioTrack.release();

            recorder.stop();
            recorder.release();

            recorder = null;
            recordingThread = null;
        }

        copyWaveFile(getTempFilename(), getFilename());
        deleteTempFile();
        MediaScannerConnection.scanFile(this, new String[]{getFilename()}, null, null);

        AudioManager am = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
        am.setStreamVolume(AudioManager.STREAM_MUSIC, 0, 0);
    }

    private void deleteTempFile() {
        File file = new File(getTempFilename());
        file.delete();
    }

    private void copyWaveFile(String inFilename, String outFilename) {

        FileInputStream in = null;
        FileOutputStream out = null;
        long totalAudioLen = 0;
        long totalDataLen = totalAudioLen + 36;
        long longSampleRate = RECORDER_SAMPLERATE;
        int channels = 2;
        long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels / 8;
        byte[] data = new byte[bufferSize];


        try {
            in = new FileInputStream(inFilename);
            out = new FileOutputStream(outFilename);
            totalAudioLen = in.getChannel().size();
            totalDataLen = totalAudioLen + 36;

            WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
                    longSampleRate, channels, byteRate);

            while (in.read(data) != -1) {
                out.write(data);
            }

            in.close();
            out.close();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private void playWaveFile() {
        String filepath = store;

        // define the buffer size for audio track
        int minBufferSize = AudioTrack.getMinBufferSize(8000,
                AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
        int bufferSize = 512;

        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                (int) RECORDER_SAMPLERATE, AudioFormat.CHANNEL_OUT_STEREO,
                AudioFormat.ENCODING_PCM_16BIT, minBufferSize,
                AudioTrack.MODE_STREAM);

        int count = 0;
        byte[] data = new byte[bufferSize];
        try {
            FileInputStream fileInputStream = new FileInputStream(filepath);
            DataInputStream dataInputStream = new DataInputStream(
                    fileInputStream);

            audioTrack.play();

            //BandPass bandpass = new BandPass(19000,2000,44100);
            BandPass bandpass = new BandPass(550,20,44100);
            while ((count = dataInputStream.read(data, 0, bufferSize)) > -1)    
            {
            // decode and deinterleave stereo 16-bit per sample data
            // float[][] signals = deinterleaveData(byteToFloat(data), 2);

// filter data samples, updating the buffers with the filtered samples.
                //  bandpass.process(signals[0], signals[1]);

            // recombine signals for playback
            //audioTrack.write(interleaveData(signals), 0, count, WRITE_NON_BLOCKING);
            // audioTrack.write(data, 0, count);            
            }

            audioTrack.stop();
            audioTrack.release();
            dataInputStream.close();
            fileInputStream.close();

        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }


    private void WriteWaveFileHeader(FileOutputStream out, long totalAudioLen,
            long totalDataLen, long longSampleRate, int channels, long byteRate)
            throws IOException {

        byte[] header = new byte[44];

        header[0] = 'R'; // RIFF/WAVE header
        header[1] = 'I';
        header[2] = 'F';
        header[3] = 'F';
        header[4] = (byte) (totalDataLen & 0xff);
        header[5] = (byte) ((totalDataLen >> 8) & 0xff);
        header[6] = (byte) ((totalDataLen >> 16) & 0xff);
        header[7] = (byte) ((totalDataLen >> 24) & 0xff);
        header[8] = 'W';
        header[9] = 'A';
        header[10] = 'V';
        header[11] = 'E';
        header[12] = 'f'; // 'fmt ' chunk
        header[13] = 'm';
        header[14] = 't';
        header[15] = ' ';
        header[16] = 16; // 4 bytes: size of 'fmt ' chunk
        header[17] = 0;
        header[18] = 0;
        header[19] = 0;
        header[20] = 1; // format = 1
        header[21] = 0;
        header[22] = (byte) channels;
        header[23] = 0;
        header[24] = (byte) (longSampleRate & 0xff);
        header[25] = (byte) ((longSampleRate >> 8) & 0xff);
        header[26] = (byte) ((longSampleRate >> 16) & 0xff);
        header[27] = (byte) ((longSampleRate >> 24) & 0xff);
        header[28] = (byte) (byteRate & 0xff);
        header[29] = (byte) ((byteRate >> 8) & 0xff);
        header[30] = (byte) ((byteRate >> 16) & 0xff);
        header[31] = (byte) ((byteRate >> 24) & 0xff);
        header[32] = (byte) (2 * 16 / 8); // block align
        header[33] = 0;
        header[34] = RECORDER_BPP; // bits per sample
        header[35] = 0;
        header[36] = 'd';
        header[37] = 'a';
        header[38] = 't';
        header[39] = 'a';
        header[40] = (byte) (totalAudioLen & 0xff);
        header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
        header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
        header[43] = (byte) ((totalAudioLen >> 24) & 0xff);

        out.write(header, 0, 44);
    }

    private View.OnClickListener startClick = new View.OnClickListener() {

        @Override
        public void onClick(View v) {
            // TODO Auto-generated method stub
            Thread recordThread = new Thread(new Runnable() {

                @Override
                public void run() {
                    isRecording = true;
                    startRecording();
                }

            });

            recordThread.start();
            btnStart.setEnabled(false);
            btnStop.setEnabled(true);
            btnPlay.setEnabled(false);

        }
    };

    private View.OnClickListener stopClick = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            new Handler().postDelayed(new Runnable() {
                @Override
                public void run() {
                    // TODO Auto-generated method stub
                    stopRecording();
                    enableButtons(false);
                    btnPlay.setEnabled(true);
                    // stop();
                }

            }, 100);

        }
    };

    private View.OnClickListener playClick = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // TODO Auto-generated method stub
            playWaveFile();
            btnPlay.setEnabled(true);

            String filepath = store;

            final String promptPlayRecord = "PlayWaveFile()\n" + filepath;

            SpannableStringBuilder biggerText = new SpannableStringBuilder(promptPlayRecord);
            biggerText.setSpan(new RelativeSizeSpan(2.05f), 0, promptPlayRecord.length(), 0);
            Toast.makeText(RecorderActivity.this, biggerText, Toast.LENGTH_LONG).show();
        }
    };
}

我尝试把过滤器放进方法 "writeAudioDataToFile" 中,虽然这样确实生成了过滤后的音频wav文件,但它是一个被"快进"了的版本。例如:我录制了约5秒,但生成的.wav文件只播放1秒。为什么会这样?

我把过滤器放在方法 "writeAudioDataToFile" 中的做法是否正确?如果不正确,应采取哪些步骤才能确保过滤后的音频可以输出为.wav文件?

感谢你们的建议,非常感激!

使用的库

https://github.com/ddf/Minim/blob/master/src/ddf/minim/effects/BandPass.java https://github.com/DASAR/Minim-Android/blob/master/src/ddf/minim/effects/IIRFilter.java

0 个答案:

没有答案