What I need to do: make a stereo recording. When I click "Record", the left speaker should produce a 400 Hz tone and the right speaker a 500 Hz tone, and I have to record that. Then I have to convert the recording to an audio format (mp3, wav, flac, etc.).
I have managed to do the above (except the conversion to an audio format, which is not done yet).
Here is my code:
package com.exercise.AndroidAudioRecord;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.Spinner;
import android.widget.Toast;
public class AndroidAudioRecordActivity extends Activity {
String[] freqText = {"11.025 KHz (Lowest)", "16.000KHZ", "18.000 KHz", "19.000 KHz", "22.050 KHz", "44.100 KHz (Highest)"};
Integer[] freqset = {11025, 16000, 18000, 19000, 22050, 44100};
private ArrayAdapter<String> adapter;
Spinner spFrequency;
Button startRec, stopRec, playBack;
volatile boolean recording; // set from the UI thread, read from the record thread
/* Generate the frequency tone */
int sample;
double sampleRate;
double duration;
double time;
double f1;
double f2;
double amplitude1;
double amplitude2;
double sineWave1;
double sineWave2;
float[] buffer1;
float[] buffer2;
byte[] byteBuffer1;
byte[] byteBuffer2;
byte[] byteBufferFinal;
int bufferIndex;
short x;
short y;
short [] leftChannelAudioData;
short [] rightChannelAudioData;
AudioTrack audioTrack;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
startRec = (Button)findViewById(R.id.startrec);
stopRec = (Button)findViewById(R.id.stoprec);
playBack = (Button)findViewById(R.id.playback);
startRec.setOnClickListener(startRecOnClickListener);
stopRec.setOnClickListener(stopRecOnClickListener);
playBack.setOnClickListener(playBackOnClickListener);
spFrequency = (Spinner)findViewById(R.id.frequency);
adapter = new ArrayAdapter<String>(this, android.R.layout.simple_spinner_item, freqText);
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
spFrequency.setAdapter(adapter);
stopRec.setEnabled(false);
sampleRate = 44100.0;
duration = 20.0;
f1 = 400.0;
//f1 = 18000.0;
amplitude1= 1;
f2 = 500;
//f2 =19000.0;
amplitude2 = 1;
buffer1 = new float[(int)(duration*sampleRate)];
buffer2 = new float[(int)(duration*sampleRate)];
for(sample = 0; sample < buffer1.length; sample ++){
time = sample / sampleRate;
buffer1[sample] = (float)(amplitude1*Math.sin(2*Math.PI*f1*time));
buffer2[sample] = (float)(amplitude2*Math.sin(2*Math.PI*f2*time));
//Toast.makeText(getApplicationContext(), "IN", Toast.LENGTH_LONG).show();
}
byteBuffer1 = new byte[buffer1.length*2]; // two bytes per 16-bit sample
for(int a = 0, bufferIndex=0; a < byteBuffer1.length; a++){
x = (short) (buffer1[bufferIndex++]*32767.0); // scale to 16-bit range: 2^15 - 1 = 32767
byteBuffer1[a] = (byte) x; // low byte
byteBuffer1[++a] = (byte) (x >>> 8); // high byte
}
byteBuffer2 = new byte[buffer2.length*2];
for(int j = 0, bufferIndex=0; j < byteBuffer2.length; j++){
y = (short) (buffer2[bufferIndex++]*32767.0);
byteBuffer2[j] = (byte) y; // low byte
byteBuffer2[++j] = (byte) (y >>> 8); // high byte
}
byteBufferFinal = new byte[byteBuffer1.length*2];
// Interleave the two mono buffers frame by frame: L L R R L L R R ... (2 bytes per sample)
for(int k = 0, index = 0; index <= byteBufferFinal.length - 4; k=k+2){
byteBufferFinal[index] = byteBuffer1[k]; // left sample, low byte
byteBufferFinal[index+1] = byteBuffer1[k+1]; // left sample, high byte
index = index + 2;
byteBufferFinal[index] = byteBuffer2[k]; // right sample, low byte
byteBufferFinal[index+1] = byteBuffer2[k+1]; // right sample, high byte
index = index + 2;
}
/*
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
(int) sampleRate,AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT, byteBufferFinal.length,
AudioTrack.MODE_STATIC);
audioTrack.write(byteBufferFinal, 0, byteBufferFinal.length);
audioTrack.play();
*/
/*End of frequency tone */
}
OnClickListener startRecOnClickListener
= new OnClickListener(){
@Override
public void onClick(View arg0) {
Thread recordThread = new Thread(new Runnable(){
@Override
public void run() {
recording = true;
startRecord();
}
});
recordThread.start();
startRec.setEnabled(false);
stopRec.setEnabled(true);
}};
OnClickListener stopRecOnClickListener
= new OnClickListener(){
@Override
public void onClick(View arg0) {
recording = false;
startRec.setEnabled(true);
stopRec.setEnabled(false);
Log.d("TEST", "THIS IS INSIDE STOP");
audioTrack.flush();
audioTrack.stop();
audioTrack.release();
}};
OnClickListener playBackOnClickListener
= new OnClickListener(){
@Override
public void onClick(View v) {
playRecord();
}
};
private void startRecord(){
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
(int) sampleRate,AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT, byteBufferFinal.length,
AudioTrack.MODE_STATIC);
audioTrack.write(byteBufferFinal, 0, byteBufferFinal.length);
audioTrack.play();
File file = new File(Environment.getExternalStorageDirectory(), "test.pcm");
int selectedPos = spFrequency.getSelectedItemPosition();
int sampleFreq = freqset[selectedPos];
final String promptStartRecord =
"startRecord()\n"
+ file.getAbsolutePath() + "\n"
+ (String)spFrequency.getSelectedItem();
runOnUiThread(new Runnable(){
@Override
public void run() {
Toast.makeText(AndroidAudioRecordActivity.this,
promptStartRecord,
Toast.LENGTH_LONG).show();
}});
try {
file.createNewFile();
OutputStream outputStream = new FileOutputStream(file);
BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(outputStream);
DataOutputStream dataOutputStream = new DataOutputStream(bufferedOutputStream);
int minBufferSize = AudioRecord.getMinBufferSize(sampleFreq,
AudioFormat.CHANNEL_IN_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
short[] audioData = new short[minBufferSize];
//AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.CAMCORDER,
sampleFreq,
AudioFormat.CHANNEL_IN_STEREO, // AudioRecord needs a CHANNEL_IN_* mask, not CHANNEL_OUT_*
AudioFormat.ENCODING_PCM_16BIT,
minBufferSize);
audioRecord.startRecording();
while(recording){
int numberOfShort = audioRecord.read(audioData, 0, minBufferSize);
for(int i = 0; i < numberOfShort; i++){
dataOutputStream.writeShort(audioData[i]);
}
}
audioRecord.stop();
audioRecord.release();
dataOutputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
void playRecord(){
File file = new File(Environment.getExternalStorageDirectory(), "test.pcm");
int shortSizeInBytes = Short.SIZE/Byte.SIZE;
int bufferSizeInBytes = (int)(file.length()/shortSizeInBytes); // actually the number of 16-bit samples, not bytes
//int bufferSizeInBytes = (int)(file.length());
short[] audioData = new short[bufferSizeInBytes];
try {
InputStream inputStream = new FileInputStream(file);
BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream);
DataInputStream dataInputStream = new DataInputStream(bufferedInputStream);
int i = 0;
while(dataInputStream.available() > 0){
audioData[i] = dataInputStream.readShort();
i++;
}
dataInputStream.close();
int selectedPos = spFrequency.getSelectedItemPosition();
int sampleFreq = freqset[selectedPos];
//int sampleFreq1 = 400;
//int sampleFreq2 = 20000;
final String promptPlayRecord =
"PlayRecord()\n"
+ file.getAbsolutePath() + "\n"
+ (String)spFrequency.getSelectedItem();
Toast.makeText(AndroidAudioRecordActivity.this,
promptPlayRecord,
Toast.LENGTH_LONG).show();
AudioTrack audioTrack1 = new AudioTrack(
AudioManager.STREAM_MUSIC,
sampleFreq,
AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSizeInBytes,
//audioData.length,
AudioTrack.MODE_STREAM);
//int stereo=audioTrack.setStereoVolume(0.0f, 1.0f);
audioTrack1.play();
audioTrack1.write(audioData, 0, bufferSizeInBytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
Question 1.1: Is it possible to have the top microphone record the 400 Hz tone and the bottom microphone record the 500 Hz tone, so that when I click play the left and right speakers play back 400 Hz and 500 Hz respectively?
Question 1.2: If the above is not possible, how can I separate the left and right channels? (I saw this link, but I am not sure how to implement it: How to access the second mic android such as Galaxy 3)
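To make question 1.2 concrete, this is roughly the kind of de-interleaving I have in mind (an untested sketch; stereoData here stands for the short[] that audioRecord.read() fills when recording with CHANNEL_IN_STEREO, where samples alternate left, right, left, right):

// Untested sketch: split an interleaved 16-bit stereo buffer (L, R, L, R, ...) into two mono buffers.
public static short[][] splitChannels(short[] stereoData) {
    int frames = stereoData.length / 2;   // one frame = one left sample + one right sample
    short[] left = new short[frames];
    short[] right = new short[frames];
    for (int i = 0; i < frames; i++) {
        left[i] = stereoData[2 * i];      // even indices: left channel
        right[i] = stereoData[2 * i + 1]; // odd indices: right channel
    }
    return new short[][] { left, right };
}

If that is the right idea, I could then write the two arrays to separate files, or keep them interleaved for stereo playback.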
Question 2: How do I convert the PCM file to wav, mp3, flac, etc.?
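For question 2, my current understanding is that a wav file is just the raw 16-bit PCM data with a 44-byte RIFF header in front (mp3/flac would need an encoder library). Below is a minimal sketch of what I am considering (class and method names are just placeholders), assuming 44100 Hz, 2 channels, 16-bit little-endian samples; note that the shorts written above with DataOutputStream.writeShort are big-endian, so they would need byte-swapping first. Is this the right approach?

import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class PcmToWav {
    // Sketch: wrap raw little-endian 16-bit PCM bytes in a standard 44-byte WAV header.
    public static void pcmToWav(File pcmFile, File wavFile, int sampleRate, int channels) throws IOException {
        byte[] pcmData = new byte[(int) pcmFile.length()];
        DataInputStream in = new DataInputStream(new FileInputStream(pcmFile));
        in.readFully(pcmData);
        in.close();

        int byteRate = sampleRate * channels * 2;        // 2 bytes per 16-bit sample
        ByteBuffer header = ByteBuffer.allocate(44).order(ByteOrder.LITTLE_ENDIAN);
        header.put("RIFF".getBytes());
        header.putInt(36 + pcmData.length);              // RIFF chunk size
        header.put("WAVE".getBytes());
        header.put("fmt ".getBytes());
        header.putInt(16);                               // fmt sub-chunk size
        header.putShort((short) 1);                      // audio format 1 = PCM
        header.putShort((short) channels);
        header.putInt(sampleRate);
        header.putInt(byteRate);
        header.putShort((short) (channels * 2));         // block align
        header.putShort((short) 16);                     // bits per sample
        header.put("data".getBytes());
        header.putInt(pcmData.length);

        FileOutputStream out = new FileOutputStream(wavFile);
        out.write(header.array());
        out.write(pcmData);
        out.close();
    }
}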
Any help and advice is appreciated. Thank you very much!