I'm developing an Android Wear app. The app records audio locally from the smartwatch microphone and sends it to the handheld device. The handheld receives the data and writes it to a .wav file. The file is created, but when I play it back it is empty: I hear only silence.
Here is the wear code:
public void replyAudioByByte(final byte data[]) {
final String path = "/start_activity";
final Byte[] text= new Byte[1024];
GoogleApiClient client = new GoogleApiClient.Builder(getApplicationContext())
.addApi(Wearable.API)
.build();
new Thread(new Runnable() {
@Override
public void run() {
NodeApi.GetConnectedNodesResult nodes = Wearable.NodeApi.getConnectedNodes(mApiClient).await();
for (Node node : nodes.getNodes()) {
MessageApi.SendMessageResult result = Wearable.MessageApi.sendMessage(
mApiClient, node.getId(),AUDIO_RECORDER, data).await();
if (result.getStatus().isSuccess()) {
Log.d("sendMessage","Message send!!");
for (int j=0; j<data.length; j++ ){
Log.v("Mobile", "Message: {" + data[j] + "} sent to: " + node.getDisplayName());
}
} else {
// Log an error
Log.v("Mobile", "ERROR: failed to send Message");
}
}
}
}).start();
client.disconnect();
Log.d("MOBILE", "send message end");
}
public void startRecordingAudio() {
recorder = findAudioRecord();
Log.d("recorder:","recorder="+recorder.toString());
CountDownTimer countDowntimer = new CountDownTimer(8000, 1000) {
public void onTick(long millisUntilFinished) {
}
public void onFinish() {
try {
//Toast.makeText(getBaseContext(), "Stop recording Automatically ", Toast.LENGTH_LONG).show();
Log.d("wear", "stopRecorder=" + System.currentTimeMillis());
recorder.stop();
Log.d("formato registrazione","recorderformat="+recorder.getAudioFormat()+"-----rate=");
Log.d("formato registrazione","recordersamplerate=" +recorder.getSampleRate());
isRecording=false;
replyAudioByByte(data);
for (int j=0; j< data.length;j++){
Log.d("watch audio registrato", "data[]="+data[j]);
}
Log.d("wear", "recorder.stop ok!");
} catch (Exception e) {
// TODO Auto-generated catch block
Log.e("wear", "recorder.stop catch");
e.printStackTrace();
}
}
};
recorder.startRecording();
countDowntimer.start();
Log.d("wear", "startRecorder=" + System.currentTimeMillis());
isRecording = true;
recordingThread = new Thread(new Runnable() {
public void run() {
while (isRecording ) {
recorder.read(data, 0, bufferSize);
Log.d("WEAR","recorder.read="+recorder.read(data, 0, bufferSize));
}
recorder.stop();
recorder.release();
for (int i = 0; i < bufferSize; i++) {
Log.d("startrecording", "data=" + data[i]);
}
}
}, "AudioRecorder Thread");
recordingThread.start();
int a= recorder.getSampleRate();
Log.d("formato registrazione","recorderformat="+recorder.getAudioFormat()+"-----rate="+a);
Log.d("formato registrazione","recordersamplerate=" +recorder.getSampleRate());
}
public AudioRecord findAudioRecord() {
/** The settings I must use are not the same on every device, so I try combinations until one works */
for (int rate : mSampleRates) {
for (short audioFormat : audioF) {
for (short channelConfig : channelC) {
try {
//Log.d("Check", "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: " + channelConfig);
int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
//Check whether the AudioRecord can be instantiated without problems
AudioRecord recorder = new AudioRecord(AudioSource.MIC, rate, channelConfig, audioFormat, bufferSize + 2000);
Log.d("AudioRecorder data","AudioSource.Default="+ AudioSource.MIC);
Log.d("AudioRecorder data","Rate="+ rate);
Log.d("AudioRecorder data","Channel.config="+ channelConfig);
Log.d("AudioRecorder data","AudioFormat= "+audioFormat);
bufferSize=bufferSize+2000;
Log.d("AudioRecorder data","buffersize="+ bufferSize );
if (recorder.getState() == AudioRecord.STATE_INITIALIZED)
Log.d("audiorec","rate="+rate);
return recorder;
}
} catch (Exception e) {
Log.e("Check", rate + "Exception, keep trying.", e);
}
}
}
}
return null;
}
Here is the handheld code:
public Void doInBackground(byte [] dataToWrite) {
Log.d("doInBackground","entrato");
byte data[] = new byte[bufferSize];
String tempfilename = "";
FileOutputStream os = null;
//if(allowRecorder){
tempfilename = getTempFilename();
Log.d("doInBackground","getTempFilename=" +tempfilename.toString());
try {
os = new FileOutputStream(tempfilename);
Log.d("doInBackground","os new ok" );
} catch (FileNotFoundException e) {
e.printStackTrace();
}
dbData = new ArrayList<Double>();
Log.d("doInBackGround", "dateToWrite.length=" + dataToWrite.length);
for (int j = 0; j < dataToWrite.length; j++) {
try {
os.write(dataToWrite);
Log.d("os,write", "dataToWrite");
} catch (IOException e) {
e.printStackTrace();
}
}
if(data[data.length-1]!=0){
double Db = 20 * Math.log10(Math.abs((data[data.length-1]/51805.5336) / 0.00002));
dbData.add(Db);
}
try {
os.close();
Log.d("os.close", "dataToWrite");
copyWaveFile(tempfilename,getFilename());
deleteTempFile();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
private void copyWaveFile(String inFilename,String outFilename){
FileInputStream in = null;
FileOutputStream out = null;
long totalAudioLen = 0;
long totalDataLen = 0;
long longSampleRate = 8000;
System.out.println("SAMPLE RATE = "+longSampleRate);
int channels = 12;
audioFormat = 16;
long byteRate = audioFormat * longSampleRate * channels/8;
byte[] data = new byte[bufferSize];
try {
in = new FileInputStream(inFilename);
out = new FileOutputStream(outFilename);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 36;
Log.d("RecorderRead","totalAudioLen=" +totalAudioLen);
Log.d("RecorderRead","totalDatalen=" +totalDataLen);
System.out.println("Temp File size: " + totalDataLen);
Log.d("AudioRecorder data","AudioSource.Default="+ AudioSource.DEFAULT);
Log.d("AudioRecorder data","Rate="+ longSampleRate);
Log.d("AudioRecorder data","Channel.config="+ channels);
Log.d("AudioRecorder data","AudioFormat= "+audioFormat);
//bufferSize=bufferSize+2000;
Log.d("AudioRecorder data","buffersize="+ bufferSize );
if(totalDataLen != 36){
writeWaveFileHeader(out, totalAudioLen, totalDataLen,
longSampleRate, channels, byteRate);
Log.d("writeWAVEFILE", "chiamato");
while(in.read(data) != -1){
out.write(data);
}
System.out.println("Wav File size: " + out.getChannel().size());
}
else{
System.out.println("Non creo il file .wav");
}
in.close();
out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void writeWaveFileHeader(
FileOutputStream out, long totalAudioLen,
long totalDataLen, long longSampleRate, int channels,
long byteRate) throws IOException {
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // format = 1
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) (2 * 16 / 8); // block align
header[33] = 0;
header[34] = (byte) audioFormat; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
In the wear manifest I have:
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
In the handheld manifest I have:
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
When I run the app, this error appears in the wear app's log:
AudioRecord-JNI: Error -4 during AudioRecord native read
What can I do to fix it? Can anyone help me? What is wrong? Any kind of help is appreciated, whether code or a tutorial. Thanks in advance.
Answer 0 (score: 0)
You need to set the audio encoder used for recording:
void setAudioEncoder(int audio_encoder)
If this method is not called, the output file will not contain an audio track. Call this method after setOutputFormat() and before prepare().
Try following these steps:
- Set the audio source to MediaRecorder.AudioSource.MIC.
- When recording is done, call MediaRecorder.release() to free the resources immediately.
Here is sample code showing how to record audio and play back the recording: https://developer.android.com/guide/topics/media/audio-capture.html#example
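As a rough, minimal sketch of that flow (not the poster's code: the THREE_GPP/AMR_NB format choice, the class name SimpleAudioRecorder and the outputPath parameter are placeholders I picked for illustration), it would look something like this:

import android.media.MediaRecorder;
import java.io.IOException;

public class SimpleAudioRecorder {
    private MediaRecorder recorder;

    // Configure the recorder and start capturing from the microphone.
    public void startRecording(String outputPath) throws IOException {
        recorder = new MediaRecorder();
        recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        // setAudioEncoder() must come after setOutputFormat() and before prepare(),
        // otherwise the output file contains no audio track.
        recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        recorder.setOutputFile(outputPath);
        recorder.prepare();
        recorder.start();
    }

    // Stop capturing and release the native resources right away.
    public void stopRecording() {
        if (recorder != null) {
            recorder.stop();
            recorder.release();
            recorder = null;
        }
    }
}

The linked page walks through the same steps in full and also shows how to play the recording back with MediaPlayer.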