嗨,我正在使用MediaMuxer,但我无法完全替换背景声音。到目前为止我已经可以替换它了,但是输出视频文件存在一些问题:例如我有一个30秒的mp4文件,并用应用程序录制了30秒的音频文件,但是当我合并这两个文件时,输出却是大约56秒。当我播放输出视频的最后部分时,在30秒之后,视频画面在接下来的26秒里一直处于暂停(定格)状态。请指出我在这里做错了什么?
我尝试了这个lib,但是音频没有改变... https://github.com/GitEliteNovice/Muxing
这是我完整的源代码
package com.customer.ffmpegvideo1;
public class MainActivity extends AppCompatActivity {
private static final int REQUEST_CODE_WRITE_EXTERNAL_STORAGE_PERMISSION = 1001;
private Button btnRecord;
private VideoView videoView;
private String videoPath = Environment.getExternalStorageDirectory() + File.separator + "video.mp4";
private String accPath = Environment.getExternalStorageDirectory() + File.separator + "Audio.3gp";
private MediaRecorder mRecorder;
MediaController mc ;
@RequiresApi(api = Build.VERSION_CODES.M)
@Override
protected void onCreate(@Nullable Bundle savedInstanceState){
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
String TAG = "muxing";
Log.e(TAG, "step 1 ");
btnRecord = findViewById(R.id.btnRecord);
videoView = findViewById(R.id.videoView);
mc = new MediaController(this);
mc.setAnchorView(videoView);
mc.setMediaPlayer(videoView);
videoView.setMediaController(mc);
getPermissionToRecordAudio();
btnRecord.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
startPlayingVideo();
}
});
}
private void startPlayingVideo() {
videoView.setVideoPath(videoPath);
videoView.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer m) {
try {
m.setVolume(0f, 0f);
m.setLooping(false);
Log.e("video time",videoView.getDuration()+"");
startRecording();
} catch (Exception e) {
e.printStackTrace();
}
}
});
videoView.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
@Override
public void onCompletion(MediaPlayer mediaPlayer) {
mRecorder.stop();
mRecorder.release();
mRecorder = null;
Log.e("onComplete","done");
Log.e("video size","size = "+mediaPlayer.getDuration());
Log.e("audio size","size = "+getDuration(new File(accPath)));
muxing();
}
});
videoView.requestFocus();
}
private static String getDuration(File file) {
MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
mediaMetadataRetriever.setDataSource(file.getAbsolutePath());
String durationStr = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
return formateMilliSeccond(Long.parseLong(durationStr));
}
public static String formateMilliSeccond(long milliseconds) {
String finalTimerString = "";
String secondsString;
int hours = (int) (milliseconds / (1000 * 60 * 60));
int minutes = (int) (milliseconds % (1000 * 60 * 60)) / (1000 * 60);
int seconds = (int) ((milliseconds % (1000 * 60 * 60)) % (1000 * 60) / 1000);
if (hours > 0) {
finalTimerString = hours + ":";
}
if (seconds < 10) {
secondsString = "0" + seconds;
} else {
secondsString = "" + seconds;
}
finalTimerString = finalTimerString + minutes + ":" + secondsString;
return finalTimerString;
}
private void startRecording() {
mRecorder = new MediaRecorder();
mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mRecorder.setMaxDuration(videoView.getDuration());
Log.e("filename",accPath);
mRecorder.setOutputFile(accPath);
mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
try {
mRecorder.prepare();
videoView.start();
mRecorder.start();
btnRecord.setVisibility(View.GONE);
} catch (IOException e) {
e.printStackTrace();
}
Toast.makeText(getApplicationContext(), "Recording started", Toast.LENGTH_LONG).show();
}
@RequiresApi(api = Build.VERSION_CODES.M)
public void getPermissionToRecordAudio() {
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED
|| ContextCompat.checkSelfPermission(this, Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED
|| ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED ) {
requestPermissions(new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.RECORD_AUDIO, Manifest.permission.WRITE_EXTERNAL_STORAGE},
REQUEST_CODE_WRITE_EXTERNAL_STORAGE_PERMISSION);
}
}
@RequiresApi(api = Build.VERSION_CODES.M)
@Override
public void onRequestPermissionsResult(int requestCode,
@NonNull String permissions[],
@NonNull int[] grantResults) {
if (requestCode == REQUEST_CODE_WRITE_EXTERNAL_STORAGE_PERMISSION) {
if (grantResults.length == 3 &&
grantResults[0] == PackageManager.PERMISSION_GRANTED
&& grantResults[1] == PackageManager.PERMISSION_GRANTED
&& grantResults[2] == PackageManager.PERMISSION_GRANTED){
Toast.makeText(this, "Permission granted click on button to record audio.", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(this, "You must give permissions to use this app. App is exiting.", Toast.LENGTH_SHORT).show();
finishAffinity();
}
}
}
private void muxing() {
File Videofile = new File(videoPath);
if(Videofile.exists()){
int file_size = Integer.parseInt(String.valueOf(Videofile.length()/1024));
Log.e("Video file","exist" + file_size);
}else{
Log.e("Video file"," not exist");
}
File Audiofile = new File(accPath);
if(Audiofile.exists()){
int file_size = Integer.parseInt(String.valueOf(Audiofile.length()/1024));
Log.e("Audio file","exist"+ file_size);
}else{
Log.e("Audio file"," not exist");
}
try {
MediaExtractor videoExtractor = new MediaExtractor();
videoExtractor.setDataSource(videoPath);
MediaFormat videoFormat = null;
int videoTrackIndex = -1;
int videoTrackCount = videoExtractor.getTrackCount();
Log.e("videoTrackCount",videoTrackCount+"");
for (int i = 0; i < videoTrackCount; i++) {
videoFormat = videoExtractor.getTrackFormat(i);
Log.e("videoFormat",videoFormat+"");
String type = videoFormat.getString(MediaFormat.KEY_MIME);
if (type.startsWith("video/")) {
videoTrackIndex = i;
break;
}
}
MediaExtractor audioExtractor = new MediaExtractor();
audioExtractor.setDataSource(accPath);
MediaFormat audioFormat = null;
int audioTrackIndex = -1;
int audioTrackCount = audioExtractor.getTrackCount();
Log.e("audioTrackCount",audioTrackCount+"");
for (int i = 0; i < audioTrackCount; i++) {
audioFormat = audioExtractor.getTrackFormat(i);
Log.e("audioFormat",audioFormat+"");
String type = audioFormat.getString(MediaFormat.KEY_MIME);
if (type.startsWith("audio/")) {
audioTrackIndex = i;
break;
}
}
videoExtractor.selectTrack(videoTrackIndex);
audioExtractor.selectTrack(audioTrackIndex);
MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();
String videoPath_muxer=Environment.getExternalStorageDirectory() +"/"+(System.currentTimeMillis()+"output.mp4");
Log.e("videoPath_muxer",videoPath_muxer+"");
MediaMuxer mediaMuxer = new MediaMuxer(videoPath_muxer, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
int writeVideoTrackIndex = mediaMuxer.addTrack(videoFormat);
Log.e("writeVideoTrackIndex",writeVideoTrackIndex+"");
int writeAudioTrackIndex = mediaMuxer.addTrack(audioFormat);
Log.e("writeAudioTrackIndex",writeAudioTrackIndex+"");
mediaMuxer.start();
Log.e("mediaMuxer started","......yes......");
ByteBuffer byteBuffer = ByteBuffer.allocate(500 * 1024);
long sampleTime = 0;
{
videoExtractor.readSampleData(byteBuffer, 0);
if (videoExtractor.getSampleFlags() == MediaExtractor.SAMPLE_FLAG_SYNC) {
videoExtractor.advance();
}
videoExtractor.readSampleData(byteBuffer, 0);
long secondTime = videoExtractor.getSampleTime();
Log.e("videoExt secondTime",secondTime+"");
videoExtractor.advance();
long thirdTime = videoExtractor.getSampleTime();
Log.e("videoExt thirdTime",thirdTime+"");
sampleTime = Math.abs(thirdTime - secondTime);
Log.e("videoExt sampleTime",sampleTime+"");
}
videoExtractor.unselectTrack(videoTrackIndex);
videoExtractor.selectTrack(videoTrackIndex);
while (true) {
int readVideoSampleSize = videoExtractor.readSampleData(byteBuffer, 0);
if (readVideoSampleSize < 0) {
break;
}
videoBufferInfo.size = readVideoSampleSize;
videoBufferInfo.presentationTimeUs += sampleTime;
videoBufferInfo.offset = 0;
videoBufferInfo.flags = videoExtractor.getSampleFlags();
mediaMuxer.writeSampleData(writeVideoTrackIndex, byteBuffer, videoBufferInfo);
videoExtractor.advance();
}
Log.e("videoExt","writeSampleData");
while (true) {
int readAudioSampleSize = audioExtractor.readSampleData(byteBuffer, 0);
if (readAudioSampleSize < 0) {
break;
}
audioBufferInfo.size = readAudioSampleSize;
audioBufferInfo.presentationTimeUs += sampleTime;
audioBufferInfo.offset = 0;
audioBufferInfo.flags = videoExtractor.getSampleFlags();
mediaMuxer.writeSampleData(writeAudioTrackIndex, byteBuffer, audioBufferInfo);
audioExtractor.advance();
}
Log.e("audioExt","writeSampleData");
mediaMuxer.stop();
mediaMuxer.release();
videoExtractor.release();
audioExtractor.release();
Log.e("video size","size = "+getDuration(new File(videoPath_muxer)));
} catch (IOException e) {
e.printStackTrace();
}
}
}
这是我的xml文件
<?xml version="1.0" encoding="utf-8"?>
<!-- activity_main: full-screen video preview with a single Record button.
     Uses the pre-AndroidX ConstraintLayout (android.support.constraint). -->
<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<!-- Video surface fills the whole screen. -->
<VideoView
android:id="@+id/videoView"
android:layout_width="match_parent"
android:layout_height="match_parent"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<!-- Record button; hidden from code once recording starts.
     NOTE(review): it is constrained to the end/bottom of a match_parent
     VideoView, which may push it off-screen — verify placement on device. -->
<Button
android:id="@+id/btnRecord"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginStart="16dp"
android:layout_marginTop="16dp"
android:layout_marginEnd="148dp"
android:text="Record"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintHorizontal_bias="1.0"
app:layout_constraintStart_toEndOf="@+id/videoView"
app:layout_constraintTop_toBottomOf="@+id/videoView"
app:layout_constraintVertical_bias="1.0" />
</android.support.constraint.ConstraintLayout>