I'm trying to build a Vine-like video app using AVFoundation. I can now save video through AVCaptureVideoDataOutput and play it back, but the audio doesn't work and I can't figure out why.
I'm a beginner at iOS development, so my explanation may not be very clear. I hope you can understand what I'm trying to say and give me some hints.
Here is the code I'm using.
Setting up the AVCaptureVideoDataOutput and AVCaptureAudioDataOutput:
// Video output: BGRA frames, delivered to the delegate on a dedicated serial queue
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
[CaptureSession addOutput:videoDataOutput];
videoDataOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                 nil];
dispatch_queue_t videoQueue = dispatch_queue_create("VideoQueue", NULL);
[videoDataOutput setSampleBufferDelegate:self queue:videoQueue];

// Audio output: same delegate, separate serial queue
AVCaptureAudioDataOutput *audioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
[CaptureSession addOutput:audioDataOutput];
dispatch_queue_t audioQueue = dispatch_queue_create("AudioQueue", NULL);
[audioDataOutput setSampleBufferDelegate:self queue:audioQueue];
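(Not shown above: the camera and microphone also have to be attached to the session as AVCaptureDeviceInputs before either output will produce buffers. Roughly, that part of my setup looks like the following sketch; variable names are simplified and error handling is trimmed:)

// Simplified sketch of adding the camera and microphone inputs to the session
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];

NSError *inputError = nil;
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&inputError];
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&inputError];

if (videoInput) { [CaptureSession addInput:videoInput]; }
if (audioInput) { [CaptureSession addInput:audioInput]; }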
Setting up the AVAssetWriter and AVAssetWriterInputs:
- (void)makeWriter
{
    pathString = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/capture.mov"];
    exportURL = [NSURL fileURLWithPath:pathString];

    // Remove any previous recording at the same path
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportURL.path]) {
        [[NSFileManager defaultManager] removeItemAtPath:exportURL.path error:nil];
    }

    NSError *error = nil;
    writer = [[AVAssetWriter alloc] initWithURL:exportURL
                                       fileType:AVFileTypeQuickTimeMovie
                                          error:&error];

    // Video input: H.264, 1280x720
    NSDictionary *videoSetting = [NSDictionary dictionaryWithObjectsAndKeys:
                                  AVVideoCodecH264, AVVideoCodecKey,
                                  [NSNumber numberWithInt:1280], AVVideoWidthKey,
                                  [NSNumber numberWithInt:720], AVVideoHeightKey,
                                  nil];
    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                          outputSettings:videoSetting];
    videoWriterInput.expectsMediaDataInRealTime = YES;

    // Add the audio input
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary *audioOutputSettings = nil;
    // Both types of audio settings cause the output video file to be corrupted.
    if (NO) {
        // Should work on iPhone 3GS and later, and on iPod touch 3rd generation and later
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                               [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                               [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                               [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                               [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                               nil];
    } else {
        // Should work on any device, but requires more space
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatAppleLossless], AVFormatIDKey,
                               [NSNumber numberWithInt:16], AVEncoderBitDepthHintKey,
                               [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                               [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                               [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                               nil];
    }
    audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                          outputSettings:audioOutputSettings];
    audioWriterInput.expectsMediaDataInRealTime = YES;

    // Add both inputs to the writer
    [writer addInput:videoWriterInput];
    [writer addInput:audioWriterInput];
}
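(Side note: AVAssetWriter exposes canAddInput:, so a more defensive version of those last two addInput: calls could look like this sketch, with placeholder log messages:)

// Sketch: verify the writer accepts each input before adding it
if ([writer canAddInput:videoWriterInput]) {
    [writer addInput:videoWriterInput];
} else {
    NSLog(@"writer refused the video input");
}
if ([writer canAddInput:audioWriterInput]) {
    [writer addInput:audioWriterInput];
} else {
    NSLog(@"writer refused the audio input");
}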
Finally, the captureOutput: delegate code:
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    if (isPause && isRecording) { return; }
    if (!CMSampleBufferDataIsReady(sampleBuffer)) { return; }

    if (isRecording == YES) {
        isWritting = YES;

        if (writer.status != AVAssetWriterStatusWriting) {
            [writer startWriting];
            [writer startSessionAtSourceTime:kCMTimeZero];
        }

        if ([videoWriterInput isReadyForMoreMediaData]) {
            CFRetain(sampleBuffer);
            CMSampleBufferRef newSampleBuffer = [self offsetTimmingWithSampleBufferForVideo:sampleBuffer];
            [videoWriterInput appendSampleBuffer:newSampleBuffer];
            CFRelease(sampleBuffer);
            CFRelease(newSampleBuffer);
        }
        writeFrames++;
    }
}
- (CMSampleBufferRef)offsetTimmingWithSampleBufferForVideo:(CMSampleBufferRef)sampleBuffer
{
    CMSampleBufferRef newSampleBuffer;
    CMSampleTimingInfo sampleTimingInfo;
    sampleTimingInfo.duration = CMTimeMake(1, 30);
    sampleTimingInfo.presentationTimeStamp = CMTimeMake(writeFrames, 30);
    sampleTimingInfo.decodeTimeStamp = kCMTimeInvalid;

    CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault,
                                          sampleBuffer,
                                          1,
                                          &sampleTimingInfo,
                                          &newSampleBuffer);
    return newSampleBuffer;
}
Answer 0 (score: 0)
At least one problem is that you are putting every sample buffer into the video writer input. You need to append the samples that come from the audio output to the audio writer input instead.
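A minimal sketch of that routing, assuming the two outputs are kept in ivars named videoDataOutput and audioDataOutput (your posted setup uses local variables, so they would need to be promoted), might look like this. It also starts the writer session at the first buffer's own timestamp instead of retiming every frame, which is a common simplification:

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    if (!CMSampleBufferDataIsReady(sampleBuffer) || !isRecording) { return; }

    // Start the session once, anchored at the first buffer's timestamp
    if (writer.status != AVAssetWriterStatusWriting) {
        [writer startWriting];
        [writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }

    if (captureOutput == videoDataOutput) {
        // Video frames go to the video writer input
        if ([videoWriterInput isReadyForMoreMediaData]) {
            [videoWriterInput appendSampleBuffer:sampleBuffer];
        }
    } else if (captureOutput == audioDataOutput) {
        // Audio buffers go to the audio writer input -- this is the missing piece
        if ([audioWriterInput isReadyForMoreMediaData]) {
            [audioWriterInput appendSampleBuffer:sampleBuffer];
        }
    }
}

Comparing captureOutput against the stored output objects is what lets a single delegate method serve both sample-buffer queues.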
You should also check out this SO question and its answer:
performance-issues-when-using-avcapturevideodataoutput-and-avcaptureaudiodataout