我的相机正在拍照并完美录制视频(使用AVCaptureMovieFileOutput
),我能够正常切换相机位置到前/后。但是,就像Instagram,Snapchat和其他众多应用程序一样,我还希望用户在录制视频时切换摄像头位置。
似乎能够实现这样的事情,我需要与AVCaptureVideoDataOutput
合作,因为它可以处理不同的框架,但我不能真正实现它。一切都很顺利,但在我完成视频后,它只是不玩,似乎没有来自captureOutput
方法的结果网址。这是我的代码:
- (void)initialize{
if(!_session) {
_session = [[AVCaptureSession alloc] init];
_session.sessionPreset = self.cameraQuality;
// preview layer
CGRect bounds = self.preview.layer.bounds;
_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
_captureVideoPreviewLayer.bounds = bounds;
_captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
[self.preview.layer addSublayer:_captureVideoPreviewLayer];
AVCaptureDevicePosition devicePosition;
switch (self.position) {
case LLCameraPositionRear:
if([self.class isRearCameraAvailable]) {
devicePosition = AVCaptureDevicePositionBack;
} else {
devicePosition = AVCaptureDevicePositionFront;
_position = LLCameraPositionFront;
}
break;
case LLCameraPositionFront:
if([self.class isFrontCameraAvailable]) {
devicePosition = AVCaptureDevicePositionFront;
} else {
devicePosition = AVCaptureDevicePositionBack;
_position = LLCameraPositionRear;
}
break;
default:
devicePosition = AVCaptureDevicePositionUnspecified;
break;
}
if(devicePosition == AVCaptureDevicePositionUnspecified) {
_videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
} else {
_videoCaptureDevice = [self cameraWithPosition:devicePosition];
}
NSError *error = nil;
_videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_videoCaptureDevice error:&error];
if (!_videoDeviceInput) {
if(self.onError) {
self.onError(self, error);
}
return;
}
if([self.session canAddInput:_videoDeviceInput]) {
[self.session addInput:_videoDeviceInput];
// self.captureVideoPreviewLayer.connection.videoOrientation = [self orientationForConnection];
}
// add audio if video is enabled
if(self.videoEnabled) {
_audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
_audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_audioCaptureDevice error:&error];
if (!_audioDeviceInput) {
if(self.onError) {
self.onError(self, error);
}
}
if([self.session canAddInput:_audioDeviceInput]) {
[self.session addInput:_audioDeviceInput];
}
// Setup the video output
_videoOutput = [[AVCaptureVideoDataOutput alloc] init];
_videoOutput.alwaysDiscardsLateVideoFrames = NO;
_videoOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
//[NSDictionary dictionaryWithObject:
//[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
// Setup the audio input
_audioOutput = [[AVCaptureAudioDataOutput alloc] init];
// Create the session
[_session addOutput:_videoOutput];
[_session addOutput:_audioOutput];
// Setup the queue
dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
[_videoOutput setSampleBufferDelegate:self queue:queue];
[_audioOutput setSampleBufferDelegate:self queue:queue];
}
// continiously adjust white balance
self.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
// image output
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:outputSettings];
[self.session addOutput:self.stillImageOutput];
}
//if we had disabled the connection on capture, re-enable it
if (![self.captureVideoPreviewLayer.connection isEnabled]) {
[self.captureVideoPreviewLayer.connection setEnabled:YES];
}
// [_assetWriter startWriting];
//[_assetWriter startSessionAtSourceTime:kCMTimeZero];
[self.session startRunning];
}
- (void)stop
{
[self.session stopRunning];
}
-(BOOL) setupWriter:(NSURL*)url {
NSError *error = nil;
_videoWriter = [[AVAssetWriter alloc] initWithURL:url fileType:AVFileTypeQuickTimeMovie
error:&error];
NSParameterAssert(_videoWriter);
// Add video input
NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:128.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:192], AVVideoWidthKey,
[NSNumber numberWithInt:144], AVVideoHeightKey,
videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
NSParameterAssert(_videoWriterInput);
_videoWriterInput.expectsMediaDataInRealTime = YES;
// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioOutputSettings = nil;
// Both type of audio inputs causes output video file to be corrupted.
if( NO ) {
// should work from iphone 3GS on and from ipod 3rd generation
audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil];
} else {
// should work on any device requires more space
audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt: kAudioFormatAppleLossless ], AVFormatIDKey,
[ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil ];
}
_audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType: AVMediaTypeAudio
outputSettings: audioOutputSettings ];
_audioWriterInput.expectsMediaDataInRealTime = YES;
// add input
[_videoWriter addInput:_videoWriterInput];
[_videoWriter addInput:_audioWriterInput];
return YES;
}
-(void) startVideoRecording
{
if( !self.recording )
{
NSURL* url = [[NSURL alloc] initFileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]]];
//if(!debug){
[[NSFileManager defaultManager] removeItemAtURL:url error:nil];
//}
NSLog(@"start video recording...");
if( ![self setupWriter:url] ) {
NSLog(@"Setup Writer Failed") ;
return;
}
// [_session startRunning] ;
self.recording = YES;
}
}
-(void) stopVideoRecording:(void (^)(LLSimpleCamera *camera, NSURL *outputFileUrl, NSError *error))completionBlock
{
NSLog(@"STOP RECORDING");
if(!self.videoEnabled) {
return;
}
if( self.recording )
{
self.recording = NO;
self.didRecord = completionBlock;
[_session stopRunning] ;
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^
{
if(![_videoWriter finishWriting]) {
NSLog(@"finishWriting returned NO") ;
}
});
//[_videoWriter endSessionAtSourceTime:lastSampleTime];
//[_videoWriterInput markAsFinished];
//[_audioWriterInput markAsFinished];
NSLog(@"video recording stopped");
}
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
NSLog(@"CALLING CAPTUREOUTPUT");
self.recording = NO;
[self enableTorch:NO];
if( !CMSampleBufferDataIsReady(sampleBuffer) )
{
NSLog( @"sample buffer is not ready. Skipping sample" );
return;
}
/*if(self.didRecord) {
NSLog(@"DID RECORD EXISTS !!!");
self.didRecord(self, outputFileURL, error);
}*/
//THE ABOVE CODE WOULD GET THE outputFileURL if fromt he captureOutput method delegate if I used AVCaptureMovieFileOutput
if( self.recording == YES )
{
_lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if( _videoWriter.status != AVAssetWriterStatusWriting )
{
[_videoWriter startWriting];
[_videoWriter startSessionAtSourceTime:_lastSampleTime];
}
if( captureOutput == _videoOutput )
[self newVideoSample:sampleBuffer];
else if( captureOutput == _audioOutput) {
[self newAudioSample:sampleBuffer];
}
/*
// If I add audio to the video, then the output file gets corrupted and it cannot be reproduced
else
[self newAudioSample:sampleBuffer];
*/
}
}
-(void) newVideoSample:(CMSampleBufferRef)sampleBuffer
{
if( self.recording )
{
if( _videoWriter.status > AVAssetWriterStatusWriting )
{
NSLog(@"Warning: writer status is %ld", _videoWriter.status);
if( _videoWriter.status == AVAssetWriterStatusFailed )
NSLog(@"Error: %@", _videoWriter.error);
return;
}
if( ![_videoWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(@"Unable to write to video input");
}
}
-(void) newAudioSample:(CMSampleBufferRef)sampleBuffer
{
if( self.recording )
{
if( _videoWriter.status > AVAssetWriterStatusWriting )
{
NSLog(@"Warning: writer status is %ld", _videoWriter.status);
if( _videoWriter.status == AVAssetWriterStatusFailed )
NSLog(@"Error: %@", _videoWriter.error);
return;
}
if( ![_audioWriterInput appendSampleBuffer:sampleBuffer] )
NSLog(@"Unable to write to audio input");
}
}
PS1:以下是对类似问题的引用
Change camera capture device while recording a video
Simultaneous AVCaptureVideoDataOutput and AVCaptureMovieFileOutput
PS2:对于上面代码的不良缩进感到抱歉。代码完全缩进但不知何故,当我在这里发布大量代码时,它会丢失缩进。