iOS 8 movie from the back and front camera

Date: 2015-05-28 07:21:21

Tags: ios iphone sdk camera avfoundation

How can I record on iOS 8, or even iOS 7 if possible, and switch between the cameras while recording? I have seen apps that record from the front camera, then switch to the back camera, and when the movie is done you can see that it was shot with both cameras. I think the Glide app has it, and maybe Snapchat (not sure about the last one).

2 Answers:

Answer 0: (score: 4)

After a long search and a lot of struggling, here is the code... Thank you for your suggestion, Matic, for pointing me down the right path; I made something a bit better that does not require actually merging files at the end.

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreGraphics/CoreGraphics.h>

@interface ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
{
    IBOutlet UIView *viewCanvasRecording;
    IBOutlet UIButton *btnRecord;

    AVCaptureDevice *frontCamera; // A pointer to the front camera
    AVCaptureDevice *backCamera; // A pointer to the back camera

    AVCaptureDeviceInput *captureFrontInput;
    AVCaptureDeviceInput *captureBackInput;

    // __block is only valid on local variables; ivars are reached through self inside blocks
    AVAssetWriter *_assetWriter;
    AVAssetWriterInput *_videoWriterInput;
    AVAssetWriterInput *_audioWriterInput;

    AVCaptureVideoDataOutput *videoOutput;

    dispatch_queue_t _captureQueue;

    BOOL currentFrontCamera;
    BOOL isCapturingInput;
    NSURL *recordingFile;
}

@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;

@end



// ===== now the .m file


//
//  ViewController.m
//  record
//
//  Created by Catalin on 31/05/15.
//  Copyright (c) 2015 lamobratory.com. All rights reserved.
//

#import "ViewController.h"
#import <AssetsLibrary/AssetsLibrary.h>
#import <MediaPlayer/MediaPlayer.h>

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];

    currentFrontCamera=NO;
    isCapturingInput=NO;
    _assetWriter=nil;

    [self findCamera:YES]; // init the front camera
    [self findCamera:NO]; // init the back camera

    [self performSelector:@selector(initCaptureWithCamera) withObject:nil afterDelay:1];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}

-(IBAction)tapStartRecord:(id)sender
{
    if([[btnRecord titleForState:UIControlStateNormal] isEqualToString:@"START"])
    {
        [btnRecord setTitle:@"STOP" forState:UIControlStateNormal];
        isCapturingInput=YES;
    }
    else if([[btnRecord titleForState:UIControlStateNormal] isEqualToString:@"STOP"])
    {
        isCapturingInput=NO;

        dispatch_async(_captureQueue, ^{

            [_assetWriter finishWritingWithCompletionHandler:^{

                ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];

                [library writeVideoAtPathToSavedPhotosAlbum:recordingFile completionBlock:^(NSURL *assetURL, NSError *error)
                {
                    if (error)
                    {
                        NSLog(@"assets library failed (%@)", error);
                    }
                    else
                    {
                        NSLog(@"file saved to library");
                    }

                    [self.captureSession stopRunning];
                    _assetWriter=nil;
                    recordingFile = nil;
                }];
            }];

        });

        [btnRecord setTitle:@"START" forState:UIControlStateNormal];
    }
}

-(IBAction)tapSwitchCamera:(id)sender
{
    // switch outputs
    [self swipeCamera];
}

-(void)swipeCamera
{
    currentFrontCamera=!currentFrontCamera; // swipe camera

    [self.captureSession beginConfiguration];

    [self.captureSession removeInput:captureBackInput];
    [self.captureSession removeInput:captureFrontInput];

    if(!currentFrontCamera)
        [self.captureSession addInput:captureBackInput];
    else
        [self.captureSession addInput:captureFrontInput];

    [self.captureSession commitConfiguration];
}

#pragma mark - Camera methods

-(BOOL)findCamera:(BOOL)useFrontCamera
{
    // 0. Make sure we initialize our camera pointer:
    AVCaptureDevice *m_camera = nil;

    // 1. Get a list of available devices:
    // specifying AVMediaTypeVideo will ensure we only get a list of cameras, no microphones
    NSArray * devices = [ AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo ];

    // 2. Iterate through the device array and if a device is a camera, check if it's the one we want:
    for ( AVCaptureDevice * device in devices )
    {
        if ( useFrontCamera && AVCaptureDevicePositionFront == [ device position ] )
        {
            // We asked for the front camera and got the front camera, now keep a pointer to it:
            m_camera = device;
        }
        else if ( !useFrontCamera && AVCaptureDevicePositionBack == [ device position ] )
        {
            // We asked for the back camera and here it is:
            m_camera = device;
        }
    }

    // 3. Set a frame rate range for the camera:
    if ( nil != m_camera )
    {
        // We first need to lock the camera, so no one else can mess with its configuration:
        if ( [ m_camera lockForConfiguration: NULL ] )
        {
            // Frame duration is the reciprocal of frame rate, so the max frame duration
            // of 1/10 s gives a minimum frame rate of 10 frames per second...
            [ m_camera setActiveVideoMaxFrameDuration: CMTimeMake( 1, 10 ) ];

            // ...and the min frame duration of 1/30 s gives a maximum of 30 frames per second
            [ m_camera setActiveVideoMinFrameDuration: CMTimeMake( 1, 30 ) ];

            [ m_camera unlockForConfiguration ];
        }
    }

    if(!useFrontCamera)
        backCamera=m_camera;
    else
        frontCamera=m_camera;

    // 4. If we've found the camera we want, return YES:
    return ( nil != m_camera );
}

-(void) setupWriter
{
    NSError *error = nil;
    _assetWriter = [[AVAssetWriter alloc] initWithURL:recordingFile fileType:AVFileTypeQuickTimeMovie error:&error];

    // the capture output reports its current dimensions under the kCVPixelBuffer{Width,Height}Key keys
    NSDictionary* actual = videoOutput.videoSettings;
    int _cy = [[actual objectForKey:(__bridge NSString *)kCVPixelBufferHeightKey] intValue];
    int _cx = [[actual objectForKey:(__bridge NSString *)kCVPixelBufferWidthKey] intValue];

    NSDictionary* settings = [NSDictionary dictionaryWithObjectsAndKeys:
                              AVVideoCodecH264, AVVideoCodecKey,
                              [NSNumber numberWithInt: _cx], AVVideoWidthKey,
                              [NSNumber numberWithInt: _cy], AVVideoHeightKey,
                              nil];

    _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
    _videoWriterInput.expectsMediaDataInRealTime = YES;

    _videoWriterInput.transform=CGAffineTransformMakeRotation(M_PI/2); // otherwise the movie comes out in landscape even though the phone is held in portrait

    /*settings = [NSDictionary dictionaryWithObjectsAndKeys:
                [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,
                [ NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
                [ NSNumber numberWithFloat: 16000], AVSampleRateKey,
                [ NSNumber numberWithInt: 64000 ], AVEncoderBitRateKey,
                nil];*/

    // nil output settings: the incoming audio samples are appended to the file unmodified
    _audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeAudio outputSettings: nil];
    _audioWriterInput.expectsMediaDataInRealTime = YES;

    // attach the inputs to the writer
    if([_assetWriter canAddInput:_videoWriterInput])
    {
        NSLog(@"added video input to writer");
        [_assetWriter addInput:_videoWriterInput];
    }

    if([_assetWriter canAddInput:_audioWriterInput])
    {
        NSLog(@"added audio input to writer");
        [_assetWriter addInput:_audioWriterInput];
    }
}

- (void)initCaptureWithCamera
{
    if(self.captureVideoPreviewLayer!=nil) // refresh the views 
        [self.captureVideoPreviewLayer removeFromSuperlayer];

    // remove old file at path if exists
    recordingFile = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/tmp_vid.mov", NSTemporaryDirectory()]];
    [[NSFileManager defaultManager] removeItemAtURL:recordingFile error:nil];

    // ========================= configure session

    self.captureSession = [[AVCaptureSession alloc] init];
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;

    // ========================= input devices from camera and mic

    NSError * error = nil;
    captureFrontInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error: &error];
    captureBackInput = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error: &error];

    if ( nil != error )
        return;

    if ([self.captureSession canAddInput:captureBackInput])
    {
        NSLog(@"added input from camera");
        [self.captureSession addInput:captureBackInput];
    }

    // audio input from default mic
    AVCaptureDevice* mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    AVCaptureDeviceInput* micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];

    if ([self.captureSession canAddInput:micinput])
    {
        NSLog(@"added input from mic");
        [self.captureSession addInput:micinput];
    }

    // ========================= now the outputs: video and audio data feeding the asset writer

    _captureQueue = dispatch_queue_create("com.myapp.capture", DISPATCH_QUEUE_SERIAL);

    videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];

    [videoOutput setSampleBufferDelegate:self queue:_captureQueue];
    [audioOutput setSampleBufferDelegate:self queue:_captureQueue];

//    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
//    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
//    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
//    
//    [videoOutput setVideoSettings:videoSettings];

    if ([self.captureSession canAddOutput:videoOutput]) {
        [self.captureSession addOutput:videoOutput];
    }

    if ([self.captureSession canAddOutput:audioOutput]) {
        [self.captureSession addOutput:audioOutput];
    }

    // ======================================================================

    // add the preview layer to see what we film
    if (!self.captureVideoPreviewLayer)
        self.captureVideoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];

    // if you want to adjust the preview layer's frame, this is the place
    self.captureVideoPreviewLayer.frame = viewCanvasRecording.bounds;
    self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;

    [viewCanvasRecording.layer addSublayer: self.captureVideoPreviewLayer];

    [self.captureSession startRunning];
}

-(void)captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
{
    BOOL frameFromVideoCaptured = NO;

    @synchronized(self)
    {
        if (!isCapturingInput) // we haven't started filming yet, so just ignore the frames
            return;

        if(!_assetWriter)
        {
            [self setupWriter];
        }

        frameFromVideoCaptured=(captureOutput==videoOutput);
    }

    // pass the frame to the asset writer
    [self writeFrame:sampleBuffer isVideo:frameFromVideoCaptured];
}

-(BOOL)writeFrame:(CMSampleBufferRef)sampleBuffer isVideo:(BOOL)isVideo
{
    if (CMSampleBufferDataIsReady(sampleBuffer))
    {
        if (_assetWriter.status == AVAssetWriterStatusUnknown)
        {
            CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            [_assetWriter startWriting];
            [_assetWriter startSessionAtSourceTime:startTime];
        }

        if (_assetWriter.status == AVAssetWriterStatusFailed)
        {
            NSLog(@"writer error %@", _assetWriter.error.localizedDescription);
            return NO;
        }

        if (isVideo)
        {
            if (_videoWriterInput.readyForMoreMediaData == YES)
            {
                [_videoWriterInput appendSampleBuffer:sampleBuffer];
                return YES;
            }
        }
        else
        {
            if (_audioWriterInput.readyForMoreMediaData)
            {
                [_audioWriterInput appendSampleBuffer:sampleBuffer];
                return YES;
            }
        }
    }

    return NO;
}

@end

Answer 1: (score: 1)

I am using a multi-file approach. When swapping, I remove the inputs and outputs on the capture session, re-attach a new output (an AVCaptureMovieFileOutput) and attach the correct input. When recording continues, the AVCaptureMovieFileOutput gets a different file name than the previous one, so I actually create a series of files. Also note that the AVCaptureSession has to be a new object; you most likely cannot reuse the previous one, because it probably hasn't stopped writing yet. The inputs can be reused. A rough sketch of the swap follows.
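This is only an illustration of the idea, not code from my actual project; the movieFileOutput and currentInput ivars and the nextClipURL helper are placeholder names of my own, and the controller is assumed to adopt AVCaptureFileOutputRecordingDelegate. The microphone input and the preview layer re-attachment are omitted for brevity.

- (void)swapToCameraInput:(AVCaptureDeviceInput *)newInput
{
    // Finish the current clip; the old session keeps flushing its file in the background.
    [movieFileOutput stopRecording];

    // Build a brand-new session instead of reusing the old one,
    // since the previous output may still be writing.
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetHigh;

    // Inputs can be reused; the movie file output must be a fresh object.
    movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([session canAddInput:newInput])
        [session addInput:newInput];
    if ([session canAddOutput:movieFileOutput])
        [session addOutput:movieFileOutput];

    self.captureSession = session;
    [session startRunning];

    // Record the next clip under a different file name, building up the series of files.
    [movieFileOutput startRecordingToOutputFileURL:[self nextClipURL] recordingDelegate:self];
    currentInput = newInput;
}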

Once everything is done, the videos can be stitched together in various ways, depending on how much processing you need.
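At the simplest level, an AVMutableComposition can append the clips back to back and an AVAssetExportSession can export one combined movie. A minimal sketch, assuming the clip URLs were collected into an array while recording:

- (void)stitchClips:(NSArray *)clipURLs toURL:(NSURL *)outputURL
{
    AVMutableComposition *composition = [AVMutableComposition composition];
    CMTime cursor = kCMTimeZero;

    // Append each clip's full time range at the current end of the composition.
    for (NSURL *url in clipURLs)
    {
        AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
        CMTimeRange range = CMTimeRangeMake(kCMTimeZero, asset.duration);
        [composition insertTimeRange:range ofAsset:asset atTime:cursor error:nil];
        cursor = CMTimeAdd(cursor, asset.duration);
    }

    // Export the combined movie as a single file.
    AVAssetExportSession *export = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                    presetName:AVAssetExportPresetHighestQuality];
    export.outputURL = outputURL;
    export.outputFileType = AVFileTypeQuickTimeMovie;
    [export exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"stitch finished with status %ld", (long)export.status);
    }];
}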

So my basic components are AVCaptureMovieFileOutput and AVCaptureDeviceInput. Additionally, an AVCaptureVideoPreviewLayer is attached to see what you are recording.