Can there be more than one capture session object at a time in AVCam?

Time: 2013-08-20 06:59:57

Tags: iphone avfoundation avcapturesession avcapturedevice avcam

I am using the AVFoundation framework to record video, and I am trying to use the front and back cameras at the same time. However, when the front camera works normally, the back camera freezes on its first frame, and vice versa. Can there be more than one AVCaptureSession object at any one time? My code is below; any help is appreciated.

- (void)viewDidLoad
 {

[super viewDidLoad];
self.view.frame = CGRectMake(0, 0, 320, 568);
self.view.backgroundColor = [UIColor clearColor];
ActiveIndicatorView = [[UIView alloc]initWithFrame:CGRectMake(0, 0, 320, 250)];
ActiveIndicatorView.backgroundColor = [UIColor clearColor];
[self.view addSubview:ActiveIndicatorView];
ActiveIndicatorViewBack = [[UIView alloc]initWithFrame:CGRectMake(0, 290, 320, 250)];
ActiveIndicatorViewBack.backgroundColor = [UIColor greenColor];
[self.view addSubview:ActiveIndicatorViewBack];
tracksArray = [[NSMutableArray alloc] initWithCapacity:2];
[self startRecording];

}

 -(void)startRecording{

[self StartCameraTwo];//front camera
[self StartCamera];//back camera

 }
-(void)StartCamera{

// NSLog(@"Setting up capture session");
CaptureSessionBack = [[AVCaptureSession alloc] init];
//ADD VIDEO INPUT

AVCaptureDevice *VideoDeviceBack = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
 // VideoDeviceBack = [self frontCamera];

if (VideoDeviceBack)
{
    NSError *error;
    VideoInputDeviceBack = [AVCaptureDeviceInput deviceInputWithDevice:VideoDeviceBack error:&error];
    if (!error)
    {
        if ([CaptureSessionBack canAddInput:VideoInputDeviceBack])
            [CaptureSessionBack addInput:VideoInputDeviceBack];
        else
            NSLog(@"Couldn't add back-camera video input");
    }
    else
    {
        NSLog(@"Couldn't create back-camera video input: %@", error);
    }
}
else
{
    // No video capture device available
}
AVCaptureDevice *audioCaptureDeviceBack = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInputBack = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDeviceBack error:&error];
if (audioInputBack)
{
    [CaptureSessionBack addInput:audioInputBack];
}
previewBack = [self videoPreviewWithFrameBack :ActiveIndicatorViewBack.bounds];
previewBack.backgroundColor = [UIColor grayColor];
[ActiveIndicatorViewBack addSubview:previewBack];
  // [preview addSubview:vie];
MovieFileOutputBack = [[AVCaptureMovieFileOutput alloc] init];
MovieFileOutputBack.minFreeDiskSpaceLimit = 1024 * 1024;                        //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME

if ([CaptureSessionBack canAddOutput:MovieFileOutputBack])
    [CaptureSessionBack addOutput:MovieFileOutputBack];

//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputPropertiesBack];           //(We call a method as it also has to be done after changing camera)

//NSLog(@"Setting image quality");
if ([CaptureSessionBack canSetSessionPreset:AVCaptureSessionPreset640x480]) {   //Check size-based configs are supported before setting them
    [CaptureSessionBack setSessionPreset:AVCaptureSessionPreset640x480];
}

if ([VideoDeviceBack isFocusModeSupported:AVCaptureFocusModeAutoFocus] && [VideoDeviceBack lockForConfiguration:&error]){
    [VideoDeviceBack setFocusMode:AVCaptureFocusModeAutoFocus];
    if ([VideoDeviceBack isFocusPointOfInterestSupported])
        [VideoDeviceBack setFocusPointOfInterest:CGPointMake(0.5f,0.5f)];
    [VideoDeviceBack unlockForConfiguration];
}

[CaptureSessionBack startRunning];

}
-(void)StartCameraTwo{
// NSLog(@"Setting up capture session");
CaptureSession = [[AVCaptureSession alloc] init];
//ADD VIDEO INPUT

AVCaptureDevice *VideoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
VideoDevice = [self frontCamera];

if (VideoDevice)
{
    NSError *error;
    VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
    if (!error)
    {
        if ([CaptureSession canAddInput:VideoInputDevice])
            [CaptureSession addInput:VideoInputDevice];
        else
            NSLog(@"Couldn't add front-camera video input");
    }
    else
    {
        NSLog(@"Couldn't create front-camera video input: %@", error);
    }
}
else
{
    // No front camera found
}

AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (audioInput)
{
    [CaptureSession addInput:audioInput];
}

preview = [self videoPreviewWithFrame:ActiveIndicatorView.bounds];
ActiveIndicatorView.backgroundColor = [UIColor redColor];
[ActiveIndicatorView addSubview:preview];
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024;                        //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME

if ([CaptureSession canAddOutput:MovieFileOutput])
    [CaptureSession addOutput:MovieFileOutput];

//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties];           //(We call a method as it also has to be done after changing camera)

//NSLog(@"Setting image quality");
if ([CaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {   //Check size-based configs are supported before setting them
    [CaptureSession setSessionPreset:AVCaptureSessionPreset640x480];
}

if ([VideoDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus] && [VideoDevice lockForConfiguration:&error]){
    [VideoDevice setFocusMode:AVCaptureFocusModeAutoFocus];
    if ([VideoDevice isFocusPointOfInterestSupported])
        [VideoDevice setFocusPointOfInterest:CGPointMake(0.5f,0.5f)];
    [VideoDevice unlockForConfiguration];
}

[CaptureSession startRunning];
}

- (void) CameraSetOutputProperties
{
//SET THE CONNECTION PROPERTIES (output properties)
CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];

//Set landscape (if required)
if ([CaptureConnection isVideoOrientationSupported])
{
    AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;     //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE
    [CaptureConnection setVideoOrientation:orientation];

}

//Set frame rate (if required)
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);

if (CaptureConnection.supportsVideoMinFrameDuration)
    CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
    CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);

CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
 }

  - (void) CameraSetOutputPropertiesBack
  {
//SET THE CONNECTION PROPERTIES (output properties)
CaptureConnectionBack = [MovieFileOutputBack connectionWithMediaType:AVMediaTypeVideo];

//Set landscape (if required)
if ([CaptureConnectionBack isVideoOrientationSupported])
{
    AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;     //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE
    [CaptureConnectionBack setVideoOrientation:orientation];

}

//Set frame rate (if required)
CMTimeShow(CaptureConnectionBack.videoMinFrameDuration);
CMTimeShow(CaptureConnectionBack.videoMaxFrameDuration);

if (CaptureConnectionBack.supportsVideoMinFrameDuration)
    CaptureConnectionBack.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnectionBack.supportsVideoMaxFrameDuration)
    CaptureConnectionBack.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);

CMTimeShow(CaptureConnectionBack.videoMinFrameDuration);
CMTimeShow(CaptureConnectionBack.videoMaxFrameDuration);
 }
 - (AVCaptureDevice *)frontCamera {
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
    if ([device position] == AVCaptureDevicePositionFront) {
        return device;
    }
}
return nil;
  }

1 Answer:

Answer 0 (score: 0)

It is not currently possible to use the front and back cameras at the same time: only one AVCaptureSession can drive the camera hardware, so starting the second session leaves the first one's preview frozen.
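
A common workaround is to keep a single AVCaptureSession and swap its video input when the other camera is needed, rather than running two sessions at once. Below is a minimal sketch of that approach; it reuses the CaptureSession and VideoInputDevice variables from the question's code, and the method name switchToCameraWithPosition: is a hypothetical helper, not an AVFoundation API.

- (void)switchToCameraWithPosition:(AVCaptureDevicePosition)position
{
    // Find the device at the requested position (front or back).
    AVCaptureDevice *newDevice = nil;
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([device position] == position) {
            newDevice = device;
            break;
        }
    }
    if (!newDevice) return;

    NSError *error = nil;
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newDevice error:&error];
    if (!newInput) {
        NSLog(@"Couldn't create input for camera switch: %@", error);
        return;
    }

    // Swap the video input on the one running session inside a configuration block.
    [CaptureSession beginConfiguration];
    [CaptureSession removeInput:VideoInputDevice];
    if ([CaptureSession canAddInput:newInput]) {
        [CaptureSession addInput:newInput];
        VideoInputDevice = newInput;
    } else {
        // Restore the previous input if the new one can't be added.
        [CaptureSession addInput:VideoInputDevice];
    }
    [CaptureSession commitConfiguration];
}

With this approach only one preview is live at a time, which matches the platform limitation described above.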