An iOS camera app written in Objective-C freezes its preview layer when returning from the lock screen / unlocking the phone.
All of the camera configuration settings are applied in viewWillAppear. So far this has worked, with the single exception that the camera preview layer freezes or gets stuck when coming back from the lock screen. The camera portion of my code is shown below.
Any help is greatly appreciated. Thanks. P.S.: Please feel free to point out any mistakes in my code, as I'm just a newbie.
- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    dispatch_async(dispatch_get_main_queue(), ^{
        [self setGUIBasedOnMode];
    });
}
-(void) setGUIBasedOnMode
{
    if (![self isStreamStarted]) {
        if (shutterActionMode == SnapCamSelectionModeLiveStream)
        {
            _flashButton.hidden = true;
            _cameraButton.hidden = true;
            _liveSteamSession = [[VCSimpleSession alloc] initWithVideoSize:[[UIScreen mainScreen] bounds].size frameRate:30 bitrate:1000000 useInterfaceOrientation:YES];
            [_liveSteamSession.previewView removeFromSuperview];
            AVCaptureVideoPreviewLayer *ptr;
            [_liveSteamSession getCameraPreviewLayer:(&ptr)];
            _liveSteamSession.previewView.frame = self.view.bounds;
            _liveSteamSession.delegate = self;
        }
        else {
            [_liveSteamSession.previewView removeFromSuperview];
            _liveSteamSession.delegate = nil;
            _cameraButton.hidden = false;
            if (flashFlag == 0) {
                _flashButton.hidden = false;
            }
            else if (flashFlag == 1) {
                _flashButton.hidden = true;
            }
            self.session = [[AVCaptureSession alloc] init];
            self.previewView.hidden = false;
            self.previewView.session = self.session;
            [self configureCameraSettings]; // All the camera configuration settings.
            dispatch_async( self.sessionQueue, ^{
                switch ( self.setupResult )
                {
                    case AVCamSetupResultSuccess:
                    {
                        [self addObservers];
                        [self.session startRunning];
                        self.sessionRunning = self.session.isRunning;
                        if (loadingCameraFlag == false) {
                            [self hidingView];
                        }
                        break;
                    }
                    case AVCamSetupResultCameraNotAuthorized:
                    {
                        dispatch_async( dispatch_get_main_queue(), ^{
                            NSString *message = NSLocalizedString( @"MyApp doesn't have permission to use the camera, please change privacy settings", @"Alert message when the user has denied access to the camera");
                            UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert];
                            UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
                            [alertController addAction:cancelAction];
                            UIAlertAction *settingsAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"Settings", @"Alert button to open Settings" ) style:UIAlertActionStyleDefault handler:^( UIAlertAction *action ) {
                                [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString]];
                            }];
                            [alertController addAction:settingsAction];
                            [self presentViewController:alertController animated:YES completion:nil];
                        } );
                        break;
                    }
                    case AVCamSetupResultSessionConfigurationFailed:
                    {
                        dispatch_async( dispatch_get_main_queue(), ^{
                            NSString *message = NSLocalizedString( @"Unable to capture media", @"Alert message when something goes wrong during capture session configuration" );
                            UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"MyApp" message:message preferredStyle:UIAlertControllerStyleAlert];
                            UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
                            [alertController addAction:cancelAction];
                            [self presentViewController:alertController animated:YES completion:nil];
                        } );
                        break;
                    }
                }
            });
        }
    }
}
-(void)configureCameraSettings
{
    self.sessionQueue = dispatch_queue_create( "session queue", DISPATCH_QUEUE_SERIAL );
    self.setupResult = AVCamSetupResultSuccess;
    switch ( [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] )
    {
        case AVAuthorizationStatusAuthorized:
        {
            break;
        }
        case AVAuthorizationStatusNotDetermined:
        {
            dispatch_suspend( self.sessionQueue );
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^( BOOL granted ) {
                if ( ! granted ) {
                    self.setupResult = AVCamSetupResultCameraNotAuthorized;
                }
                dispatch_resume( self.sessionQueue );
            }];
            break;
        }
        default:
        {
            self.setupResult = AVCamSetupResultCameraNotAuthorized;
            break;
        }
    }
    dispatch_async( self.sessionQueue, ^{
        if ( self.setupResult != AVCamSetupResultSuccess ) {
            return;
        }
        self.backgroundRecordingID = UIBackgroundTaskInvalid;
        NSError *error = nil;
        AVCaptureDevice *videoDevice = [IPhoneCameraViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
        AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        [self.session beginConfiguration];
        if ( [self.session canAddInput:videoDeviceInput] ) {
            [self.session addInput:videoDeviceInput];
            self.videoDeviceInput = videoDeviceInput;
            dispatch_async( dispatch_get_main_queue(), ^{
                UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
                AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait;
                if ( statusBarOrientation != UIInterfaceOrientationUnknown ) {
                    initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation;
                }
                AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
                if (shutterActionMode == SnapCamSelectionModeVideo)
                {
                    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
                    if ([self.session canSetSessionPreset:AVCaptureSessionPresetMedium]) {
                        [self.session setSessionPreset:AVCaptureSessionPresetMedium];
                    }
                }
                previewLayer.connection.videoOrientation = initialVideoOrientation;
            } );
        }
        else {
            self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        }
        AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
        if ( ! audioDeviceInput ) {
        }
        if ( [self.session canAddInput:audioDeviceInput] ) {
            [self.session addInput:audioDeviceInput];
        }
        else {
        }
        AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
        Float64 TotalSeconds = 10*60;
        int32_t preferredTimeScale = 30;
        CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale);
        movieFileOutput.maxRecordedDuration = maxDuration;
        movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024 * 100;
        if ( [self.session canAddOutput:movieFileOutput] ) {
            [self.session addOutput:movieFileOutput];
            AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
            if ( connection.isVideoStabilizationSupported ) {
                connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
            }
            self.movieFileOutput = movieFileOutput;
        }
        else {
            self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        }
        AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
        if ( [self.session canAddOutput:stillImageOutput] ) {
            stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
            [self.session addOutput:stillImageOutput];
            self.stillImageOutput = stillImageOutput;
        }
        else {
            self.setupResult = AVCamSetupResultSessionConfigurationFailed;
        }
        [self.session commitConfiguration];
    });
}
Answer 0 (score: 1)
Try observing the UIApplicationDidEnterBackgroundNotification/UIApplicationWillEnterForegroundNotification and UIApplicationWillResignActiveNotification/UIApplicationDidBecomeActiveNotification notifications, and stop/start your capture session accordingly.
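For example, a minimal sketch of that approach (not part of the original answer; it assumes the view controller keeps the session, sessionQueue and setupResult properties shown in the question) could register the observers once and stop/start the session on the session queue:

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Register once for the app-state notifications suggested above.
    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
    [center addObserver:self
               selector:@selector(appWillResignActive:)
                   name:UIApplicationWillResignActiveNotification
                 object:nil];
    [center addObserver:self
               selector:@selector(appDidBecomeActive:)
                   name:UIApplicationDidBecomeActiveNotification
                 object:nil];
}

- (void)appWillResignActive:(NSNotification *)notification
{
    // Stop the session while the app is inactive (e.g. the screen is locked).
    dispatch_async(self.sessionQueue, ^{
        if (self.session.isRunning) {
            [self.session stopRunning];
        }
    });
}

- (void)appDidBecomeActive:(NSNotification *)notification
{
    // Restart the session when the app becomes active again, so the preview
    // layer resumes instead of showing the last frozen frame.
    dispatch_async(self.sessionQueue, ^{
        if (self.setupResult == AVCamSetupResultSuccess && !self.session.isRunning) {
            [self.session startRunning];
        }
    });
}

- (void)dealloc
{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}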
Answer 1 (score: 0)
There is a big difference between where in the application lifecycle the viewDidLoad, viewWillAppear, and viewDidAppear methods are executed.
Creating UIViews or performing heavy tasks is quite expensive and is what causes the freeze; you should avoid doing this in viewWillAppear as much as possible.
Take a look at:
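For example, one possible restructuring (a sketch only, reusing the session, sessionQueue, setupResult and previewView properties from the question) is to do the expensive configuration once in viewDidLoad and keep the appearance callbacks limited to starting and stopping the session:

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Expensive one-time setup: create the session and configure its
    // inputs/outputs here instead of on every viewWillAppear.
    self.session = [[AVCaptureSession alloc] init];
    self.previewView.session = self.session;
    [self configureCameraSettings]; // in the question's code this also creates self.sessionQueue
}

- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    // Nothing heavy here: only start the already-configured session.
    dispatch_async(self.sessionQueue, ^{
        if (self.setupResult == AVCamSetupResultSuccess && !self.session.isRunning) {
            [self.session startRunning];
        }
    });
}

- (void)viewDidDisappear:(BOOL)animated
{
    [super viewDidDisappear:animated];
    dispatch_async(self.sessionQueue, ^{
        if (self.session.isRunning) {
            [self.session stopRunning];
        }
    });
}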