Screen recording on iPad

Asked: 2012-06-19 06:31:20

Tags: objective-c ipad ios5 avassetwriter

I have been stuck on this problem for the past two weeks. I am developing an iPad application in which the user can annotate the screen, and I want to capture that annotation as a screen recording. The annotation part works fine, but as soon as I start recording, the drawing loses its smoothness and starts to lag. For the screen recording I am using AVAssetWriter. The code works for both annotation and screen recording, but I cannot figure out where the problem is.

My screenshot size is (1050, 650).

Should I use Grand Central Dispatch to solve this? (I have sketched the GCD idea I had in mind after the code below.) Can anyone help me with this problem?

Please, please help me...

My code:

// For Annotation

    - (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
    {
        mouseSwiped = NO;
        UITouch *touch = [touches anyObject];

        if ([touch tapCount] == 2)
        {
            drawImage.image = nil;  // Double-tap to undo the drawing.
            return;
        }

        lastPoint = [touch locationInView:self.view];
        lastPoint.y -= 20;
    }
    - (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
    {
        mouseSwiped = YES;
        UITouch *touch = [touches anyObject];
        CGPoint currentPoint = [touch locationInView:self.view];
        currentPoint.y -= 20;

        // Redraw the accumulated image plus one new segment on every move.
        UIGraphicsBeginImageContext(drawImage.frame.size);
        [drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width, drawImage.frame.size.height)];
        CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
        CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 10.0);
        CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
        CGContextBeginPath(UIGraphicsGetCurrentContext());
        CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
        CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, currentPoint.y);
        CGContextStrokePath(UIGraphicsGetCurrentContext());
        drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        lastPoint = currentPoint;
    }
    - (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
    {
        UITouch *touch = [touches anyObject];
        if ([touch tapCount] == 2)
        {
            drawImage.image = nil;
            return;
        }

        if (!mouseSwiped)
        {
            // A tap without a drag draws a single dot at lastPoint.
            UIGraphicsBeginImageContext(drawImage.frame.size);
            [drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width, drawImage.frame.size.height)];
            CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
            CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 5.0);
            CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
            CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
            CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
            CGContextStrokePath(UIGraphicsGetCurrentContext());
            CGContextFlush(UIGraphicsGetCurrentContext());
            drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();
        }
    }



    // For Screen Recording

    // NB: these should match the actual screenshot dimensions
    // (the screenshot described above is 1050 x 650, while FRAME_WIDTH is 1024).
    #define FRAME_WIDTH 1024
    #define FRAME_HEIGHT 650
    #define TIME_SCALE 600

    - (UIImage *)screenshot
    {
        // renderInContext: walks the entire layer tree on the main thread;
        // this is the most expensive call in the capture path.
        UIGraphicsBeginImageContext(drawImage.frame.size);
        [self.view.layer renderInContext:UIGraphicsGetCurrentContext()];
        UIImage *viewImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return viewImage;
    }


    - (NSURL *)pathToDocumentsDirectory
    {
        NSString *outputPath = [[NSString alloc] initWithFormat:@"%@/%@",
                                [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0],
                                @"output.mov"];
        outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];

        NSFileManager *fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:outputPath]) {
            NSError *error;
            if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
                NSLog(@"Could not delete old recording file at path: %@", outputPath);
            }
        }

        [outputPath release];
        return [outputURL autorelease];
    }


    // Hands ownership of the copied image bytes to CoreVideo, so the CFData
    // is released only when the pixel buffer is done with it.
    static void ReleaseScreenshotBytes(void *releaseRefCon, const void *baseAddress)
    {
        CFRelease((CFDataRef)releaseRefCon);
    }

    - (void)writeSample:(NSTimer *)_timer
    {
        if (assetWriterInput.readyForMoreMediaData) {
            CVReturn cvErr = kCVReturnSuccess;

            // Grab a screenshot.
            CGImageRef image = (CGImageRef)[[self screenshot] CGImage];
            NSLog(@"made screenshot");

            // Wrap the image bytes in a pixel buffer; the release callback
            // frees imageData once CoreVideo no longer needs the bytes.
            CVPixelBufferRef pixelBuffer = NULL;
            CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
            NSLog(@"copied image data");
            cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                 FRAME_WIDTH,
                                                 FRAME_HEIGHT,
                                                 kCVPixelFormatType_32BGRA,
                                                 (void *)CFDataGetBytePtr(imageData),
                                                 CGImageGetBytesPerRow(image),
                                                 ReleaseScreenshotBytes,
                                                 (void *)imageData,
                                                 NULL,
                                                 &pixelBuffer);
            NSLog(@"CVPixelBufferCreateWithBytes returned %d", cvErr);

            // Presentation time is the wall-clock offset from the first frame.
            CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
            CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;
            NSLog(@"elapsedTime: %f", elapsedTime);
            CMTime presentationTime = CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);

            // Write the sample, then drop our reference to the buffer.
            BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                        withPresentationTime:presentationTime];
            CVPixelBufferRelease(pixelBuffer);

            if (appended) {
                NSLog(@"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
            } else {
                NSLog(@"failed to append");
                [self stopRecording];
            }
        }
    }


    - (void)startRecording
    {
        movieURL = [self pathToDocumentsDirectory];
        NSLog(@"path=%@", movieURL);
        movieError = nil;
        [assetWriter release];

        assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL
                                                fileType:AVFileTypeQuickTimeMovie
                                                   error:&movieError];
        [self writer];

        // Fire writeSample: on a repeating timer at the target frame rate.
        frameRate = 40.0f;
        [assetWriterTimer release];
        assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:(1.0 / frameRate)
                                                            target:self
                                                          selector:@selector(writeSample:)
                                                          userInfo:nil
                                                           repeats:YES];
    }


    - (void)writer
    {
        NSDictionary *assetWriterInputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                                  AVVideoCodecH264, AVVideoCodecKey,
                                                  [NSNumber numberWithInt:FRAME_WIDTH], AVVideoWidthKey,
                                                  [NSNumber numberWithInt:FRAME_HEIGHT], AVVideoHeightKey,
                                                  nil];

        assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                              outputSettings:assetWriterInputSettings];
        assetWriterInput.expectsMediaDataInRealTime = YES;
        [assetWriter addInput:assetWriterInput];

        [assetWriterPixelBufferAdaptor release];
        assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
                                         initWithAssetWriterInput:assetWriterInput
                                         sourcePixelBufferAttributes:nil];
        [assetWriter startWriting];

        firstFrameWallClockTime = CFAbsoluteTimeGetCurrent();
        [assetWriter startSessionAtSourceTime:CMTimeMake(0, TIME_SCALE)];
    }


    - (void)stopRecording
    {
        [assetWriterTimer invalidate];
        assetWriterTimer = nil;

        [assetWriter finishWriting];
        NSLog(@"finished writing");
    }
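
Edit: here is roughly the Grand Central Dispatch idea I was asking about, in case it makes the question clearer. It is only a sketch, not code I have tested: captureQueue would be a new serial-queue ivar, and appendScreenshot:atTime: is a hypothetical method that would hold the pixel-buffer creation and append code currently in writeSample:.

    // Sketch only. captureQueue is a new dispatch_queue_t ivar, created once
    // in startRecording:
    //     captureQueue = dispatch_queue_create("screen.recorder", DISPATCH_QUEUE_SERIAL);

    - (void)writeSample:(NSTimer *)_timer
    {
        // renderInContext: must run on the main thread, so only the screenshot
        // is taken here; the data copy and the append move to the queue.
        UIImage *shot = [[self screenshot] retain];
        CFAbsoluteTime frameTime = CFAbsoluteTimeGetCurrent();

        dispatch_async(captureQueue, ^{
            if (assetWriterInput.readyForMoreMediaData) {
                CMTime presentationTime = CMTimeMake((frameTime - firstFrameWallClockTime) * TIME_SCALE,
                                                     TIME_SCALE);
                [self appendScreenshot:shot.CGImage atTime:presentationTime];
            }
            [shot release];
        });
    }

Would something like this help, or is renderInContext: itself the bottleneck either way?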

1 Answer:

Answer 0 (score: 0):

The easiest thing to try is lowering the frame rate.
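
In startRecording that just means a longer timer interval. A minimal sketch, with 10 fps picked arbitrarily as a starting point:

    // Capturing the screen with renderInContext: and CGDataProviderCopyData
    // can easily cost more than the 25 ms per frame that a 40 fps timer
    // allows, so the main thread falls behind and the drawing lags.
    frameRate = 10.0f;
    assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:(1.0 / frameRate)
                                                        target:self
                                                      selector:@selector(writeSample:)
                                                      userInfo:nil
                                                       repeats:YES];

If 10 fps is smooth, raise it until the lag returns; the sustainable rate depends on the device and on the size of the area being captured.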