处理视频文件和视频流的正确方法

时间:2013-02-07 20:09:44

标签: objective-c macos video-processing nsoperation qtkit

//更新

我为我的问题找到了一个小改进:我在录制的同时,用一个非常低优先级的线程开始转换视频小片段。停止录制后,我把线程优先级设为中等,没有阻塞UI线程。所以在那之后用户只需等待约 0.3 * O(t),其中 t 是视频时长。

此外,我找到了一个优雅的解决方案,但很难实现。 https://github.com/WIZARDISHUNGRY/qctv 它应该在录制时渲染电影。

//更新

我在视频转换方面遇到了一些问题。 我的程序的目标是实时对来自网络摄像头的视频应用几个过滤器,例如棕褐色过滤器和水平翻转。

现在我正在使用后期视频处理。

这是一个问题:

有没有办法在录制视频文件之前修改视频帧? 或者从相机更改输入视频?就像从CoreImage框架到CIImage应用CIFilter CIColorCube一样。

我正试图将原始视频分成持续时间为1-2秒的小片段, 然后用本文下面写的方法处理它们。

我确信有一种方法可以在网络摄像头的出口处叠加过滤器,但我找不到它,因为大量的高级框架不允许这样做。

我有一些要求:我需要从10.6支持Mac OS X,并且我不能使用超过5 MB大小的框架或库,因为处理是客户端的。

用于电影转换的NSOperation类

#import <Foundation/Foundation.h>
#import <Cocoa/Cocoa.h>
#import <QuartzCore/QuartzCore.h>
#import <QTKit/QTKit.h> // fixed case: "QTkit.h" fails on case-sensitive file systems
#import <Quartz/Quartz.h>

/// Callback used to report that one movie chunk has finished converting.
@protocol ConvertOperationProtocol <NSObject>
/// Called on the main thread with the index of the finished chunk.
- (void)movieWithNumberReady:(NSNumber *)number;
@end

/// NSOperation that applies a CIFilter chain to every frame of one recorded
/// movie chunk (/Users/Shared/msN.mov) and writes the filtered, flattened
/// result to /Users/Shared/pmsN.mov.
@interface MSMovieConvertOperation : NSOperation
{
    QTMovie* movieToRead;   // source chunk
    QTMovie* movieToWrite;  // filtered output
}
/// Index N of the chunk this operation converts.
@property (nonatomic) int numberOfMovie;
/// CIFilters applied, in order, to every frame. `copy` guards against the
/// caller mutating the array while operations are running.
@property (nonatomic, copy) NSArray* filtersArray;
/// Completion delegate. `assign` (not retained): a delegate must not be owned
/// by the operation; `weak` is unavailable because the project targets
/// Mac OS X 10.6, so the caller must outlive the operation.
@property (nonatomic, assign) NSObject<ConvertOperationProtocol>* ConvertOperationDelegate;
@end





#import "MSMovieConvertOperation.h"

@implementation MSMovieConvertOperation

@synthesize numberOfMovie,filtersArray;

// Converts one recorded chunk (/Users/Shared/msN.mov): steps through it frame
// by frame, mirrors each frame and runs the configured CIFilter chain over it,
// appends the filtered frames to a new movie, copies the source's sound track
// across, flattens the result to /Users/Shared/pmsN.mov, and finally notifies
// the delegate on the main thread.
-(void)main
{
    // Top-level pool: NSOperation -main runs on a queue thread that may not
    // drain autoreleased objects until the operation ends.
    @autoreleasepool {
        if (self.isCancelled) return;

        NSString* movToReadPath  = [NSString stringWithFormat:@"/Users/Shared/ms%d.mov",numberOfMovie];
        NSString* movToWritePath = [NSString stringWithFormat:@"/Users/Shared/pms%d",numberOfMovie];

        NSError* error = nil;
        movieToRead  = [QTMovie movieWithFile:movToReadPath error:&error];
        movieToWrite = [[QTMovie alloc] initToWritableFile:movToWritePath error:&error];
        if (movieToRead == nil || movieToWrite == nil)
        {
            // Was error:NULL before — failures were silent and the frame loop
            // would just be skipped with no diagnostics.
            NSLog(@"MSMovieConvertOperation %d: cannot open movies: %@", numberOfMovie, error);
            return;
        }

        // Loop-invariant codec attributes, hoisted out of the per-frame loop.
        NSDictionary* codecAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                         @"avc1", QTAddImageCodecType, nil];

        while(QTTimeCompare([movieToRead currentTime], [movieToRead duration]) != NSOrderedSame)
        {
            if (self.isCancelled) return;

            // Each iteration creates several large autoreleased images
            // (NSImage, TIFF data, CIImage); drain them per frame so memory
            // stays bounded for long chunks.
            @autoreleasepool {
                QTTime time = movieToRead.currentTime;
                NSImage* sourceImage = [movieToRead frameImageAtTime:time];

                // NSImage -> CIImage, mirror horizontally, then run the
                // configured filter chain.
                CIImage* inputImage = [CIImage imageWithData:[sourceImage
                                                              TIFFRepresentation]];
                inputImage = [inputImage imageByApplyingTransform:CGAffineTransformMakeScale(-1, 1)];
                for (CIFilter* configuredFilter in filtersArray)
                {
                    // Copied because the same CIFilter instances are shared by
                    // the concurrent operations.
                    CIFilter* filt = [configuredFilter copy];
                    [filt setValue:inputImage forKey:@"inputImage"];
                    inputImage = [filt valueForKey:@"outputImage"];
                }

                // Draw the filtered CIImage back into the NSImage.
                NSRect outputImageRect = NSRectFromCGRect([inputImage extent]);
                [sourceImage lockFocus];
                [inputImage drawAtPoint:NSZeroPoint fromRect:outputImageRect
                              operation:NSCompositeCopy fraction:1.0];
                [sourceImage unlockFocus];

                // Fixed per-frame duration (2000 units of the chunk's time
                // scale), as in the original capture settings.
                time.timeValue = 2000;
                [movieToWrite addImage:sourceImage forDuration:time withAttributes:codecAttributes];
                [movieToWrite setCurrentTime:[movieToWrite duration]];

                [movieToRead stepForward];
            }
        }

        // Find the source's SOUND track so the audio can be carried over into
        // the filtered movie. (The old comment claimed "video track", but the
        // code has always checked QTMediaTypeSound — that is what gets copied.
        // The unused video-frame counting was dead code and has been removed.)
        QTTrack* soundTrack = nil;
        for(QTTrack *track in [movieToRead tracks])
        {
            QTMedia *media = [track media];
            if (([[media attributeForKey:QTMediaTypeAttribute] isEqualToString:QTMediaTypeSound]))
            {
                soundTrack = track;
            }
        }

        if (soundTrack != nil) // guard: previously a nil track could be inserted
        {
            QTTimeRange videoRange = QTMakeTimeRange(QTZeroTime, [movieToWrite duration]);
            [movieToWrite insertSegmentOfTrack:soundTrack timeRange:videoRange atTime:QTZeroTime];
        }

        // Flatten to the final .mov path. Check the return value, not the
        // error pointer (Cocoa convention).
        NSString* pathToWrite = [NSString stringWithFormat:@"/Users/Shared/pms%d.mov",numberOfMovie];
        if (![movieToWrite writeToFile:pathToWrite
                        withAttributes:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
                                                                   forKey:QTMovieFlatten]
                                 error:&error])
        {
            NSLog(@"MSMovieConvertOperation %d: write failed: %@", numberOfMovie, error);
        }

        movieToRead = nil;
        movieToWrite = nil;

        // Report completion on the main thread (messaging nil is a no-op if
        // no delegate was set).
        [self.ConvertOperationDelegate performSelectorOnMainThread:@selector(movieWithNumberReady:)
                                                        withObject:[NSNumber numberWithInt:numberOfMovie]
                                                     waitUntilDone:NO];
    }
}

@end

以下是将小电影合并成一部大电影的代码,以及来自NSOperation类的回调

// Progress callback from one MSMovieConvertOperation: marks chunk `number`
// as converted and, once every chunk has reported in, kicks off the merge.
-(void)movieWithNumberReady:(NSNumber *)number
{
    [statusOfFilesConverting replaceObjectAtIndex:number.intValue withObject:@1];

    // Bail out as soon as any chunk is still pending (status 0).
    for (NSNumber *chunkStatus in statusOfFilesConverting)
    {
        if ([chunkStatus intValue] == 0)
        {
            return;
        }
    }

    // All chunks done — assemble the final movie.
    [self startMergeSmallMoviesIntoLargeOne];
}


// Kicks off conversion: one MSMovieConvertOperation per recorded chunk,
// at most two running concurrently. Completion is reported back to this
// object via -movieWithNumberReady:.
-(IBAction)convertVideoWithNSOperationQueue:(id)sender
{
    [self clearFiles]; // deleting all work copies of video files
    timeStart = [NSDate date];

    // One status slot per chunk; 0 = pending, flipped to 1 by the callback.
    statusOfFilesConverting = [[NSMutableArray alloc] init];
    for (int i = 0; i < _amountOfMovies; ++i) // calculated in advance the number of files
        [statusOfFilesConverting addObject:@0];

    // Hoisted out of the loop: the filter chain is the same for every chunk,
    // so fetch it from the delegate once instead of once per operation.
    NSArray* filters = [_movieDelegate getFiltersArray];

    NSOperationQueue* queue = [[NSOperationQueue alloc] init];
    [queue setMaxConcurrentOperationCount:2];
    for (int i = 0; i < _amountOfMovies; ++i)
    {
        MSMovieConvertOperation* op = [[MSMovieConvertOperation alloc] init];
        op.ConvertOperationDelegate = self;
        op.numberOfMovie = i;
        op.filtersArray = filters;
        [queue addOperation:op];
    }
}

// Merging: concatenates /Users/Shared/pms0.mov … pms(N-1).mov, in order,
// into a single flattened movie at /Users/Shared/outputMovie.mp4.
-(void)startMergeSmallMoviesIntoLargeOne
{
    NSLog(@"Start Merging Small movies");
    NSError *err = nil;

    // Cocoa convention: test the RETURN VALUE, not the error pointer —
    // *err may be left non-nil even when a call succeeds.
    QTMovie *myCombinedMovie = [[QTMovie alloc] initToWritableData:[NSMutableData data] error:&err];
    if (myCombinedMovie == nil)
    {
        NSLog(@"Error creating myCombinedMovie: %@", [err localizedDescription]);
        return;
    }

    for (int i = 0; i < _amountOfMovies; ++i)
    {
        NSURL *url = [NSURL fileURLWithPath:[NSString stringWithFormat:@"/Users/Shared/pms%d.mov",i]];

        err = nil; // reset: a stale error from a previous iteration must not abort the merge
        QTMovie *theMovie = [QTMovie movieWithURL:url error:&err];
        if (theMovie == nil)
        {
            NSLog(@"Error loading one of the movies: %@", [err localizedDescription]);
            return;
        }

        // Append the whole chunk at the current end of the combined movie.
        QTTimeRange timeRange = QTMakeTimeRange(QTZeroTime, [theMovie duration]);
        QTTime insertionTime = [myCombinedMovie duration];
        [myCombinedMovie insertSegmentOfMovie:theMovie timeRange:timeRange atTime:insertionTime];
    }

    NSDictionary *writeAttributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:YES], QTMovieFlatten, nil];
    // BOOL (not C99 bool) to match Cocoa's API; check the return value.
    BOOL success = [myCombinedMovie writeToFile:@"/Users/Shared/outputMovie.mp4" withAttributes:writeAttributes error:&err];
    if (!success)
    {
        NSLog(@"Error writing movie: %@", [err localizedDescription]);
        return;
    }

    NSLog(@"Video end processing!");
    timeFinish = [NSDate date];
    [self multiThreadRecordingFinish];
}

提前致谢!

0 个答案:

没有答案