我有一段时长 5 分钟的视频,现在想在视频的某个时间段(例如第 5 秒到第 15 秒)叠加显示文字。
有人能帮帮我吗?我已经试过用下面的代码把文本叠加到图像上:
// Build a centered, white subtitle text layer and composite it over the
// video through an AVVideoCompositionCoreAnimationTool layer tree:
//
//   parentLayer
//     +- videoLayer    (the rendered video frames)
//     +- overlayLayer  (clipped container holding the subtitle layer)
//
// `size` is the render size of the video composition; `_subTitle1.text`
// supplies the subtitle string; `composition` is the AVMutableVideoComposition
// being configured (all declared elsewhere).
CATextLayer *subtitleLayer = [[CATextLayer alloc] init];
[subtitleLayer setFont:@"Helvetica-Bold"];   // CATextLayer accepts a font-name string
subtitleLayer.fontSize = 36;
subtitleLayer.frame = CGRectMake(0, 0, size.width, 100);
subtitleLayer.string = _subTitle1.text;
subtitleLayer.alignmentMode = kCAAlignmentCenter;
subtitleLayer.foregroundColor = [UIColor whiteColor].CGColor;

// Container that clips the subtitle to the video bounds.
CALayer *overlayLayer = [CALayer layer];
overlayLayer.frame = CGRectMake(0, 0, size.width, size.height);
overlayLayer.masksToBounds = YES;
[overlayLayer addSublayer:subtitleLayer];

// Parent tree: video at the bottom, overlay on top.
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];

composition.animationTool = [AVVideoCompositionCoreAnimationTool
    videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                            inLayer:parentLayer];
请问有人能告诉我,如何对视频实现同样的叠加文字效果吗?
答案 0(得分:0)
可以尝试用 AVFoundation.framework 按下面的方法逐帧编辑视频:
// Re-encodes the video at resourceURL frame by frame with AVAssetReader +
// AVAssetWriter so that each decoded frame can be drawn on (e.g. to burn in
// subtitle text) before being written back out. The edited movie is written
// to <NSTemporaryDirectory()>/<source file name>; audio is passed through
// untouched in a second pass after the video track finishes.
//
// On failure this posts EDITOR_FAILED_NOTI with the source path, clears
// queueInProgress and calls -removeCurrentVideo (project-side state declared
// elsewhere in this class).
//
// Fixes applied to the original:
//  - `NSError *error` was declared twice in the same scope (compile error).
//  - `CVPixelBufferRelease(imageBuffer)` over-released the frame buffer:
//    CMSampleBufferGetImageBuffer follows the Get rule (the sample buffer
//    owns it), so releasing the sample buffer alone is correct.
//  - `startSessionAtSourceTime:` was called a second time before the audio
//    pass; a writer allows only one session start.
//  - The audio drain loop never broke out after markAsFinished and could
//    spin forever.
//  - Dead code (unused CIImage, a CGDataProvider/color space created and
//    immediately destroyed) replaced with a real CGBitmapContext over the
//    locked pixel buffer, so drawing code can actually be inserted.
- (void)startEditingVideoAtURL:(NSURL *)resourceURL
{
    NSError *error = nil;  // single error slot for writer/reader setup

    // Destination: temp directory + original file name. Remove any stale
    // copy first, or AVAssetWriter will fail to create the file.
    NSURL *movieURL = [NSURL fileURLWithPath:
        [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), resourceURL.lastPathComponent]];
    [[NSFileManager defaultManager] removeItemAtPath:movieURL.path error:NULL];

    // Writer that assembles the edited frames into a QuickTime movie.
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:movieURL
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    AVAsset *avAsset = [[AVURLAsset alloc] initWithURL:resourceURL options:nil];

    // Output frame settings. NOTE(review): 480x480 is hard-coded here and in
    // the pixel-buffer attributes below; consider deriving both from the
    // source track's naturalSize.
    NSDictionary *videoSettings = @{
        AVVideoCodecKey  : AVVideoCodecH264,
        AVVideoWidthKey  : @480,
        AVVideoHeightKey : @480,
    };
    AVAssetWriterInput *videoWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];

    // BGRA buffers so CoreGraphics can draw straight into the frame bytes.
    NSDictionary *pixelBufferAttributes = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
        (id)kCVPixelBufferWidthKey           : @480,
        (id)kCVPixelBufferHeightKey          : @480,
    };
    AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor =
        [[AVAssetWriterInputPixelBufferAdaptor alloc]
                initWithAssetWriterInput:videoWriterInput
             sourcePixelBufferAttributes:pixelBufferAttributes];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    // Reader that decodes the source video frame by frame.
    AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:avAsset error:&error];
    AVAssetReaderTrackOutput *videoReaderOutput = nil;
    NSArray *videoTracks = [avAsset tracksWithMediaType:AVMediaTypeVideo];
    if (videoTracks.count > 0) {
        AVAssetTrack *videoTrack = videoTracks[0];
        // Carry the source orientation through to the output.
        videoWriterInput.transform = videoTrack.preferredTransform;
        NSDictionary *videoOptions =
            @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
        videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack
                                                             outputSettings:videoOptions];
        [reader addOutput:videoReaderOutput];
    } else {
        // No video track: report failure and abandon this item.
        [[NSNotificationCenter defaultCenter] postNotificationName:EDITOR_FAILED_NOTI
                                                            object:resourceURL.path];
        [reader cancelReading];
        queueInProgress = NO;
        [self removeCurrentVideo];
        return;
    }

    // Audio is passed through unmodified (nil outputSettings = passthrough).
    AVAssetWriterInput *audioWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                           outputSettings:nil];
    AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:avAsset error:&error];
    AVAssetReaderOutput *audioReaderOutput = nil;
    NSArray *audioTracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
    if (audioTracks.count > 0) {
        audioReaderOutput = [AVAssetReaderTrackOutput
            assetReaderTrackOutputWithTrack:audioTracks[0] outputSettings:nil];
        [audioReader addOutput:audioReaderOutput];
    }
    NSParameterAssert(audioWriterInput);
    NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
    audioWriterInput.expectsMediaDataInRealTime = NO;
    [videoWriter addInput:audioWriterInput];

    [videoWriter startWriting];
    // One session covers both the video and the later audio pass — do NOT
    // call startSessionAtSourceTime: again before the audio pass.
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    [reader startReading];

    dispatch_queue_t videoQueue = dispatch_queue_create("assetVideoWriterQueue", NULL);
    [videoWriterInput requestMediaDataWhenReadyOnQueue:videoQueue usingBlock:^{
        while ([videoWriterInput isReadyForMoreMediaData]) {
            CMSampleBufferRef sampleBuffer = NULL;
            if ([reader status] == AVAssetReaderStatusReading &&
                (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {
                CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
                // Get rule: sampleBuffer owns imageBuffer — never release
                // imageBuffer separately; CFRelease(sampleBuffer) is enough.
                CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

                CVPixelBufferLockBaseAddress(imageBuffer, 0);
                void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
                size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
                size_t width = CVPixelBufferGetWidth(imageBuffer);
                size_t height = CVPixelBufferGetHeight(imageBuffer);

                // Wrap the frame's pixels in a bitmap context; BGRA maps to
                // little-endian 32-bit with premultiplied alpha first.
                CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
                CGContextRef context = CGBitmapContextCreate(
                    baseAddress, width, height, 8, bytesPerRow, rgbColorSpace,
                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
                if (context) {
                    // >>> Add your text drawing code here (CoreText /
                    // CGContext drawing into `context`), gated on
                    // `presentationTime` for the 5s-15s window. <<<
                }
                CGContextRelease(context);
                CGColorSpaceRelease(rgbColorSpace);
                CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

                BOOL appended = [pixelBufferAdaptor appendPixelBuffer:imageBuffer
                                                 withPresentationTime:presentationTime];
                CFRelease(sampleBuffer);  // copyNextSampleBuffer follows the Create rule

                if (!appended) {
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [[NSNotificationCenter defaultCenter]
                            postNotificationName:EDITOR_FAILED_NOTI object:resourceURL.path];
                        [reader cancelReading];
                        queueInProgress = NO;
                        [self removeCurrentVideo];
                    });
                    break;
                }
            } else {
                // No more video samples available right now.
                [videoWriterInput markAsFinished];
                switch ([reader status]) {
                    case AVAssetReaderStatusReading:
                        // Reader may still be delivering other tracks.
                        break;

                    case AVAssetReaderStatusCompleted: {
                        if (!audioReaderOutput) {
                            // No audio track: we are done writing.
                            [videoWriter finishWritingWithCompletionHandler:^{
                                // TODO(review): notify callers of completion
                                // on the main queue if the UI needs it.
                            }];
                            break;
                        }
                        // Second pass: copy audio samples straight through.
                        [audioReader startReading];
                        dispatch_queue_t audioQueue =
                            dispatch_queue_create("assetAudioWriterQueue", NULL);
                        [audioWriterInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{
                            while (audioWriterInput.readyForMoreMediaData) {
                                CMSampleBufferRef nextBuffer = NULL;
                                if ([audioReader status] == AVAssetReaderStatusReading &&
                                    (nextBuffer = [audioReaderOutput copyNextSampleBuffer])) {
                                    [audioWriterInput appendSampleBuffer:nextBuffer];
                                    CFRelease(nextBuffer);
                                } else {
                                    [audioWriterInput markAsFinished];
                                    if ([audioReader status] == AVAssetReaderStatusCompleted) {
                                        [videoWriter finishWritingWithCompletionHandler:^{
                                        }];
                                    }
                                    break;  // leave the drain loop once finished
                                }
                            }
                        }];
                        break;
                    }

                    case AVAssetReaderStatusFailed:
                        // Surface the failure instead of swallowing it.
                        dispatch_async(dispatch_get_main_queue(), ^{
                            [[NSNotificationCenter defaultCenter]
                                postNotificationName:EDITOR_FAILED_NOTI object:resourceURL.path];
                            queueInProgress = NO;
                            [self removeCurrentVideo];
                        });
                        break;

                    default:
                        break;
                }
                break;  // leave the video drain loop
            }
        }
    }];
}
谢谢!