I'm trying to adapt the iOS broadcast extension to record video instead of live streaming.
It seems possible, because you get a pixel buffer in the processSampleBuffer:withType: method.
I then wrote the encoding code below, but it fails when appending the buffer.
I'm new to both AVAssetWriter and app extension programming, so I can't figure out what's wrong here.
Am I doing something that isn't allowed in an extension? Or is my usage of AVAssetWriter wrong?
Any ideas would help. Thanks!
//
// SampleHandler.m
// The main class of the App Extension
//

#import "SampleHandler.h"
#import "VideoExporter.h"
#import <AVFoundation/AVFoundation.h>

@implementation SampleHandler {
    VideoExporter *exporter;
    NSDate *startDate;
}

- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *,NSObject *> *)setupInfo {
    // User has requested to start the broadcast. Setup info from the UI extension can be supplied but is optional.
    [self setupWriter];
}

- (void)setupWriter {
    NSFileManager *fm = [NSFileManager defaultManager];

    // Write the video into the shared app-group container so the host app can pick it up.
    NSURL *containerURL = [fm containerURLForSecurityApplicationGroupIdentifier:@"group.com.mycompany"];
    NSURL *libraryURL = [containerURL URLByAppendingPathComponent:@"Library" isDirectory:YES];
    NSURL *cachesURL = [libraryURL URLByAppendingPathComponent:@"Caches" isDirectory:YES];
    NSURL *outVideoURL = [cachesURL URLByAppendingPathComponent:@"output.mov"];
    if ([fm fileExistsAtPath:[outVideoURL path]]) {
        [fm removeItemAtPath:[outVideoURL path] error:nil];
    }

    exporter = [[VideoExporter alloc] initWithOutputURL:outVideoURL size:CGSizeMake(1280, 720) frameRate:30];
    exporter.delegate = self;
    [exporter beginExport];
    startDate = [NSDate date];
}

- (void)broadcastPaused {
    // User has requested to pause the broadcast. Samples will stop being delivered.
}

- (void)broadcastResumed {
    // User has requested to resume the broadcast. Sample delivery will resume.
}

- (void)broadcastFinished {
    // User has requested to finish the broadcast.
    NSLog(@"User requested finish writing");
    [exporter finishWriting];
}

- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            // Handle video sample buffer
            [exporter addCMSampleBuffer:sampleBuffer];
            break;
        case RPSampleBufferTypeAudioApp:
            // Handle audio sample buffer for app audio
            break;
        case RPSampleBufferTypeAudioMic:
            // Handle audio sample buffer for mic audio
            break;
        default:
            break;
    }
}

@end
//
// VideoExporter.m
// Helper class to write the video
//

#import "VideoExporter.h"

@implementation VideoExporter

@synthesize width, height;
@synthesize framesPerSecond;
@synthesize outputURL;
@synthesize delegate;

- (id)initWithOutputURL:(NSURL *)aURL size:(CGSize)size frameRate:(uint64_t)fps {
    if ((self = [super init])) {
        width = (int)round(size.width);
        height = (int)round(size.height);
        framesPerSecond = fps;
        outputURL = aURL;
    }
    return self;
}

- (void)beginExport {
    NSError *error = nil;
    writer = [[AVAssetWriter alloc] initWithURL:outputURL
                                       fileType:AVFileTypeQuickTimeMovie
                                          error:&error];
    NSAssert(writer != nil, @"Writer should not be nil");

    NSDictionary *outSettings = @{ AVVideoCodecKey  : AVVideoCodecTypeH264,
                                   AVVideoWidthKey  : @(width),
                                   AVVideoHeightKey : @(height) };
    writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:outSettings];

    // ReplayKit delivers 420v (bi-planar, video-range) pixel buffers.
    NSDictionary *pixelAttributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey :
                                           @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) };
    adaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput
                                                         sourcePixelBufferAttributes:pixelAttributes];
    [writer addInput:writerInput];

    BOOL started = [writer startWriting];
    NSAssert(started, @"Should start writing!");
    [writer startSessionAtSourceTime:kCMTimeZero];
}

- (void)addCMSampleBuffer:(CMSampleBufferRef)buf {
    if (writer.status == AVAssetWriterStatusUnknown) {
        NSLog(@"Writer status unknown!!");
    }
    [self appendCMSampleBuffer:buf];
}

- (void)finishWriting {
    [writerInput markAsFinished];

    // Block until the asynchronous finish completes, so the extension
    // process is not torn down while the file is still being written.
    dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
    int64_t longDuration = 1000;
    CMTime cmTime = CMTimeMake(longDuration, 1);
    [writer endSessionAtSourceTime:cmTime];
    [writer finishWritingWithCompletionHandler:^{
        // Call delegate method here
        dispatch_semaphore_signal(semaphore);
    }];
    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
}

#pragma mark - Private -

- (void)appendCMSampleBuffer:(CMSampleBufferRef)bufferRef {
    if (![writerInput isReadyForMoreMediaData]) {
        NSLog(@"WriterInput not ready! status = %ld, error=%@", (long)writer.status, writer.error);
        return;
    }
    // A CMSampleBufferRef is not a CVPixelBufferRef, so extract the
    // image buffer instead of casting the sample buffer directly.
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(bufferRef);
    if (pixelBuffer == NULL) {
        NSLog(@"Sample buffer contains no pixel buffer");
        return;
    }
    BOOL success = [adaptor appendPixelBuffer:pixelBuffer
                         withPresentationTime:CMTimeMake(frameCount++, (int32_t)framesPerSecond)];
    if (success == NO) {
        NSLog(@"Append buffer failed! status = %ld, error=%@", (long)writer.status, writer.error); // Always gets here
    }
}

@end
Answer 0 (score: 0)
I tested this behavior on iOS 13.7 (17H35). It seems that AVAssetWriter requires foreground privileges, which extensions do not have (source). The startWriting method returns false, and assetWriter.error is:
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={
    NSLocalizedFailureReason=An unknown error occurred (-17508),
    NSLocalizedDescription=The operation could not be completed,
    NSUnderlyingError=0x282a80120 {
        Error Domain=NSOSStatusErrorDomain Code=-17508 "(null)"
    }
}
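
One way to surface this failure immediately, instead of at the first append, is to check the return value of startWriting and log writer.error as soon as the writer is configured. Below is a minimal sketch, assuming the same 1280x720 H.264 setup as in the question; CheckedWriterForURL is a hypothetical helper name, not part of any API:

#import <AVFoundation/AVFoundation.h>

// Hypothetical helper: configures a writer the same way the question's
// beginExport does, but reports the startWriting failure up front.
static AVAssetWriter *CheckedWriterForURL(NSURL *outputURL) {
    NSError *error = nil;
    AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:outputURL
                                                      fileType:AVFileTypeQuickTimeMovie
                                                         error:&error];
    if (writer == nil) {
        NSLog(@"Could not create AVAssetWriter: %@", error);
        return nil;
    }

    NSDictionary *settings = @{ AVVideoCodecKey  : AVVideoCodecTypeH264,
                                AVVideoWidthKey  : @1280,
                                AVVideoHeightKey : @720 };
    AVAssetWriterInput *input = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                                               outputSettings:settings];
    [writer addInput:input];

    if (![writer startWriting]) {
        // In a broadcast extension on iOS 13.x this is where the
        // -11800 / -17508 error above appears; every later append also
        // fails, because the writer is already in the failed state.
        NSLog(@"startWriting failed: status = %ld, error = %@",
              (long)writer.status, writer.error);
        return nil;
    }
    return writer;
}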
Answer 1 (score: 0)
iOS 14: no problems using AVAssetWriter from a BroadcastUpload extension (tested with the Example project with Package).
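
For reference, a minimal sketch of the append path on iOS 14+, using the sample buffer's own presentation timestamp rather than a synthetic frame counter; writer, writerInput, adaptor, and a sessionStarted flag are assumed to be ivars configured as in the question:

- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // ReplayKit hands us a CMSampleBuffer; the adaptor wants its pixel buffer.
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL || !writerInput.isReadyForMoreMediaData) {
        return; // drop the frame rather than stall the extension
    }

    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    if (!sessionStarted) {
        // ReplayKit timestamps are host-time based, so start the session
        // at the first frame's timestamp instead of kCMTimeZero.
        [writer startSessionAtSourceTime:pts];
        sessionStarted = YES;
    }

    if (![adaptor appendPixelBuffer:pixelBuffer withPresentationTime:pts]) {
        NSLog(@"Append failed: status = %ld, error = %@",
              (long)writer.status, writer.error);
    }
}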