I want to export some videos with a custom creation date to .mov files. The problem is that I have only managed to change one of the dates stored in the file. This is the ffmpeg output for a video created this way:
  Metadata:
    major_brand     : qt
    minor_version   : 0
    compatible_brands: qt
    creation_time   : 2015-02-04 13:04:41
    comment         : EASTMAN KODAK COMPANY KODAK CX7220 DIGITAL CAMERA
    comment-eng     : EASTMAN KODAK COMPANY KODAK CX7220 DIGITAL CAMERA
    encoder         : Lavf55.12.100
    date            : 2004-01-01T00:00:00+0100
  Duration: 00:00:26.20, start: 0.092880, bitrate: 1506 kb/s
    Stream #0:0(eng): Video: h264 (High) (avc1 / 0x31637661), yuvj420p, 320x240, 1430 kb/s, 14.92 fps, 15 tbr, 15360 tbn, 30 tbc
    Metadata:
      creation_time   : 2015-02-04 13:04:41
      handler_name    : Core Media Data Handler
    Stream #0:1(eng): Audio: aac (mp4a / 0x6134706D), 11025 Hz, mono, fltp, 66 kb/s
    Metadata:
      creation_time   : 2015-02-04 13:04:41
      handler_name    : Core Media Data Handler
Only the "date" attribute is correct. I want to set all the other dates to that same value: the top-level creation_time and the creation_time in every stream.
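For reference, this is roughly how I check which of those dates AVFoundation itself lets me see; it is just a sketch (it assumes the same fileURL as in the code below and reads the properties synchronously):

#import <AVFoundation/AVFoundation.h>

// Sketch: dump every date-related metadata item AVFoundation reports for the
// asset and its tracks, to see which of the values from the ffmpeg output are
// actually reachable from this side.
static void DumpAssetDates(NSURL *fileURL)
{
    AVAsset *asset = [AVAsset assetWithURL:fileURL];

    // File-level creation date item (this seems to correspond to the "date" entry).
    AVMetadataItem *creationDateItem = asset.creationDate;
    NSLog(@"asset.creationDate = %@ (%@)", creationDateItem.dateValue, creationDateItem.stringValue);

    // All file-level metadata items that carry a date.
    for (AVMetadataItem *item in asset.metadata) {
        if (item.dateValue || [item.commonKey isEqual:AVMetadataCommonKeyCreationDate]) {
            NSLog(@"asset metadata: key=%@ keySpace=%@ value=%@", item.key, item.keySpace, item.value);
        }
    }

    // Per-track metadata; the per-stream creation_time that ffmpeg prints does
    // not necessarily show up here at all.
    for (AVAssetTrack *track in asset.tracks) {
        for (AVMetadataItem *item in track.metadata) {
            NSLog(@"track %d metadata: key=%@ keySpace=%@ value=%@", (int)track.trackID, item.key, item.keySpace, item.value);
        }
    }
}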
This is more or less the code I am currently using:
AVAsset *asset = [AVAsset assetWithURL:fileURL]; // Asset for the video file being processed (input)
// Gather information about the file
NSString *fileDateString = [fileURL.lastPathComponent substringToIndex:19]; // Date derived from the file name (NSString)
NSDate *suggestedDate = [dateFormatter1 dateFromString:fileDateString]; // Date derived from the file name (NSDate)
NSString *oldLocationString = nil; // GPS location stored in the file
NSDate *oldCreationDate = nil; // Creation date stored in the file
NSDate *oldModificationDate = nil; // Modification date stored in the file
// Extract the metadata stored in the file
NSArray *metadata = asset.metadata; // Original metadata stored in the file
NSMutableArray *newMetadata = [metadata mutableCopy]; // New, modified metadata
// Find the date and location items in the original metadata and remove them from the new metadata
for (AVMutableMetadataItem *metadataItem in metadata) {
NSString *commonKey = metadataItem.commonKey;
NSString *stringValue = metadataItem.stringValue;
NSDate *dateValue = metadataItem.dateValue;
if ([commonKey isEqualToString:AVMetadataCommonKeyLocation] && stringValue) {
oldLocationString = stringValue;
[newMetadata removeObject:metadataItem];
}
else if ([commonKey isEqualToString:AVMetadataCommonKeyCreationDate] && dateValue) {
oldCreationDate = dateValue;
[newMetadata removeObject:metadataItem];
}
else if ([commonKey isEqualToString:AVMetadataCommonKeyLastModifiedDate] && dateValue) {
oldModificationDate = dateValue;
[newMetadata removeObject:metadataItem];
}
}
// Determine the correct creation date from the file name or from the dates stored in the original metadata
NSDate *selectedDate = nil;
if (suggestedDate) {
selectedDate = suggestedDate;
}
else if (oldCreationDate) {
selectedDate = oldCreationDate;
}
else if (oldModificationDate) {
selectedDate = oldModificationDate;
}
// Determine the correct location from the data stored in the original metadata
NSString *selectedLocation = nil;
if (oldLocationString) {
selectedLocation = oldLocationString;
}
// Add the determined creation date to the new metadata
if (selectedDate) {
NSString *newDataString = [dateFormatter5 stringFromDate:selectedDate];
AVMutableMetadataItem *metaItem = [AVMutableMetadataItem metadataItem];
metaItem.key = AVMetadataCommonKeyCreationDate;
metaItem.keySpace = AVMetadataKeySpaceCommon;
metaItem.value = newDataString;
[newMetadata addObject:metaItem];
}
// Add the determined location to the new metadata
if (oldLocationString) {
AVMutableMetadataItem *metaItem = [AVMutableMetadataItem metadataItem];
metaItem.key = AVMetadataCommonKeyLocation;
metaItem.keySpace = AVMetadataKeySpaceCommon;
metaItem.value = oldLocationString;
[newMetadata addObject:metaItem];
}
// Build the new output file path
NSString *dateString1 = [dateFormatter1 stringFromDate:selectedDate];
NSString *fileExt = fileURL.pathExtension;
NSString *newFileName = [NSString stringWithFormat:@"%@ (%06lu)", dateString1, (unsigned long)index];
NSString *newFilePath = [newLibraryPath stringByAppendingPathComponent:[fileURL.path substringFromIndex:libraryURL.path.length]];
newFilePath = [[[newFilePath substringToIndex:newFilePath.length - newFilePath.lastPathComponent.length] stringByAppendingPathComponent:newFileName] stringByAppendingPathExtension:fileExt];
NSString *newFileDir = [newFilePath substringToIndex:newFilePath.length - newFilePath.lastPathComponent.length];
// Create the required directories
BOOL isDirectory;
if (![fileManager fileExistsAtPath:newFileDir isDirectory:&isDirectory]) {
[fileManager createDirectoryAtPath:newFileDir withIntermediateDirectories:YES attributes:nil error:nil];
}
// Export the video with the new metadata
AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:asset presetName:AVAssetExportPresetPassthrough];
exportSession.metadata = newMetadata;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputURL = [NSURL fileURLWithPath:newFilePath];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
dispatch_semaphore_t sema = dispatch_semaphore_create(0);
[exportSession exportAsynchronouslyWithCompletionHandler:^{
dispatch_semaphore_signal(sema);
}];
dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER);
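One variation of the date step above that I am not sure about is writing the item with an explicit QuickTime metadata identifier instead of the common key space. A minimal, untested sketch, reusing dateFormatter5, selectedDate and newMetadata from the code above:

// Untested sketch: build the creation-date item with the QuickTime metadata
// identifier instead of AVMetadataCommonKeyCreationDate. Whether this touches
// the movie/track-level creation_time fields is exactly what I don't know.
AVMutableMetadataItem *creationDateItem = [AVMutableMetadataItem metadataItem];
creationDateItem.identifier = AVMetadataIdentifierQuickTimeMetadataCreationDate;
creationDateItem.value = [dateFormatter5 stringFromDate:selectedDate];
[newMetadata addObject:creationDateItem];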
Alternatively, here is different code that does basically the same thing but has access to all the streams (tracks); maybe someone can help with that one instead:
// Creating the Asset Reader
NSError *outError;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:asset error:&outError];
BOOL success = (assetReader != nil);
assert(success);
// Creating the Asset Writer
NSURL *outputURL = [NSURL fileURLWithPath:newFilePath];
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&outError];
success = (assetWriter != nil);
assert(success);
// Setting Up the Asset Reader Outputs
NSMutableArray *outputObjects = [NSMutableArray arrayWithCapacity:asset.tracks.count];
for (AVAssetTrack *videoTrack in [asset tracksWithMediaType:AVMediaTypeVideo]) {
AVAssetReaderOutput *videoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:nil];
if ([assetReader canAddOutput:videoTrackOutput]) {
[assetReader addOutput:videoTrackOutput];
}
AVAssetWriterInput *assetVideoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:nil];
if ([assetWriter canAddInput:assetVideoWriterInput]) {
[assetWriter addInput:assetVideoWriterInput];
}
assetVideoWriterInput.transform = videoTrack.preferredTransform;
[outputObjects addObject:@{@"Output" : videoTrackOutput, @"Input" : assetVideoWriterInput}];
}
for (AVAssetTrack *audioTrack in [asset tracksWithMediaType:AVMediaTypeAudio]) {
AVAssetReaderOutput *audioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
if ([assetReader canAddOutput:audioTrackOutput]) {
[assetReader addOutput:audioTrackOutput];
}
AVAssetWriterInput *assetAudioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil];
if ([assetWriter canAddInput:assetAudioWriterInput]) {
[assetWriter addInput:assetAudioWriterInput];
}
assetAudioWriterInput.transform = audioTrack.preferredTransform;
[outputObjects addObject:@{@"Output" : audioTrackOutput, @"Input" : assetAudioWriterInput}];
}
// Setup metadata
assetWriter.metadata = newMetadata;
// Writing Media Data
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];
[assetReader startReading];
dispatch_semaphore_t sema1 = dispatch_semaphore_create(0);
// Use one serial queue for all inputs so the shared counter below is never decremented concurrently
dispatch_queue_t mediaDataQueue = dispatch_queue_create("media.data.queue", DISPATCH_QUEUE_SERIAL);
__block NSUInteger numberOfOutputs = outputObjects.count;
for (NSDictionary *outputObjectDictionary in outputObjects) {
AVAssetWriterInput *assetWriterInput = outputObjectDictionary[@"Input"];
AVAssetReaderOutput *assetReaderOutput = outputObjectDictionary[@"Output"];
[assetWriterInput requestMediaDataWhenReadyOnQueue:mediaDataQueue usingBlock:^{
while ([assetWriterInput isReadyForMoreMediaData]) {
CMSampleBufferRef nextSampleBuffer = [assetReaderOutput copyNextSampleBuffer];
if (nextSampleBuffer) {
[assetWriterInput appendSampleBuffer:nextSampleBuffer];
CFRelease(nextSampleBuffer);
nextSampleBuffer = nil;
}
else {
[assetWriterInput markAsFinished];
if (--numberOfOutputs == 0) {
dispatch_semaphore_signal(sema1);
}
break;
}
}
}];
}
dispatch_semaphore_wait(sema1, DISPATCH_TIME_FOREVER);
dispatch_semaphore_t sema2 = dispatch_semaphore_create(0);
[assetWriter finishWritingWithCompletionHandler:^{
NSLog(@"DONE");
dispatch_semaphore_signal(sema2);
}];
dispatch_semaphore_wait(sema2, DISPATCH_TIME_FOREVER);
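My assumption for this reader/writer version is that the per-stream dates would have to be attached through the metadata property of each AVAssetWriterInput. A rough, untested sketch of what I mean (it would have to run before [assetWriter startWriting] and reuses dateFormatter5, selectedDate and outputObjects from above):

// Untested sketch: give every writer input its own creation-date item, on the
// assumption that this is what ends up as the per-track metadata.
AVMutableMetadataItem *trackDateItem = [AVMutableMetadataItem metadataItem];
trackDateItem.key = AVMetadataQuickTimeMetadataKeyCreationDate;
trackDateItem.keySpace = AVMetadataKeySpaceQuickTimeMetadata;
trackDateItem.value = [dateFormatter5 stringFromDate:selectedDate];
for (NSDictionary *outputObjectDictionary in outputObjects) {
    AVAssetWriterInput *assetWriterInput = outputObjectDictionary[@"Input"];
    assetWriterInput.metadata = @[trackDateItem]; // must be set before writing starts
}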
Please help.