I am developing a video application in Swift 3 for iOS. Basically, I have to merge video assets and audio assets into one with a fade effect and save it to the iPhone gallery. To achieve this, I am using the following method:
private func doMerge(arrayVideos: [AVAsset], arrayAudios: [AVAsset], animation: Bool, completion: @escaping Completion) -> Void {
    var insertTime = kCMTimeZero
    var audioInsertTime = kCMTimeZero
    var arrayLayerInstructions: [AVMutableVideoCompositionLayerInstruction] = []
    var outputSize = CGSize.init(width: 0, height: 0)

    // Determine video output size
    for videoAsset in arrayVideos {
        let videoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
        let assetInfo = orientationFromTransform(transform: videoTrack.preferredTransform)

        var videoSize = videoTrack.naturalSize
        if assetInfo.isPortrait == true {
            videoSize.width = videoTrack.naturalSize.height
            videoSize.height = videoTrack.naturalSize.width
        }
        outputSize = videoSize
    }

    // Init composition
    let mixComposition = AVMutableComposition.init()

    for index in 0..<arrayVideos.count {
        // Get video track
        guard let videoTrack = arrayVideos[index].tracks(withMediaType: AVMediaTypeVideo).first else { continue }

        // Get audio track
        var audioTrack: AVAssetTrack?
        if index < arrayAudios.count {
            if arrayAudios[index].tracks(withMediaType: AVMediaTypeAudio).count > 0 {
                audioTrack = arrayAudios[index].tracks(withMediaType: AVMediaTypeAudio).first
            }
        }

        // Init video & audio composition track
        let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

        do {
            let startTime = kCMTimeZero
            let duration = arrayVideos[index].duration

            // Add video track to video composition at specific time
            try videoCompositionTrack.insertTimeRange(CMTimeRangeMake(startTime, duration), of: videoTrack, at: insertTime)

            // Add audio track to audio composition at specific time
            var audioDuration = kCMTimeZero
            if index < arrayAudios.count {
                audioDuration = arrayAudios[index].duration
            }

            if let audioTrack = audioTrack {
                do {
                    try audioCompositionTrack.insertTimeRange(CMTimeRangeMake(startTime, audioDuration), of: audioTrack, at: audioInsertTime)
                }
                catch {
                    print(error.localizedDescription)
                }
            }

            // Add instruction for video track
            let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack, asset: arrayVideos[index], standardSize: outputSize, atTime: insertTime)

            // Hide video track before changing to new track
            let endTime = CMTimeAdd(insertTime, duration)

            if animation {
                let timeScale = arrayVideos[index].duration.timescale
                let durationAnimation = CMTime.init(seconds: 1, preferredTimescale: timeScale)

                layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: CMTimeRange.init(start: endTime, duration: durationAnimation))
            }
            else {
                layerInstruction.setOpacity(0, at: endTime)
            }

            arrayLayerInstructions.append(layerInstruction)

            // Increase the insert time
            audioInsertTime = CMTimeAdd(audioInsertTime, audioDuration)
            insertTime = CMTimeAdd(insertTime, duration)
        }
        catch {
            print("Load track error")
        }
    }

    // Main video composition instruction
    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, insertTime)
    mainInstruction.layerInstructions = arrayLayerInstructions

    // Main video composition
    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(1, 30)
    mainComposition.renderSize = outputSize

    // Export to file
    let path = NSTemporaryDirectory().appending("mergedVideo.mp4")
    let exportURL = URL.init(fileURLWithPath: path)

    // Remove file if existed
    FileManager.default.removeItemIfExisted(exportURL)

    // Init exporter
    let exporter = AVAssetExportSession.init(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    exporter?.outputURL = exportURL
    exporter?.outputFileType = AVFileTypeQuickTimeMovie // AVFileType.mp4
    exporter?.shouldOptimizeForNetworkUse = false // true
    exporter?.videoComposition = mainComposition

    // Do export
    exporter?.exportAsynchronously(completionHandler: {
        DispatchQueue.main.async {
            self.exportDidFinish(exporter: exporter, videoURL: exportURL, completion: completion)
        }
    })
}

fileprivate func exportDidFinish(exporter: AVAssetExportSession?, videoURL: URL, completion: @escaping Completion) -> Void {
    if exporter?.status == AVAssetExportSessionStatus.completed {
        print("Exported file: \(videoURL.absoluteString)")
        completion(videoURL, nil)
    }
    else if exporter?.status == AVAssetExportSessionStatus.failed {
        completion(videoURL, exporter?.error)
        print(exporter?.error as Any)
    }
}
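For reference, doMerge relies on three helpers not shown above: orientationFromTransform, videoCompositionInstructionForTrack, and a removeItemIfExisted extension on FileManager. A minimal sketch of typical implementations follows; these bodies are assumptions for readers who want to run the code, not the actual implementations from my project:

import AVFoundation
import UIKit

// Assumed sketch: reads orientation info out of a track's preferredTransform.
func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
    var assetOrientation = UIImageOrientation.up
    var isPortrait = false
    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
        assetOrientation = .right
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
        assetOrientation = .left
        isPortrait = true
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
        assetOrientation = .down
    }
    return (assetOrientation, isPortrait)
}

// Assumed sketch: builds a layer instruction that applies the source track's
// preferred transform, scaled to fit standardSize. Simplified: a full version
// would also center/letterbox the frame inside the render size.
func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset, standardSize: CGSize, atTime: CMTime) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let info = orientationFromTransform(transform: assetTrack.preferredTransform)
    let naturalSize = info.isPortrait
        ? CGSize(width: assetTrack.naturalSize.height, height: assetTrack.naturalSize.width)
        : assetTrack.naturalSize
    let scale = min(standardSize.width / naturalSize.width, standardSize.height / naturalSize.height)
    let transform = assetTrack.preferredTransform.concatenating(CGAffineTransform(scaleX: scale, y: scale))
    instruction.setTransform(transform, at: atTime)
    return instruction
}

// Assumed sketch: deletes a file if it already exists, ignoring "file not found".
extension FileManager {
    func removeItemIfExisted(_ url: URL) {
        if fileExists(atPath: url.path) {
            try? removeItem(at: url)
        }
    }
}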
Problem: In my exportDidFinish method, AVAssetExportSessionStatus is failed with the following error message:
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSLocalizedFailureReason=An unknown error occurred (-16976), NSLocalizedDescription=The operation could not be completed, NSUnderlyingError=0x1c065fb30 {Error Domain=NSOSStatusErrorDomain Code=-16976 "(null)"}}
Can anyone suggest what is going wrong?
Answer 0 (score: 1)
I had the exact same error, and only on an iPhone 5S simulator running iOS 11. I fixed it by changing the quality setting of the export operation from "highest" (AVAssetExportPresetHighestQuality) to "passthrough" (AVAssetExportPresetPassthrough), which keeps the original quality:
/// try to start an export session and set the path and file type
if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetPassthrough) { /* AVAssetExportPresetHighestQuality */
    exportSession.outputURL = videoOutputURL
    exportSession.outputFileType = AVFileType.mp4
    exportSession.shouldOptimizeForNetworkUse = true

    exportSession.exportAsynchronously(completionHandler: {
        switch exportSession.status {
        case .failed:
            if let _error = exportSession.error {
                // !!! used to fail over here with 11800, -16976 codes, if using AVAssetExportPresetHighestQuality. But works fine when using: AVAssetExportPresetPassthrough
                failure(_error)
            }
    ....
Hope this helps someone, because that error code and message are completely uninformative; it is just an "unknown error". Besides changing the quality setting, I would also try changing other settings and simplifying the export operation to identify the specific component that might be failing in it (some particular image, audio, or video asset). When you get a generic error message like this, it is best to use a process of elimination, cutting the code in half each time, to pin down the problem in logarithmic time.
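One concrete way to run that elimination is to ask AVFoundation up front which presets and file types the composition actually supports, before building the exporter. Below is a minimal sketch assuming the mixComposition from the question; exportPresets(compatibleWith:) and determineCompatibility(ofExportPreset:with:outputFileType:completionHandler:) are standard AVAssetExportSession class methods. One caveat worth noting: AVAssetExportPresetPassthrough skips re-encoding, so a videoComposition (and with it the fade animation from the question) will not be applied under that preset.

import AVFoundation

// List every preset AVFoundation considers compatible with this composition.
let compatiblePresets = AVAssetExportSession.exportPresets(compatibleWith: mixComposition)
print("Compatible presets: \(compatiblePresets)")

// Check one specific preset/file-type pairing before exporting with it.
AVAssetExportSession.determineCompatibility(ofExportPreset: AVAssetExportPresetHighestQuality,
                                            with: mixComposition,
                                            outputFileType: AVFileTypeMPEG4) { isCompatible in
    print("HighestQuality + MP4 compatible: \(isCompatible)")
}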