Exporting a time-lapse with AVAssetExportSession results in a black video

Asked: 2017-09-15 15:33:27

Tags: ios swift avfoundation photosframework

I need to be able to merge videos shot with the time-lapse feature of the iOS Camera app and export them as a single video.

However, even when I try to export a single, unmodified time-lapse video to the Photos library, it is saved as a completely black video (with the correct duration). Below is the sample code I wrote that simply exports one unmodified video (most of it adapted from a Ray Wenderlich tutorial):

@IBAction func saveVideo(_ sender: UIBarButtonItem) {
    // 1 - Early exit if there's no video file selected

    guard let videoAsset = self.avAsset else {
        let alert = UIAlertController(title: "Error", message: "Failed to load video asset.", preferredStyle: .alert)
        let cancelAction = UIAlertAction(title: "OK", style: .cancel, handler: nil)
        alert.addAction(cancelAction)
        self.present(alert, animated: true, completion: nil)

        return
    }

    // 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
    print("Preparing AVMutableComposition...")
    let mixComposition = AVMutableComposition()

    // 3 - Video track
    let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)

    do {
        if let videoAssetTrack = videoAsset.tracks(withMediaType: .video).first {
            try videoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAssetTrack, at: kCMTimeZero)
        }

        if let audioAssetTrack = videoAsset.tracks(withMediaType: .audio).first {
            let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
            try audioTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: audioAssetTrack, at: kCMTimeZero)
        }
    } catch let error as NSError {
        self.presentAlert(title: "Export Error", message: "Unable to complete export due to the following error: \(error). Please try again.", block: nil)
        print("error: \(error)")
    }

    // 3.1 - Create AVMutableVideoCompositionInstruction
    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration)

    // 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
    let videoLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack!)
    let videoAssetTrack = videoAsset.tracks(withMediaType: .video).first
    var assetOrientation: UIImageOrientation = .up
    var isPortrait = false
    let t = videoAssetTrack!.preferredTransform
    if t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0 {
        assetOrientation = .right
        isPortrait = true
    } else if t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0 {
        assetOrientation = .left
        isPortrait = true
    } else if t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0 {
        assetOrientation = .up
    } else if t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0 {
        assetOrientation = .down
    }

    videoLayerInstruction.setTransform(videoAssetTrack!.preferredTransform, at: kCMTimeZero)
    videoLayerInstruction.setOpacity(0.0, at: videoAsset.duration)

    // 3.3 - Add instructions
    mainInstruction.layerInstructions = [videoLayerInstruction]

    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(1, 30)

    var naturalSize: CGSize
    if isPortrait {
        naturalSize = CGSize(width: videoAssetTrack!.naturalSize.height, height: videoAssetTrack!.naturalSize.width)
    } else {
        naturalSize = videoAssetTrack!.naturalSize
    }

    mainComposition.renderSize = CGSize(width: naturalSize.width, height: naturalSize.height)

    // set up file destination
    let tempName = "temp-thread.mov"
    let tempURL = URL(fileURLWithPath: (NSTemporaryDirectory() as NSString).appendingPathComponent(tempName))
    do {
        if FileManager.default.fileExists(atPath: tempURL.path) {
            try FileManager.default.removeItem(at: tempURL)
        }
    } catch {
        print("Error removing temp file.")
    }
    // create final video using export session
    guard let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
    exportSession.outputURL = tempURL
    exportSession.outputFileType = AVFileType.mov
    exportSession.shouldOptimizeForNetworkUse = true
    exportSession.videoComposition = mainComposition
    print("Exporting video...")
    exportSession.exportAsynchronously {
        DispatchQueue.main.async {
            switch exportSession.status {
            // Success
            case .completed:
                print("Saving to Photos Library...")
                PHPhotoLibrary.shared().performChanges({
                    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportSession.outputURL!)
                }) { success, error in
                    if success {
                        print("Added video to library")
                    } else {
                        print("Failed to add video to library - error: \(String(describing: error?.localizedDescription))")
                    }

                    let _ = try? FileManager.default.removeItem(at: tempURL)
                }
                print("Export session completed")
            // Status other than success
            case .cancelled, .exporting, .failed, .unknown, .waiting:
                print("Export status: \(exportSession.status.rawValue)")
                print("Reason: \(String(describing: exportSession.error))")
            }
        }
    }
}

Why does the resulting video come out completely black? I can't find much documentation on Apple's time-lapse videos, so I'm not sure why they would behave differently from regular video files. They appear to have a frame rate of 30 fps, and if I inspect one on my Mac it is just an ordinary QuickTime movie file with no audio track. Any ideas? Exporting any other video with this code (even one without audio) works fine.
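For reference, here is a minimal diagnostic sketch (not part of the original question; assetURL is a placeholder for the time-lapse file's URL) that prints the same information from code - the tracks, nominal frame rate, natural size, and the video track's preferredTransform, which turns out to matter for the answer below:

import AVFoundation

func inspectAsset(at assetURL: URL) {
    let asset = AVAsset(url: assetURL)
    print("Duration: \(CMTimeGetSeconds(asset.duration)) s")
    for track in asset.tracks {
        // Each AVAssetTrack reports its media type; a time-lapse typically has a single video track and no audio.
        print("Track \(track.trackID): \(track.mediaType.rawValue)")
        if track.mediaType == .video {
            print("  nominalFrameRate: \(track.nominalFrameRate)")
            print("  naturalSize: \(track.naturalSize)")
            print("  preferredTransform: \(track.preferredTransform)")
        }
    }
}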

1 Answer:

Answer 0 (score: 0)

The problematic code is:

videoLayerInstruction.setTransform(videoAssetTrack!.preferredTransform, at: kCMTimeZero)

This transform is only correct for the "up" (default) orientation. For any other orientation, preferredTransform rotates the frame about the origin without translating it back into the render rectangle, so the composition's visible area stays empty and the exported video comes out black. You need to build the proper transform for each orientation, for example:

var transform = videoAssetTrack.preferredTransform
// Right
if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
    isPortrait = true
    let rotate = CGAffineTransform.identity.translatedBy(x: videoAssetTrack.naturalSize.height - videoAssetTrack.preferredTransform.tx, y: -videoAssetTrack.preferredTransform.ty)
    transform = videoAssetTrack.preferredTransform.concatenating(rotate)
}
// Left
else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
    isPortrait = true
    let rotate = CGAffineTransform.identity.translatedBy(x:  -videoAssetTrack.preferredTransform.tx, y: videoAssetTrack.naturalSize.width - videoAssetTrack.preferredTransform.ty)
    transform = videoAssetTrack.preferredTransform.concatenating(rotate)
}
// Up
else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
    transform = videoAssetTrack.preferredTransform
}
// Down
else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
    let rotate = CGAffineTransform.identity.translatedBy(x: videoAssetTrack.naturalSize.width - videoAssetTrack.preferredTransform.tx, y: videoAssetTrack.naturalSize.height - videoAssetTrack.preferredTransform.ty)
    transform = videoAssetTrack.preferredTransform.concatenating(rotate)
}

videoLayerInstruction.setTransform(transform, at: kCMTimeZero)
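For readability, the same orientation handling can be pulled into a helper. This is just a sketch built from the snippet above (the correctedTransform(for:) name is mine, not from the answer), which the question's step 3.2 could call instead of passing preferredTransform directly:

import AVFoundation

func correctedTransform(for track: AVAssetTrack) -> (transform: CGAffineTransform, isPortrait: Bool) {
    let t = track.preferredTransform
    let size = track.naturalSize

    // Right (portrait): rotate, then translate the frame back into the render rectangle.
    if t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0 {
        let translate = CGAffineTransform.identity.translatedBy(x: size.height - t.tx, y: -t.ty)
        return (t.concatenating(translate), true)
    }
    // Left (portrait, upside down)
    if t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0 {
        let translate = CGAffineTransform.identity.translatedBy(x: -t.tx, y: size.width - t.ty)
        return (t.concatenating(translate), true)
    }
    // Down (landscape, rotated 180 degrees)
    if t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0 {
        let translate = CGAffineTransform.identity.translatedBy(x: size.width - t.tx, y: size.height - t.ty)
        return (t.concatenating(translate), false)
    }
    // Up (default): the preferred transform is already correct.
    return (t, false)
}

Usage in the question's code would then look something like:

let (transform, isPortrait) = correctedTransform(for: videoAssetTrack!)
videoLayerInstruction.setTransform(transform, at: kCMTimeZero)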