AVFoundation 裁剪视频导致视频周围出现黑色区域的错误帧

时间:2018-10-29 17:29:52

标签: ios swift avfoundation

我正在使用一个库YPImagePicker来拾取和裁剪视频。并非每个视频都被错误地裁剪,但其中有些是错误的。该库使用AVFoundation裁剪具有特定帧和时间的视频。该库具有以下用于裁剪视频的代码:

/// Fetches the AVAsset backing `videoAsset`, crops it to `cropRect` via an
/// AVMutableComposition + AVMutableVideoComposition, exports the result to a
/// unique temp-file URL, and invokes `callback` with that URL on success.
///
/// - Parameters:
///   - videoAsset: The Photos asset to fetch and crop.
///   - cropRect: Desired crop rectangle in the video track's pixel space.
///   - callback: Called on the main queue with the exported file URL when the
///     export completes successfully. On failure, the error is only logged.
func fetchVideoUrlAndCrop(for videoAsset: PHAsset, cropRect: CGRect, callback: @escaping (URL) -> Void) {
    let videosOptions = PHVideoRequestOptions()
    videosOptions.isNetworkAccessAllowed = true
    imageManager?.requestAVAsset(forVideo: videoAsset, options: videosOptions) { asset, _, _ in
        do {
            guard let asset = asset else { print("⚠️ PHCachingImageManager >>> Don't have the asset"); return }

            // H.264 export requires even, integral render dimensions, and a crop
            // rect with a fractional origin makes the compositor sample outside
            // the source track — both show up as black/garbage borders on some
            // frames. Snap the rect to even pixel boundaries before composing.
            let alignedCropRect = CGRect(x: cropRect.origin.x.rounded(.down),
                                         y: cropRect.origin.y.rounded(.down),
                                         width: (cropRect.width / 2).rounded(.down) * 2,
                                         height: (cropRect.height / 2).rounded(.down) * 2)

            let assetComposition = AVMutableComposition()
            let trackTimeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)

            // 1. Insert the video track (required).

            guard let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first,
                let videoCompositionTrack = assetComposition
                    .addMutableTrack(withMediaType: .video,
                                     preferredTrackID: kCMPersistentTrackID_Invalid) else {
                                        print("⚠️ PHCachingImageManager >>> Problems with video track")
                                        return
            }
            try videoCompositionTrack.insertTimeRange(trackTimeRange, of: videoTrack, at: kCMTimeZero)

            // Audio is optional: a silent clip should still be croppable instead
            // of aborting the whole operation as the original guard did.
            if let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first,
                let audioCompositionTrack = assetComposition
                    .addMutableTrack(withMediaType: AVMediaType.audio,
                                     preferredTrackID: kCMPersistentTrackID_Invalid) {
                try audioCompositionTrack.insertTimeRange(trackTimeRange, of: audioTrack, at: kCMTimeZero)
            }

            // 2. Create the instruction spanning the whole composition.

            let mainInstructions = AVMutableVideoCompositionInstruction()
            mainInstructions.timeRange = trackTimeRange

            // 3. Layer instruction: transform the source so the (aligned) crop
            //    rect lands at the render origin.

            let layerInstructions = AVMutableVideoCompositionLayerInstruction(assetTrack: videoCompositionTrack)
            layerInstructions.setTransform(videoTrack.getTransform(cropRect: alignedCropRect), at: kCMTimeZero)
            layerInstructions.setOpacity(1.0, at: kCMTimeZero)
            mainInstructions.layerInstructions = [layerInstructions]

            // 4. Build the video composition with the even-aligned render size.

            let videoComposition = AVMutableVideoComposition()
            videoComposition.renderSize = alignedCropRect.size
            videoComposition.instructions = [mainInstructions]
            videoComposition.frameDuration = CMTimeMake(1, 30)

            // 5. Configure the export session.

            let exportSession = AVAssetExportSession(asset: assetComposition,
                                                     presetName: YPConfig.video.compression)
            exportSession?.outputFileType = YPConfig.video.fileType
            exportSession?.shouldOptimizeForNetworkUse = true
            exportSession?.videoComposition = videoComposition
            exportSession?.outputURL = URL(fileURLWithPath: NSTemporaryDirectory())
                .appendingUniquePathComponent(pathExtension: YPConfig.video.fileType.fileExtension)

            // 6. Export, driving progress reporting from a main-thread timer.

            DispatchQueue.main.async {
                self.exportTimer = Timer.scheduledTimer(timeInterval: 0.1,
                                                        target: self,
                                                        selector: #selector(self.onTickExportTimer),
                                                        userInfo: exportSession,
                                                        repeats: true)
            }

            exportSession?.exportAsynchronously(completionHandler: {
                DispatchQueue.main.async {
                    if let url = exportSession?.outputURL, exportSession?.status == .completed {
                        callback(url)
                    } else {
                        let error = exportSession?.error
                        print("error exporting video \(String(describing: error))")
                    }
                }
            })
        } catch let error {
            print("⚠️ PHCachingImageManager >>> \(error)")
        }
    }
}

有人知道为什么导出的视频会出现错误帧吗?欢迎提出建议。

预先感谢!

Reference image

0 个答案:

没有答案