迅速裁剪视频

时间:2016-12-04 12:53:50

标签: ios swift

我在正方形 UIView 中录制视频,但导出时视频是 1080x1920 的全屏尺寸。现在我想知道如何把视频从全屏裁剪成 1:1 的正方形比例……

以下是我设置摄像机的方法:

// Capture-session setup: back camera + microphone in, movie file out,
// with a live preview layer filling `videoPreview`.
// NOTE(review): fragment — the enclosing function and the closing braces
// of the `do` / `if let session` blocks are outside this excerpt.
session = AVCaptureSession()
        // Pick the back camera from the available capture devices.
        for device in AVCaptureDevice.devices() {

            if let device = device as? AVCaptureDevice , device.position == AVCaptureDevicePosition.back {

                self.device = device
            }
        }

        // Attach every audio capture device as an input.
        // NOTE(review): `device` is optional here and `try!` will crash if
        // the input cannot be created — consider a guard + do/catch.
        for device in AVCaptureDevice.devices(withMediaType: AVMediaTypeAudio) {
            let device = device as? AVCaptureDevice
            let audioInput = try! AVCaptureDeviceInput(device: device)
            session?.addInput(audioInput)
        }

        do {

            if let session = session {
                videoInput = try AVCaptureDeviceInput(device: device)

                session.addInput(videoInput)

                videoOutput = AVCaptureMovieFileOutput()
                let totalSeconds = 60.0 //Total Seconds of capture time
                let timeScale: Int32 = 30 //FPS

                // Cap each recording at 60 s (at a 30 fps timescale).
                let maxDuration = CMTimeMakeWithSeconds(totalSeconds, timeScale)


                videoOutput?.maxRecordedDuration = maxDuration
                videoOutput?.minFreeDiskSpaceLimit = 1024 * 1024//SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME

                if session.canAddOutput(videoOutput) {
                    session.addOutput(videoOutput)
                }


                // Preview layer: aspect-fill inside the square preview view.
                // The preview crops visually, but the recorded file is still
                // full sensor resolution — hence the post-recording crop below.
                let videoLayer = AVCaptureVideoPreviewLayer(session: session)
                videoLayer?.frame = self.videoPreview.bounds

                videoLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill

                self.videoPreview.layer.addSublayer(videoLayer!)

                session.startRunning()

我已经看过其他一些帖子,但没有发现它们非常有用,而且大多数都在Obj C ...

如果有人可以帮助我或让我朝着正确的方向前进,我们非常感激!

2 个答案:

答案 0 :(得分:4)

首先,您需要使用AVCaptureFileOutputRecordingDelegate

视频录制完成后,您可以在 func capture( _ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error! ) 这个委托方法中执行裁剪过程。

这是我曾经实施过的裁剪功能的一个例子。您需要传入录制的视频的网址以及用于在裁剪过程完成后返回裁剪视频的新网址的回调。

   /// Crops a recorded full-screen video to a 1:1 square, rotates it to
   /// portrait, and exports the result as a new QuickTime movie.
   ///
   /// - Parameters:
   ///   - outputFileUrl: URL of the movie file that was just recorded.
   ///   - callback: Invoked on the main queue with the URL of the cropped
   ///     movie once the export completes.
   func cropVideo( _ outputFileUrl: URL, callback: @escaping ( _ newUrl: URL ) -> () )
   {
       // Get input clip. Bail out instead of crashing when the asset has
       // no video track (the original force-unwrapped `first!`).
       let videoAsset: AVAsset = AVAsset( url: outputFileUrl )
       guard let clipVideoTrack = videoAsset.tracks( withMediaType: AVMediaTypeVideo ).first else {
           print( "cropVideo: no video track in \(outputFileUrl)" )
           return
       }

       // Make the render area square: both sides use the track's height.
       let videoComposition = AVMutableVideoComposition()
       videoComposition.renderSize = CGSize( width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height )
       videoComposition.frameDuration = CMTimeMake( 1, self.framesPerSecond )

       // Rotate to portrait: translate so the centred square strip is kept,
       // then rotate 90° (CGFloat.pi/2 replaces the deprecated M_PI_2).
       let transformer = AVMutableVideoCompositionLayerInstruction( assetTrack: clipVideoTrack )
       let transform1 = CGAffineTransform( translationX: clipVideoTrack.naturalSize.height, y: -( clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height ) / 2 )
       let transform2 = transform1.rotated( by: CGFloat.pi / 2 )
       transformer.setTransform( transform2, at: kCMTimeZero )

       let instruction = AVMutableVideoCompositionInstruction()
       instruction.timeRange = CMTimeRangeMake( kCMTimeZero, CMTimeMakeWithSeconds( self.intendedVideoLength, self.framesPerSecond ) )

       instruction.layerInstructions = [transformer]
       videoComposition.instructions = [instruction]

       // Export. Remove any stale file first: AVAssetExportSession fails
       // when the destination already exists.
       let croppedOutputFileUrl = URL( fileURLWithPath: FileManager.getOutputPath( String.random() ) )
       try? FileManager.default.removeItem( at: croppedOutputFileUrl )

       // The export-session initializer is failable — guard instead of `!`.
       guard let exporter = AVAssetExportSession( asset: videoAsset, presetName: AVAssetExportPresetHighestQuality ) else {
           print( "cropVideo: could not create export session" )
           return
       }
       exporter.videoComposition = videoComposition
       exporter.outputURL = croppedOutputFileUrl
       exporter.outputFileType = AVFileTypeQuickTimeMovie

       exporter.exportAsynchronously( completionHandler: { () -> Void in
           // Surface export failures instead of silently handing back a URL
           // to a file that was never written.
           if exporter.status == .failed {
               print( "cropVideo: export failed — \(String( describing: exporter.error ))" )
           }
           DispatchQueue.main.async(execute: {
               callback( croppedOutputFileUrl )
           })
       })
   }

此外,这是我的getOutputPath方法的实现:

/// Builds the absolute path of a `.mov` file named `name` inside the
/// app's Documents directory.
///
/// - Parameter name: Base file name (without extension).
/// - Returns: `<Documents>/<name>.mov` as a plain path string.
func getOutputPath( _ name: String ) -> String
{
    // First (and only) match for the user-domain Documents directory.
    let documentsDir = NSSearchPathForDirectoriesInDomains( .documentDirectory, .userDomainMask, true )[ 0 ]
    return documentsDir + "/" + name + ".mov"
}

希望这有帮助。

答案 1 :(得分:0)

/// Re-composes `videoAsset` into a new MP4 in the temporary directory,
/// optionally fading it out and cropping the visible frame.
///
/// - Parameters:
///   - videoAsset: Source asset to re-compose.
///   - animation: When true, adds a 1-second fade-out and applies the
///     (hard-coded) crop rectangle; otherwise the track is hidden at its end.
func cropFrame(videoAsset:AVAsset, animation:Bool) -> Void {
    var insertTime = kCMTimeZero
    var arrayLayerInstructions:[AVMutableVideoCompositionLayerInstruction] = []
    var outputSize = CGSize.init(width: 0, height: 0)

    // Determine video output size. Bail out instead of crashing on an
    // asset with no video track (the original indexed `[0]` directly).
    guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else {
        print("cropFrame: asset has no video track")
        return
    }

    let assetInfo = orientationFromTransform(transform: videoTrack.preferredTransform)

    // naturalSize reports pre-rotation dimensions; swap for portrait clips.
    var videoSize = videoTrack.naturalSize
    if assetInfo.isPortrait == true {
        videoSize.width = videoTrack.naturalSize.height
        videoSize.height = videoTrack.naturalSize.width
    }

    if videoSize.height > outputSize.height {
        outputSize = videoSize
    }

    let defaultSize = CGSize(width: 1920, height: 1080) // Default video size

    if outputSize.width == 0 || outputSize.height == 0 {
        outputSize = defaultSize
    }

    // Silence sound (in case the video has no sound track). The bundled
    // "silence.mp3" was force-unwrapped before; a missing resource now
    // just leaves the composition without audio instead of crashing.
    var silenceSoundTrack: AVAssetTrack? = nil
    if let silenceURL = Bundle.main.url(forResource: "silence", withExtension: "mp3") {
        silenceSoundTrack = AVAsset(url: silenceURL).tracks(withMediaType: AVMediaType.audio).first
    }

    // Init composition
    let mixComposition = AVMutableComposition.init()


    // Use the asset's own audio track, falling back to silence.
    var audioTrack:AVAssetTrack?
    if videoAsset.tracks(withMediaType: AVMediaType.audio).count > 0 {
        audioTrack = videoAsset.tracks(withMediaType: AVMediaType.audio).first
    }
    else {
        audioTrack = silenceSoundTrack
    }

    // Init video & audio composition tracks. addMutableTrack is failable;
    // guard the video track instead of force-unwrapping it below.
    guard let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                                     preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
        print("cropFrame: could not add video composition track")
        return
    }

    let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,
                                                               preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

    do {
        let startTime = kCMTimeZero
        let duration = videoAsset.duration

        // Add video track to video composition at specific time
        try videoCompositionTrack.insertTimeRange(CMTimeRangeMake(startTime, duration),
                                                  of: videoTrack,
                                                  at: insertTime)

        // Add audio track to audio composition at specific time
        if let audioTrack = audioTrack {
            try audioCompositionTrack?.insertTimeRange(CMTimeRangeMake(startTime, duration),
                                                       of: audioTrack,
                                                       at: insertTime)
        }

        // Add scaling/orientation instruction for the video track.
        let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack,
                                                                   asset: videoAsset,
                                                                   standardSize: outputSize,
                                                                   atTime: insertTime)
        // Hide video track before changing to new track
        let endTime = CMTimeAdd(insertTime, duration)

        if animation {
            // Fade out over the final second of the clip.
            let timeScale = videoAsset.duration.timescale
            let durationAnimation = CMTime.init(seconds: 1, preferredTimescale: timeScale)
            layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: CMTimeRange.init(start: endTime, duration: durationAnimation))

            //**********======== CROP YOUR VIDEO FRAME HERE MANUALLY ========**********
            // NOTE(review): the 300-pt height is a hard-coded example value —
            // adjust the rectangle to the region you actually need.
            layerInstruction.setCropRectangle(CGRect(x: 0, y: 0, width: videoTrack.naturalSize.width, height: 300.0), at: startTime)
        } else {
            layerInstruction.setOpacity(0, at: endTime)
        }
        arrayLayerInstructions.append(layerInstruction)

        // Increase the insert time
        insertTime = CMTimeAdd(insertTime, duration)
    }
    catch {
        print("Load track error: \(error)")
    }


    // Main video composition instruction
    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, insertTime)
    mainInstruction.layerInstructions = arrayLayerInstructions

    // Main video composition
    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(1, 30)
    mainComposition.renderSize = outputSize

    // Export to file. Delete any previous output first: the export session
    // fails when the destination already exists, so re-running this method
    // would otherwise silently produce nothing.
    let path = NSTemporaryDirectory().appending("mergedVideo.mp4")
    let exportURL = URL.init(fileURLWithPath: path)
    try? FileManager.default.removeItem(at: exportURL)


    // Init exporter
    let exporter = AVAssetExportSession.init(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    exporter?.outputURL = exportURL
    exporter?.outputFileType = AVFileType.mp4
    exporter?.shouldOptimizeForNetworkUse = true
    exporter?.videoComposition = mainComposition

    // Do export; report failures instead of discarding them.
    exporter?.exportAsynchronously(completionHandler: {
        if exporter?.status == .failed {
            print("cropFrame: export failed — \(String(describing: exporter?.error))")
        }
    })
}


/// Builds a layer instruction that aspect-fills `track` into `standardSize`,
/// centred, while honouring the source clip's preferred orientation.
///
/// - Parameters:
///   - track: Composition track the instruction applies to.
///   - asset: Source asset whose first video track supplies size/transform.
///   - standardSize: Target render size of the composition.
///   - atTime: Time at which the transform takes effect.
/// - Returns: The configured layer instruction.
func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset, standardSize:CGSize, atTime: CMTime) -> AVMutableVideoCompositionLayerInstruction {
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    let sourceTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
    let info = orientationFromTransform(transform: sourceTrack.preferredTransform)

    let natural = sourceTrack.naturalSize
    // Aspect-fill: scale by whichever axis is the clip's shorter dimension.
    let fillRatio: CGFloat = natural.height < natural.width
        ? standardSize.height / natural.height
        : standardSize.width / natural.width
    let scale = CGAffineTransform(scaleX: fillRatio, y: fillRatio)

    // On-screen size after rotation — portrait clips swap width and height.
    let composedWidth  = (info.isPortrait ? natural.height : natural.width)  * fillRatio
    let composedHeight = (info.isPortrait ? natural.width  : natural.height) * fillRatio
    // Translate so the scaled frame is centred inside `standardSize`.
    let centre = CGAffineTransform(translationX: standardSize.width / 2 - composedWidth / 2,
                                   y: standardSize.height / 2 - composedHeight / 2)

    var combined = sourceTrack.preferredTransform.concatenating(scale).concatenating(centre)
    if !info.isPortrait && info.orientation == .down {
        // Upside-down landscape: replace the preferred transform with a 180° turn.
        combined = CGAffineTransform(rotationAngle: CGFloat(Double.pi)).concatenating(scale).concatenating(centre)
    }

    layerInstruction.setTransform(combined, at: atTime)
    return layerInstruction
}
/// Derives the image orientation and portrait-ness encoded in a video
/// track's preferred transform.
///
/// - Parameter transform: The track's `preferredTransform`.
/// - Returns: The matching orientation plus whether the clip is portrait;
///   unrecognised transforms fall back to `(.up, false)`.
func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
    // The four canonical rotation matrices map 1:1 onto orientations.
    switch (transform.a, transform.b, transform.c, transform.d) {
    case (0, 1.0, -1.0, 0):
        return (.right, true)    // 90° — portrait
    case (0, -1.0, 1.0, 0):
        return (.left, true)     // 270° — portrait
    case (-1.0, 0, 0, -1.0):
        return (.down, false)    // 180° — upside-down landscape
    default:
        return (.up, false)      // identity or anything unrecognised
    }
}

以这种方式致电:

    // Usage: wrap the recorded file URL in an AVAsset and run the crop.
    // NOTE(review): `VideoURL` must be defined by the caller (the URL of
    // the recorded movie); convention would name it `videoURL`.
    let avssets = AVAsset(url: VideoURL)
    self.cropFrame(videoAsset: avssets, animation: true)