视频在Swift中导出不正确的视频长度

时间:2019-03-19 04:57:28

标签: ios swift video

我已成功从所有帧的图像持续时间= 1秒的图像阵列中创建了Video。但是我想对每个图像使用不同的持续时间,exp:0.1s,0.2s,但是不能正常工作。

                while (!self.selectedPhotosArray.isEmpty) {
                if (videoWriterInput.isReadyForMoreMediaData) {
                    let nextDicData = self.selectedPhotosArray.remove(at: 0)
                    if let nextImage = nextDicData["img"] as? UIImage
                    {
                        var frameDuration = CMTimeMake(Int64(0 * 10000), fps)
                        if let timeVl = nextDicData["time"] as? Float{
                               framePerSecond = Int64(timeVl * 10000)

                        }

                        frameDuration =  CMTimeMake(framePerSecond ,fps)

                        let lastFrameTime = CMTimeMake(Int64(lastTimeVl), fps)
                        let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
                        var pixelBuffer: CVPixelBuffer? = nil
                        let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)
                        if let pixelBuffer = pixelBuffer, status == 0 {
                            let managedPixelBuffer = pixelBuffer
                            CVPixelBufferLockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                            let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
                            let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                            let context = CGContext(data: data, width: Int(self.outputSize.width), height: Int(self.outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(managedPixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)
                            context!.clear(CGRect(x: 0, y: 0, width: CGFloat(self.outputSize.width), height: CGFloat(self.outputSize.height)))
                            let horizontalRatio = CGFloat(self.outputSize.width) / nextImage.size.width
                            let verticalRatio = CGFloat(self.outputSize.height) / nextImage.size.height
                            //let aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
                            let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit
                            let newSize: CGSize = CGSize(width: nextImage.size.width, height: nextImage.size.height)
                            let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
                            let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0

                            context?.draw(nextImage.cgImage!, in: CGRect(x: 0, y: 0, width: newSize.width, height: newSize.height))
                            CVPixelBufferUnlockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                            appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                        } else {
                            print("Failed to allocate pixel buffer")
                            appendSucceeded = false
                        }
                    }

                }
                if !appendSucceeded {
                    break
                }
                frameCount += 1
               lastTimeVl += framePerSecond

            }

0 个答案:

没有答案