因此,我正在尝试使用此应用程序将大量大约30张图像转换为视频。
我执行转换的代码基于我在以下问题上看到的以下代码:Making video from UIImage array with different transition animations
在尝试进行这种类型的转换时,我经常看到的一个问题是AVAssetsWriter的输出存在某种问题,但这对我来说似乎可以。
我不知道这是否可能是问题,但是如果我在启动videoWriter之前检查pixelBufferPool是否为null,则说它为null,但是在启动后却说它不为空。
这是我进行转换的代码:
var outputSize = CGSize(width: 1920, height: 1280)
let imagesPerSecond: TimeInterval = 0.3 //each image will be stay for 3 secs
var selectedPhotosArray = [UIImage()]
let audioIsEnabled: Bool = false //if your video has no sound
var asset: AVAsset!
var videoCriado : Bool = false
var publicId : String?
var videoPlayer : AVPlayer?
func buildVideoFromImageArray(imageArrayToVideoURL: URL, completion: @escaping (AVPlayer) -> ()) {
removeFileAtURLIfExists(url: imageArrayToVideoURL as NSURL)
guard let videoWriter = try? AVAssetWriter(outputURL: imageArrayToVideoURL as URL, fileType: AVFileType.mp4) else {
fatalError("AVVideoCodecType.h264 error")
}
let outputSettings = [AVVideoCodecKey : AVVideoCodecType.h264, AVVideoWidthKey : NSNumber(value: Float(outputSize.width)), AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]
guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
fatalError("Negative : Can't apply the Output settings...")
}
let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
print(videoWriter.status.rawValue)
print(videoWriter.outputURL)
let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)), kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))]
let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
if videoWriter.canAdd(videoWriterInput) {
videoWriter.add(videoWriterInput)
}
if videoWriter.startWriting() {
print(videoWriter.status.rawValue)
let zeroTime = CMTimeMake(value: Int64(imagesPerSecond),timescale: Int32(1))
videoWriter.startSession(atSourceTime: zeroTime)
assert(pixelBufferAdaptor.pixelBufferPool != nil)
let media_queue = DispatchQueue(label: "mediaInputQueue")
videoWriterInput.requestMediaDataWhenReady(on: media_queue, using: { () -> Void in
let fps: Int32 = 1
let framePerSecond: Int64 = Int64(self.imagesPerSecond)
let frameDuration = CMTimeMake(value: Int64(self.imagesPerSecond), timescale: fps)
var frameCount: Int64 = 0
var appendSucceeded = true
while (!self.selectedPhotosArray.isEmpty) {
if (videoWriterInput.isReadyForMoreMediaData) {
let nextPhoto = self.selectedPhotosArray.remove(at: 0)
let lastFrameTime = CMTimeMake(value: frameCount * framePerSecond, timescale: fps)
let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
var pixelBuffer: CVPixelBuffer? = nil
let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)
if let pixelBuffer = pixelBuffer, status == 0 {
let managedPixelBuffer = pixelBuffer
CVPixelBufferLockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let context = CGContext(data: data, width: Int(self.outputSize.width), height: Int(self.outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(managedPixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)
context!.clear(CGRect(x: 0, y: 0, width: CGFloat(self.outputSize.width), height: CGFloat(self.outputSize.height)))
let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
//let aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit
let newSize: CGSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0
context?.draw(nextPhoto.cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))
CVPixelBufferUnlockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
} else {
print("Failed to allocate pixel buffer")
appendSucceeded = false
}
}
if !appendSucceeded {
break
}
frameCount += 1
}
videoWriterInput.markAsFinished()
videoWriter.finishWriting { () -> Void in
print("-----video1 url = \(imageArrayToVideoURL)")
self.asset = AVAsset(url: imageArrayToVideoURL)
self.videoPlayer = AVPlayer(url: imageArrayToVideoURL)
//self.videoCriado = true
//self.resultUrl = self.exportVideoWithAnimation()
completion(self.videoPlayer!)
//self.exportVideoWithAnimation()
}
})
}
//return asset
}
这就是我调用该函数的方式:
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
let fileURL = documentsURL.appendingPathComponent("\(public_id!.description).mp4")
//let videoSize = CGSize(width: moldura.larguraMoldura, height: moldura.alturaMoldura)
imageToVideo.selectedPhotosArray = fotosBoomerangArray
let sizeVideo = CGSize(width: moldura.larguraMoldura, height: moldura.alturaMoldura)
imageToVideo.outputSize = sizeVideo
imageToVideo.buildVideoFromImageArray(imageArrayToVideoURL: fileURL, completion: {
(video) in
DispatchQueue.main.async {
self.videoPlayer = video
self.irParaPreview()
}
})
因此,这返回的是一个无法播放的视频,如果我尝试播放它,我会得到一个iOS播放器,该播放器上有一条横过播放符号的横条,而时间栏旁的滚轮不断旋转。我还需要文件的数据来上传视频,当我尝试获取它时,它为null。
答案 0 :(得分:0)
AVFoundation中发生错误的方法有很多。我建议您更仔细地遵循编写良好的参考实现。这里有一些我认为有用的东西:
答案 1 :(得分:0)
因此,我能够结合Robin Stewart提供的链接中的某些方法来制定一个可行的解决方案。值得指出的是,像我一样使用它们对我不起作用,只有当我进行一些更改后,它才最终起作用。也许这与Swift 3中的大多数功能有关,而我正在使用Swift 4.2。
这是我的解决方法:
func writeImagesAsMovie(_ allImages: [UIImage], videoPath: String, videoSize: CGSize, videoFPS: Int32, completion: @escaping (Bool) -> ()) -> Bool{
guard let assetWriter = try? AVAssetWriter(outputURL: URL(string: videoPath)!, fileType: AVFileType.mp4) else {
fatalError("AVVideoCodecType.h264 error")
}
let outputSettings = [AVVideoCodecKey : AVVideoCodecType.h264, AVVideoWidthKey : NSNumber(value: Float(outputSize.width)), AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]
guard assetWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
fatalError("Negative : Can't apply the Output settings...")
}
let writerInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)), kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))]
let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
if assetWriter.canAdd(writerInput) {
assetWriter.add(writerInput)
}
// Start writing session
if assetWriter.startWriting() {
assetWriter.startSession(atSourceTime: CMTime.zero)
// -- Create queue for <requestMediaDataWhenReadyOnQueue>
assert(pixelBufferAdaptor.pixelBufferPool != nil)
let mediaQueue = DispatchQueue(label: "mediaInputQueue", attributes: [])
// -- Set video parameters
let frameDuration = CMTimeMake(value: 1, timescale: videoFPS)
var frameCount = 0
// -- Add images to video
let numImages = allImages.count
writerInput.requestMediaDataWhenReady(on: mediaQueue, using: { () -> Void in
// Append unadded images to video but only while input ready
while (writerInput.isReadyForMoreMediaData && frameCount < numImages) {
let lastFrameTime = CMTimeMake(value: Int64(frameCount), timescale: videoFPS)
let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
if !self.appendPixelBufferForImageAtURL(allImages[frameCount], pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime) {
print("Error converting images to video: AVAssetWriterInputPixelBufferAdapter failed to append pixel buffer")
return
}
frameCount += 1
}
// No more images to add? End video.
if (frameCount >= numImages) {
writerInput.markAsFinished()
assetWriter.finishWriting {
if (assetWriter.error != nil) {
print("Error converting images to video: \(assetWriter.error)")
} else {
print("Converted images to movie @ \(videoPath)")
completion(true)
}
}
}
})
}
return true
}
func createAssetWriter(_ path: String, size: CGSize) -> AVAssetWriter? {
// Convert <path> to NSURL object
let pathURL = URL(fileURLWithPath: path)
// Return new asset writer or nil
do {
// Create asset writer
let newWriter = try AVAssetWriter(outputURL: pathURL, fileType: AVFileType.mp4)
// Define settings for video input
let videoSettings: [String : AnyObject] = [
AVVideoCodecKey : AVVideoCodecType.h264 as AnyObject,
AVVideoWidthKey : size.width as AnyObject,
AVVideoHeightKey : size.height as AnyObject,
]
// Add video input to writer
let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
newWriter.add(assetWriterVideoInput)
// Return writer
print("Created asset writer for \(size.width)x\(size.height) video")
return newWriter
} catch {
print("Error creating asset writer: \(error)")
return nil
}
}
func appendPixelBufferForImageAtURL(_ image: UIImage, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
var appendSucceeded = false
autoreleasepool {
if let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool {
let pixelBufferPointer = UnsafeMutablePointer<CVPixelBuffer?>.allocate(capacity:1)
let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
kCFAllocatorDefault,
pixelBufferPool,
pixelBufferPointer
)
if let pixelBuffer = pixelBufferPointer.pointee , status == 0 {
fillPixelBufferFromImage(image, pixelBuffer: pixelBuffer)
appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
pixelBufferPointer.deinitialize()
} else {
NSLog("Error: Failed to allocate pixel buffer from pool")
}
pixelBufferPointer.deallocate(capacity: 1)
}
}
return appendSucceeded
}
func fillPixelBufferFromImage(_ image: UIImage, pixelBuffer: CVPixelBuffer) {
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
// Create CGBitmapContext
let context = CGContext(
data: pixelData,
width: Int(image.size.width),
height: Int(image.size.height),
bitsPerComponent: 8,
bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
space: rgbColorSpace,
bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
)!
// Draw image into context
let drawCGRect = CGRect(x:0, y:0, width:image.size.width, height:image.size.height)
var drawRect = NSCoder.string(for: drawCGRect);
let ciImage = CIImage(image: image)
let cgImage = convertCIImageToCGImage(inputImage: ciImage!)
context.draw(cgImage!, in: CGRect(x: 0.0,y: 0.0,width: image.size.width,height: image.size.height))
CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
}
func convertCIImageToCGImage(inputImage: CIImage) -> CGImage! {
let context = CIContext(options: nil)
if context != nil {
return context.createCGImage(inputImage, from: inputImage.extent)
}
return nil
}
}