我正在根据图像创建视频,然后需要立即访问它。我通过将图像的imageBuffer
添加到AVAssetWriter
来创建它。视频在这里创建:
while !adaptor.assetWriterInput.isReadyForMoreMediaData { usleep(10) }
let first = adaptor.append(buffer, withPresentationTime: startFrameTime)
while !adaptor.assetWriterInput.isReadyForMoreMediaData { usleep(10) }
let second = adaptor.append(buffer, withPresentationTime: endFrameTime)
videoWriterInput.markAsFinished()
videoWriter.finishWriting {
completion(videoWriter.error)
}
第一个和第二个都返回true(成功追加),并且videoWriter上没有错误。视频已成功创建。
完成后,我可以成功“检索”资产makeVideo(image, urlDestination) { error in
guard error == nil else { return }
let imageAsset = AVAsset(url: url)
guard
let imageTrack = self.composition.addMutableTrack(
withMediaType: .video,
preferredTrackID: kCMPersistentTrackID_Invalid),
let imageVideoTrack = imageAsset.tracks(withMediaType: .video).first else {
assertionFailure()
return
}
try! imageTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: self.duration),
of: imageVideoTrack,
at: .zero
)
let imageVideoLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: imageTrack)
}
但是,第一次尝试访问它时,我没有任何视频(有一段视频时长合适,但没有任何形式的视频显示)。如果将其添加到PreviewController中,我什么也没得到。但是,如果我关闭了PreviewController并再次访问该资产,那么它就成功了。
我首先想到的是这是一个潜在的计时问题,但是即使我添加了延迟,它还是第一次失败。
有什么想法吗?请记住,该代码适用于url中的文件已经存在的情况。只是制作完成后不正确。
编辑: 以上是我认为与该问题有关的代码部分。完整的代码如下:
private func filePath() -> URL {
let fileManager = FileManager.default
let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
guard let documentDirectory = urls.first else {
fatalError("documentDir Error")
}
return documentDirectory
}
class VideoComposer {
let composition = AVMutableComposition()
let mainInstruction = AVMutableVideoCompositionInstruction()
let duration: CMTime
let videoSize: CGSize
var viewSizeMultiplier: CGFloat = 5.0
init(view: UIView) {
videoSize = CGSize(width: 1772.0, height: 3840.0)
viewSizeMultiplier = 1772.0 / view.frame.width
self.duration = CMTime(seconds: 15, preferredTimescale: 600)
mainInstruction.timeRange = CMTimeRangeMake(start: .zero, duration: self.duration)
view.subviews.reversed().forEach { subview in
if let imageView = subview as? UIImageView {
addImage(of: imageView)
}
else {
print("unhandled view type")
}
}
}
func createVideo(completion: @escaping (AVAssetExportSession) -> Void) {
// make video composition
let videoComposition = AVMutableVideoComposition()
videoComposition.instructions = [mainInstruction]
videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 60)
videoComposition.renderSize = videoSize
export(videoComposition: videoComposition) { (session) in
completion(session)
}
}
private func export(videoComposition: AVMutableVideoComposition, completion: @escaping (AVAssetExportSession) -> Void) {
// export
let url = filePath().appendingPathComponent("output.mov")
let fileManager = FileManager.default
if fileManager.fileExists(atPath: url.path) {
try! fileManager.removeItem(at: url)
}
guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
assertionFailure()
return
}
exporter.videoComposition = videoComposition
exporter.outputFileType = .mov
exporter.outputURL = url
exporter.exportAsynchronously {
DispatchQueue.main.async {
completion(exporter)
}
}
}
private func addImage(of imageView: UIImageView) {
guard let image = imageView.image else {
assertionFailure("no image")
return
}
let movieLength = TimeInterval(duration.seconds)
let url = filePath().appendingPathComponent("image.mov")
ImageVideoCreator.writeSingleImageToMovie(image: image, movieLength: movieLength, outputFileURL: url) { [weak self] success in
guard let `self` = self else {
return
}
let imageAsset = AVAsset(url: url)
let keys = ["playable", "readable", "composable", "tracks", "exportable"]
var error: NSError? = nil
imageAsset.loadValuesAsynchronously(forKeys: keys, completionHandler: {
DispatchQueue.main.async {
keys.forEach({ key in
let status = imageAsset.statusOfValue(forKey: key, error: &error)
switch status {
case .loaded:
print("loaded. \(error)")
case .loading:
print("loading. \(error)")
case .failed:
print("failed. \(error)")
case .cancelled:
print("cancelled. \(error)")
case .unknown:
print("unknown. \(error)")
}
})
guard
let imageTrack = self.composition.addMutableTrack(
withMediaType: .video,
preferredTrackID: kCMPersistentTrackID_Invalid),
let imageVideoTrack = imageAsset.tracks(withMediaType: .video).first
else {
assertionFailure()
return
}
try! imageTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: self.duration), of: imageVideoTrack, at: .zero)
let imageVideoLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: imageTrack)
print("image")
self.setTransform(on: imageVideoLayerInstruction, of: imageView, andOf: imageVideoTrack)
self.mainInstruction.layerInstructions.append(imageVideoLayerInstruction)
}
})
}
}
}
class ViewController: UIViewController {
var composer: VideoComposer?
let player = AVPlayerViewController()
override func viewDidLoad() {
super.viewDidLoad()
guard let pathUrl = Bundle.main.url(forResource: "SampleVideo_1280x720_1mb", withExtension: "mp4") else {
assertionFailure()
return
}
let image = UIImage(named: "image")
let imageView = UIImageView(image: image)
view.addSubview(imageView)
imageView.translatesAutoresizingMaskIntoConstraints = false
imageView.topAnchor.constraint(equalTo: view.topAnchor, constant: 0).isActive = true
imageView.leadingAnchor.constraint(equalTo: view.leadingAnchor, constant: 0).isActive = true
imageView.widthAnchor.constraint(equalToConstant: image!.size.width / 4).isActive = true
imageView.heightAnchor.constraint(equalToConstant: image!.size.height / 4).isActive = true
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
composer = VideoComposer(view: view)
composer?.createVideo() { exporter in
self.didFinish(session: exporter)
}
}
func didFinish(session: AVAssetExportSession) {
guard let url = session.outputURL else {
assertionFailure()
return
}
self.showVideo(videoUrl: url)
}
func showVideo(videoUrl: URL) {
let videoPlayer = AVPlayer(url: videoUrl)
player.player = videoPlayer
self.present(player, animated: true) {
self.player.player?.play()
}
}
}
class ImageVideoCreator {
private static func pixelBuffer(fromImage image: CGImage, size: CGSize) -> CVPixelBuffer? {
let options: CFDictionary = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true] as CFDictionary
var pxbuffer: CVPixelBuffer? = nil
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(size.width), Int(size.height), kCVPixelFormatType_32ARGB, options, &pxbuffer)
guard let buffer = pxbuffer, status == kCVReturnSuccess else {
return nil
}
CVPixelBufferLockBaseAddress(buffer, [])
guard let pxdata = CVPixelBufferGetBaseAddress(buffer) else {
return nil
}
let bytesPerRow = CVPixelBufferGetBytesPerRow(buffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
guard let context = CGContext(data: pxdata, width: Int(size.width), height: Int(size.height), bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else {
return nil
}
context.concatenate(CGAffineTransform(rotationAngle: 0))
context.draw(image, in: CGRect(x: 0, y: 0, width: size.width, height: size.height))
CVPixelBufferUnlockBaseAddress(buffer, [])
return buffer
}
static func writeSingleImageToMovie(image: UIImage, movieLength: TimeInterval, outputFileURL: URL, completion: @escaping (Bool) -> ()) {
let fileManager = FileManager.default
if fileManager.fileExists(atPath: outputFileURL.path) {
try! fileManager.removeItem(at: outputFileURL)
}
do {
let imageSize = image.size
let videoWriter = try AVAssetWriter(outputURL: outputFileURL, fileType: AVFileType.mov)
let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoWidthKey: imageSize.width,
AVVideoHeightKey: imageSize.height]
let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
let adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: nil)
if !videoWriter.canAdd(videoWriterInput) {
completion(false)
return
}
videoWriterInput.expectsMediaDataInRealTime = true
videoWriter.add(videoWriterInput)
videoWriter.startWriting()
let timeScale: Int32 = 600 // recommended in CMTime for movies.
let startFrameTime = CMTimeMake(value: 0, timescale: 600)
let endFrameTime = CMTimeMakeWithSeconds(movieLength, preferredTimescale: timeScale)
videoWriter.startSession(atSourceTime: startFrameTime)
guard let cgImage = image.cgImage else {
completion(false)
return
}
let buffer: CVPixelBuffer = self.pixelBuffer(fromImage: cgImage, size: imageSize)!
while !adaptor.assetWriterInput.isReadyForMoreMediaData { usleep(10) }
let first = adaptor.append(buffer, withPresentationTime: startFrameTime)
while !adaptor.assetWriterInput.isReadyForMoreMediaData { usleep(10) }
let second = adaptor.append(buffer, withPresentationTime: endFrameTime)
videoWriterInput.markAsFinished()
videoWriter.finishWriting {
completion(true)
}
} catch {
completion(false)
}
}
}