我正在尝试使用YUY2视频并在iOS设备上一次显示一帧。我最终将使用YUY2数据流,但目前我正在使用录制的文件。
不幸的是,我似乎无法正确转换。我正在使用CoreVideo,我的代码基于一堆从互联网上拼凑而成的零碎例子。目前我正在收视频垃圾。
到目前为止,这是我的代码,谢谢你看看:
import UIKit
import AVKit
import AVFoundation
import CoreGraphics
class ViewController: UIViewController {
var url: URL!
var buffer: Data!
var image: UIImage!
override func viewDidLoad() {
super.viewDidLoad()
let path = Bundle.main.path(forResource: "gst-video", ofType: "yuy2")!
self.url = URL(fileURLWithPath: path)
// Examining the video file in a Hex Editor shows the frame size of 614400
let data = try! Data(contentsOf: self.url)
let sub = data.subdata(in: 0..<614400)
self.buffer = sub
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
var pixelBufferOut: CVPixelBuffer?
let attributes : [NSObject:AnyObject] = [
kCVPixelBufferCGImageCompatibilityKey : true as AnyObject,
kCVPixelBufferCGBitmapContextCompatibilityKey : true as AnyObject,
]
CVPixelBufferCreateWithBytes(kCFAllocatorDefault, 640, 480, kCVPixelFormatType_422YpCbCr8_yuvs, UnsafeMutablePointer(&buffer), 8, nil, nil, attributes as CFDictionary, &pixelBufferOut)
guard let pixelBuffer = pixelBufferOut else {
return
}
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
let ciContext = CIContext(options: nil)
let frameImage = ciContext.createCGImage(ciImage, from: ciImage.extent)
let frame = UIImage(cgImage: frameImage!)
print(frame.cgImage)
self.image = frame
let imageView = UIImageView()
imageView.image = frame
imageView.frame.origin = CGPoint(x: 8, y: 32)
imageView.frame.size = frame.size
view.addSubview(imageView)
CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
}
}