What is the most efficient way to capture frames from an MTKView? If possible, I'd like to save the frames to a .mov file in real time. Is it possible to render into an AVPlayer frame or something like that?
I'm currently drawing with this code (based on @warrenm's PerformanceShaders project):
func draw(in view: MTKView) {
    _ = inflightSemaphore.wait(timeout: DispatchTime.distantFuture)

    updateBuffers()

    let commandBuffer = commandQueue.makeCommandBuffer()

    commandBuffer.addCompletedHandler { [weak self] commandBuffer in
        if let strongSelf = self {
            strongSelf.inflightSemaphore.signal()
        }
    }

    // Dispatch the current kernel to perform the selected image filter
    selectedKernel.encode(commandBuffer: commandBuffer,
                          sourceTexture: kernelSourceTexture!,
                          destinationTexture: kernelDestTexture!)

    if let renderPassDescriptor = view.currentRenderPassDescriptor, let currentDrawable = view.currentDrawable {
        let clearColor = MTLClearColor(red: 0, green: 0, blue: 0, alpha: 1)
        renderPassDescriptor.colorAttachments[0].clearColor = clearColor

        let renderEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
        renderEncoder.label = "Main pass"

        renderEncoder.pushDebugGroup("Draw textured square")
        renderEncoder.setFrontFacing(.counterClockwise)
        renderEncoder.setCullMode(.back)

        renderEncoder.setRenderPipelineState(pipelineState)
        renderEncoder.setVertexBuffer(vertexBuffer, offset: MBEVertexDataSize * bufferIndex, at: 0)
        renderEncoder.setVertexBuffer(uniformBuffer, offset: MBEUniformDataSize * bufferIndex, at: 1)
        renderEncoder.setFragmentTexture(kernelDestTexture, at: 0)
        renderEncoder.setFragmentSamplerState(sampler, at: 0)

        renderEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)

        renderEncoder.popDebugGroup()
        renderEncoder.endEncoding()

        commandBuffer.present(currentDrawable)
    }

    bufferIndex = (bufferIndex + 1) % MBEMaxInflightBuffers

    commandBuffer.commit()
}
Answer 0 (score: 21)
Here's a small class that performs the essential functions of writing out a movie file that captures the contents of a Metal view:
class MetalVideoRecorder {
    var isRecording = false
    var recordingStartTime = TimeInterval(0)

    private var assetWriter: AVAssetWriter
    private var assetWriterVideoInput: AVAssetWriterInput
    private var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor

    init?(outputURL url: URL, size: CGSize) {
        do {
            assetWriter = try AVAssetWriter(outputURL: url, fileType: AVFileTypeAppleM4V)
        } catch {
            return nil
        }

        let outputSettings: [String: Any] = [ AVVideoCodecKey : AVVideoCodecH264,
                                              AVVideoWidthKey : size.width,
                                              AVVideoHeightKey : size.height ]

        assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
        assetWriterVideoInput.expectsMediaDataInRealTime = true

        let sourcePixelBufferAttributes: [String: Any] = [
            kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_32BGRA,
            kCVPixelBufferWidthKey as String : size.width,
            kCVPixelBufferHeightKey as String : size.height ]

        assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput,
                                                                           sourcePixelBufferAttributes: sourcePixelBufferAttributes)

        assetWriter.add(assetWriterVideoInput)
    }

    func startRecording() {
        assetWriter.startWriting()
        assetWriter.startSession(atSourceTime: kCMTimeZero)

        recordingStartTime = CACurrentMediaTime()
        isRecording = true
    }

    func endRecording(_ completionHandler: @escaping () -> ()) {
        isRecording = false

        assetWriterVideoInput.markAsFinished()
        assetWriter.finishWriting(completionHandler: completionHandler)
    }

    func writeFrame(forTexture texture: MTLTexture) {
        if !isRecording {
            return
        }

        while !assetWriterVideoInput.isReadyForMoreMediaData {}

        guard let pixelBufferPool = assetWriterPixelBufferInput.pixelBufferPool else {
            print("Pixel buffer asset writer input did not have a pixel buffer pool available; cannot retrieve frame")
            return
        }

        var maybePixelBuffer: CVPixelBuffer? = nil
        let status = CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &maybePixelBuffer)
        if status != kCVReturnSuccess {
            print("Could not get pixel buffer from asset writer input; dropping frame...")
            return
        }

        guard let pixelBuffer = maybePixelBuffer else { return }

        CVPixelBufferLockBaseAddress(pixelBuffer, [])
        let pixelBufferBytes = CVPixelBufferGetBaseAddress(pixelBuffer)!

        // Use the bytes-per-row value from the pixel buffer, since its stride may be rounded up to be 16-byte aligned
        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
        let region = MTLRegionMake2D(0, 0, texture.width, texture.height)

        texture.getBytes(pixelBufferBytes, bytesPerRow: bytesPerRow, from: region, mipmapLevel: 0)

        let frameTime = CACurrentMediaTime() - recordingStartTime
        let presentationTime = CMTimeMakeWithSeconds(frameTime, 240)
        assetWriterPixelBufferInput.append(pixelBuffer, withPresentationTime: presentationTime)

        CVPixelBufferUnlockBaseAddress(pixelBuffer, [])
    }
}
After initializing one of these and calling startRecording(), you can add a completed handler to the command buffer containing your rendering commands and call writeFrame (after you end encoding, but before presenting the drawable or committing the buffer):
let texture = currentDrawable.texture
commandBuffer.addCompletedHandler { commandBuffer in
    self.recorder.writeFrame(forTexture: texture)
}
When you're done recording, just call endRecording, and the video file will be finalized and closed.
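Putting it together, a minimal sketch of the full lifecycle might look like the following; the recorder property, output URL, and the "stop" action are my own illustrative assumptions, not part of the answer above:

// Setup: create a recorder sized to the drawable (names here are hypothetical).
// Note: reading the drawable's texture with getBytes requires view.framebufferOnly = false.
let url = FileManager.default.temporaryDirectory.appendingPathComponent("capture.m4v")
recorder = MetalVideoRecorder(outputURL: url, size: view.drawableSize)
recorder?.startRecording()

// In draw(in:), after renderEncoder.endEncoding() but before present/commit:
if let recorder = recorder, recorder.isRecording {
    let texture = currentDrawable.texture
    commandBuffer.addCompletedHandler { _ in
        recorder.writeFrame(forTexture: texture)
    }
}

// Later, e.g. in a "stop" button action:
recorder?.endRecording {
    print("Finished writing movie file")
}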
Caveats:
This class assumes the source texture is in the default format, .bgra8Unorm. If it isn't, you'll get crashes or corruption. If necessary, convert the texture with a compute or fragment shader, or use Accelerate (one possible route is sketched after these caveats).
This class also assumes the texture is the same size as the video frame. If that's not the case (if the drawable size changes, or your screen auto-rotates), the output will be corrupted and you may see crashes. Mitigate this by scaling or cropping the source texture as your application requires.
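For the first caveat, one possible conversion route (my assumption; the answer only names the general techniques) is MetalPerformanceShaders' MPSImageConversion, which can re-encode pixel formats on the GPU before the frame is written out:

import MetalPerformanceShaders

// Sketch: convert a non-BGRA source (e.g. .rgba8Unorm) into the .bgra8Unorm
// texture the recorder expects. Both textures are assumed to be the same size.
func encodeFormatConversion(device: MTLDevice,
                            commandBuffer: MTLCommandBuffer,
                            source: MTLTexture,
                            destination: MTLTexture) {
    let conversion = MPSImageConversion(device: device,
                                        srcAlpha: .alphaIsOne,
                                        destAlpha: .alphaIsOne,
                                        backgroundColor: nil,
                                        conversionInfo: nil)
    conversion.encode(commandBuffer: commandBuffer,
                      sourceTexture: source,
                      destinationTexture: destination)
}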
Answer 1 (score: 2)

Upgraded to Swift 5.
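A sketch of Answer 0's class under the Swift 5-era API names (AVFileType.m4v, AVVideoCodecType.h264, AVMediaType.video, CMTime.zero, and the preferredTimescale: label); the logic is otherwise unchanged, so treat this as an approximation rather than this answer's exact code:

import AVFoundation
import Metal
import QuartzCore

class MetalVideoRecorder {
    var isRecording = false
    var recordingStartTime = TimeInterval(0)

    private var assetWriter: AVAssetWriter
    private var assetWriterVideoInput: AVAssetWriterInput
    private var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor

    init?(outputURL url: URL, size: CGSize) {
        do {
            assetWriter = try AVAssetWriter(outputURL: url, fileType: .m4v)
        } catch {
            return nil
        }

        let outputSettings: [String: Any] = [ AVVideoCodecKey : AVVideoCodecType.h264,
                                              AVVideoWidthKey : size.width,
                                              AVVideoHeightKey : size.height ]

        assetWriterVideoInput = AVAssetWriterInput(mediaType: .video, outputSettings: outputSettings)
        assetWriterVideoInput.expectsMediaDataInRealTime = true

        let sourcePixelBufferAttributes: [String: Any] = [
            kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_32BGRA,
            kCVPixelBufferWidthKey as String : size.width,
            kCVPixelBufferHeightKey as String : size.height ]

        assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput,
                                                                           sourcePixelBufferAttributes: sourcePixelBufferAttributes)

        assetWriter.add(assetWriterVideoInput)
    }

    func startRecording() {
        assetWriter.startWriting()
        assetWriter.startSession(atSourceTime: .zero)

        recordingStartTime = CACurrentMediaTime()
        isRecording = true
    }

    func endRecording(_ completionHandler: @escaping () -> ()) {
        isRecording = false

        assetWriterVideoInput.markAsFinished()
        assetWriter.finishWriting(completionHandler: completionHandler)
    }

    func writeFrame(forTexture texture: MTLTexture) {
        if !isRecording { return }

        while !assetWriterVideoInput.isReadyForMoreMediaData {}

        guard let pixelBufferPool = assetWriterPixelBufferInput.pixelBufferPool else { return }

        var maybePixelBuffer: CVPixelBuffer? = nil
        guard CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &maybePixelBuffer) == kCVReturnSuccess,
              let pixelBuffer = maybePixelBuffer else { return }

        CVPixelBufferLockBaseAddress(pixelBuffer, [])
        let pixelBufferBytes = CVPixelBufferGetBaseAddress(pixelBuffer)!

        // The pixel buffer's stride may be rounded up for alignment, so use its bytes-per-row value
        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
        let region = MTLRegionMake2D(0, 0, texture.width, texture.height)
        texture.getBytes(pixelBufferBytes, bytesPerRow: bytesPerRow, from: region, mipmapLevel: 0)

        let frameTime = CACurrentMediaTime() - recordingStartTime
        let presentationTime = CMTimeMakeWithSeconds(frameTime, preferredTimescale: 240)
        assetWriterPixelBufferInput.append(pixelBuffer, withPresentationTime: presentationTime)

        CVPixelBufferUnlockBaseAddress(pixelBuffer, [])
    }
}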
Answer 2 (score: 0)
I'm using this post to record a custom Metal view, but I'm running into some problems. When I start recording, I drop from 60fps to ~20fps on an iPhone 12 Pro Max. After profiling, the function slowing everything down is texture.getBytes, since it pulls the buffer from the GPU back to the CPU.

Another problem, and I'm not sure if it's a consequence of the first, is that the video and audio end up out of sync. I don't know whether I should go down the semaphore route to fix this, or whether there's some other potential workaround.

In my case, the texture is as large as the screen, since I create it from the camera stream and then process it through a couple of CIFilters. I'm not sure whether the problem is simply that it's too big, and getBytes can't keep up with textures of that size in real time.

If I had to pick a priority, my #1 would be fixing the desync between the audio and the video. Any ideas would be very helpful.
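One workaround often suggested for the getBytes stall (a sketch of my own; nothing in this thread confirms it) is to avoid the CPU readback entirely: pull a pixel buffer from the adaptor's pool before encoding, wrap it in a Metal texture via CVMetalTextureCache, blit the drawable into it on the GPU, and only append the buffer once the command buffer completes:

import CoreVideo
import Metal

// Sketch: `device` comes from your renderer; the pool's buffers must be created
// with kCVPixelBufferMetalCompatibilityKey set, or texture creation will fail.
var textureCache: CVMetalTextureCache?
CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &textureCache)

func encodeFrameCopy(commandBuffer: MTLCommandBuffer,
                     sourceTexture: MTLTexture,
                     pixelBuffer: CVPixelBuffer,
                     cache: CVMetalTextureCache) {
    var cvTexture: CVMetalTexture?
    CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, cache, pixelBuffer, nil,
                                              .bgra8Unorm,
                                              CVPixelBufferGetWidth(pixelBuffer),
                                              CVPixelBufferGetHeight(pixelBuffer),
                                              0, &cvTexture)
    guard let cvTexture = cvTexture,
          let destination = CVMetalTextureGetTexture(cvTexture) else { return }

    // GPU-side copy; no texture.getBytes, so the CPU never blocks on the frame.
    // Requires matching sizes and pixel formats on both textures.
    if let blit = commandBuffer.makeBlitCommandEncoder() {
        blit.copy(from: sourceTexture, to: destination)
        blit.endEncoding()
    }
    // Append `pixelBuffer` to the pixel buffer adaptor from
    // commandBuffer.addCompletedHandler, once the blit has actually executed.
}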