I'm trying to render yuv420p-encoded video to an OpenGL ES2 texture using Swift 3, on an iPhone 6S running iOS 6.3.3.
Texture setup:
var formatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
var lumaTexture: CVOpenGLESTexture?
var chromaTexture: CVOpenGLESTexture?
var mediapixelBuffer: CVPixelBuffer?
var ioSurfaceBuffer: CVPixelBuffer?
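// The `attr` dictionary passed to CVPixelBufferCreate below is not shown in the
// original. It must at least request an IOSurface backing, or
// CVOpenGLESTextureCacheCreateTextureFromImage cannot use the buffer later.
// A minimal sketch (the exact attribute contents are an assumption):
let attr = [kCVPixelBufferIOSurfacePropertiesKey as String: [:] as [String: Any]] as CFDictionary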
media.videoSamplesBuffer = media.assetReaderOutput?.copyNextSampleBuffer()
mediapixelBuffer = CMSampleBufferGetImageBuffer(media.videoSamplesBuffer!)!
CVPixelBufferLockBaseAddress(mediapixelBuffer!, .readOnly)
let bufferWidth0: Int = CVPixelBufferGetWidthOfPlane(mediapixelBuffer!, 0)
let bufferWidth1: Int = CVPixelBufferGetWidthOfPlane(mediapixelBuffer!, 1)
let bufferHeight0: Int = CVPixelBufferGetHeightOfPlane(mediapixelBuffer!, 0) // height, not width
let bufferHeight1: Int = CVPixelBufferGetHeightOfPlane(mediapixelBuffer!, 1)
let bytesPerRow0: Int = CVPixelBufferGetBytesPerRowOfPlane(mediapixelBuffer!, 0)
let bytesPerRow1: Int = CVPixelBufferGetBytesPerRowOfPlane(mediapixelBuffer!, 1)
let pixelBufferBaseAddress = CVPixelBufferGetBaseAddress(mediapixelBuffer!)
let pixelBufferPlaneAddress0 = CVPixelBufferGetBaseAddressOfPlane(mediapixelBuffer!, 0)
let pixelBufferPlaneAddress1 = CVPixelBufferGetBaseAddressOfPlane(mediapixelBuffer!, 1)
// Create the IOSurface-backed copy (see the `attr` sketch above).
let ioBufferRet = CVPixelBufferCreate(kCFAllocatorDefault,
                                      bufferWidth_,
                                      bufferHeight_,
                                      self.formatType,
                                      attr,
                                      &ioSurfaceBuffer)
if ioBufferRet != kCVReturnSuccess { print("error at `CVPixelBufferCreate`", ioBufferRet) }
CVPixelBufferLockBaseAddress(ioSurfaceBuffer!, []) // lock for writing, not .readOnly: we memcpy into it below
let copyBufferPlaneAddress0 = CVPixelBufferGetBaseAddressOfPlane(ioSurfaceBuffer!, 0)
let copyBufferPlaneAddress1 = CVPixelBufferGetBaseAddressOfPlane(ioSurfaceBuffer!, 1)
memcpy(copyBufferPlaneAddress0, pixelBufferPlaneAddress0, bufferHeight0 * bytesPerRow0) // Y plane: full height x stride, no /2
memcpy(copyBufferPlaneAddress1, pixelBufferPlaneAddress1, bufferHeight1 * bytesPerRow1) // interleaved CbCr plane: bufferHeight1 is already height/2
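// Caveat: the memcpys above assume the source and destination planes share the
// same bytesPerRow. If the strides can differ, copy row by row instead, e.g.:
//   let destStride0 = CVPixelBufferGetBytesPerRowOfPlane(ioSurfaceBuffer!, 0)
//   for row in 0..<bufferHeight0 {
//       memcpy(copyBufferPlaneAddress0! + row * destStride0,
//              pixelBufferPlaneAddress0! + row * bytesPerRow0,
//              min(bytesPerRow0, destStride0))
//   }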
glActiveTexture(GLenum(GL_TEXTURE0))
if nil != ioSurfaceBuffer && nil != media.vidTexCachePtr {
var cvRet = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
media.vidTexCachePtr!,
ioSurfaceBuffer!,
nil,
GLenum(GL_TEXTURE_2D),
GLint(GL_RED_EXT),
GLsizei(bufferWidth0),
GLsizei(bufferHeight0),
GLenum(GL_RED_EXT),
GLenum(GL_UNSIGNED_BYTE),
0,
&lumaTexture)
if cvRet != 0 { print("0 error at `CVOpenGLESTextureCacheCreateTextureFromImage`", cvRet) }
}
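// Note: GL_RED_EXT / GL_RG_EXT come from the EXT_texture_rg extension, which
// iOS exposes under OpenGL ES 2; it is what lets the single-channel Y plane and
// the two-channel CbCr plane map onto one- and two-component textures here.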
if nil != lumaTexture {
glBindTexture(CVOpenGLESTextureGetTarget(lumaTexture!), CVOpenGLESTextureGetName(lumaTexture!))
}
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE)
glActiveTexture(GLenum(GL_TEXTURE1))
if nil != ioSurfaceBuffer && nil != media.vidTexCachePtr {
var cvRet = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
media.vidTexCachePtr!,
ioSurfaceBuffer!,
nil,
GLenum(GL_TEXTURE_2D),
GLint(GL_RG_EXT),
GLsizei(bufferWidth1),
GLsizei(bufferHeight1),
GLenum(GL_RG_EXT),
GLenum(GL_UNSIGNED_BYTE),
1,
&chromaTexture)
if cvRet != 0 { print("1 error at `CVOpenGLESTextureCacheCreateTextureFromImage`", cvRet) }
}
if nil != chromaTexture {
glBindTexture(CVOpenGLESTextureGetTarget(chromaTexture!), CVOpenGLESTextureGetName(chromaTexture!))
}
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE)
CVPixelBufferUnlockBaseAddress(mediapixelBuffer!, .readOnly)
CVPixelBufferUnlockBaseAddress(ioSurfaceBuffer!, []) // match the write lock above
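One housekeeping note when a texture cache is reused every frame: release the previous frame's CVOpenGLESTexture references and flush the cache before creating new textures, otherwise stale textures pile up. A minimal sketch, assuming the lumaTexture/chromaTexture properties and cache from the code above:
lumaTexture = nil   // dropping the last reference releases the CVOpenGLESTexture
chromaTexture = nil
if let cache = media.vidTexCachePtr {
    CVOpenGLESTextureCacheFlush(cache, 0) // lets the cache recycle its backing textures
}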
Fragment Shader:
#version 100
precision mediump float;
varying vec2 vUV;
uniform sampler2D SamplerY;
uniform sampler2D SamplerUV;
void main() {
mediump vec3 yuv;
lowp vec3 rgb;
yuv.x = texture2D(SamplerY, vUV).r;
yuv.yz = texture2D(SamplerUV, vUV).rg - vec2(0.5, 0.5);
// Using BT.709 which is the standard for HDTV
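// Note: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange delivers video-range
// luma (16-235), so strictly yuv.x should be rescaled before the matrix, e.g.
//   yuv.x = (yuv.x - 16.0 / 255.0) * (255.0 / 219.0);
// the full-range matrix below is only an approximation for such input.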
rgb = mat3( 1, 1, 1,
0, -.18732, 1.8556,
1.57481, -.46813, 0) * yuv;
gl_FragColor = vec4(rgb, 1);
}
The luma texture on its own looks correct, but the chroma texture on its own seems to contain only the Cr channel. I know that because the video is 4:2:0 the second chroma channel is empty, so maybe I shouldn't be able to "see" the Cb channel, but the final result (which should be the color-bar colors) looks like this. It's missing red. (I assume that's because the output is BGRA; if it were RGBA, blue would be missing.) How do I get the red back?
This post describes a problem similar to mine, but that solution uses 3 planes (Y, U, and V separately), whereas I'm trying to do this with 2 planes (Y and interleaved UV). I tried using the kCVPixelFormatType_420YpCbCr8Planar format type to access 3 planes, but then CVOpenGLESTextureCacheCreateTextureFromImage fails to create an IOSurface. I've also tried a few different YUV-to-RGB shader equations, and looked into using ffmpeg to supply the CVPixelBuffer, but I couldn't get it to build for my iPhone's architecture (arm64). Thanks in advance; any help is greatly appreciated!
Answer 0 (score: 0):
So it turns out the SamplerUV texture was never actually being sent to the shader (although it was visible in the GPU frame capture, which was misleading). I assumed, incorrectly, that because SamplerY was sent to the shader automatically, the second texture, SamplerUV, would be too. So what I had been seeing earlier was the luma texture used for both the Y and UV textures.
The missing lines that fixed the problem:
var SamplerY: GLint = 0
var SamplerUV: GLint = 1
// Look up each sampler uniform's location in the linked program...
SamplerY = glGetUniformLocation(shaderProgram, "SamplerY")
SamplerUV = glGetUniformLocation(shaderProgram, "SamplerUV")
// ...and point each sampler at its texture unit: Y on unit 0, UV on unit 1.
glUniform1i(SamplerY, 0)
glUniform1i(SamplerUV, 1)
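The two glUniform1i calls have to run while the program is bound with glUseProgram, after it has been linked. At draw time each texture must then sit on its matching unit; a sketch of the per-frame binding, reusing the texture objects from the question:
glActiveTexture(GLenum(GL_TEXTURE0))   // unit 0 -> SamplerY
glBindTexture(CVOpenGLESTextureGetTarget(lumaTexture!), CVOpenGLESTextureGetName(lumaTexture!))
glActiveTexture(GLenum(GL_TEXTURE1))   // unit 1 -> SamplerUV
glBindTexture(CVOpenGLESTextureGetTarget(chromaTexture!), CVOpenGLESTextureGetName(chromaTexture!))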