On iOS 5.0, video captured from the camera plays back fine with the code below:
glActiveTexture(GL_TEXTURE0);
// Y (luma) plane -> texture unit 0
CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                            videoTextureCache,
                                                            pixelBuffer,
                                                            NULL,
                                                            GL_TEXTURE_2D,
                                                            GL_RED_EXT,
                                                            videoWidth,
                                                            videoHeight,
                                                            GL_RED_EXT,
                                                            GL_UNSIGNED_BYTE,
                                                            0,
                                                            &lumaTexture);
if (!lumaTexture || err) {
    NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
    return;
}
glBindTexture(CVOpenGLESTextureGetTarget(lumaTexture), CVOpenGLESTextureGetName(lumaTexture));

// Set texture parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

glActiveTexture(GL_TEXTURE1);
// CbCr (chroma) plane -> texture unit 1
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                   videoTextureCache,
                                                   pixelBuffer,
                                                   NULL,
                                                   GL_TEXTURE_2D,
                                                   GL_RG_EXT,
                                                   videoWidth / 2,
                                                   videoHeight / 2,
                                                   GL_RG_EXT,
                                                   GL_UNSIGNED_BYTE,
                                                   1,
                                                   &chromaTexture);
if (!chromaTexture || err) {
    NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
    return;
}
glBindTexture(CVOpenGLESTextureGetTarget(chromaTexture), CVOpenGLESTextureGetName(chromaTexture));

// Set texture parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
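For reference, this first variant assumes videoTextureCache was created once against the rendering context and that the previous frame's textures are released before each new frame. A minimal sketch of that setup (_context here is a placeholder for the actual EAGLContext, and the per-frame cleanup is illustrative rather than a copy of my real code):

// One-time setup: create the texture cache against the rendering context
// (_context is a placeholder for whatever EAGLContext the renderer uses).
CVReturn cacheErr = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault,
                                                 NULL,
                                                 _context,
                                                 NULL,
                                                 &videoTextureCache);
if (cacheErr != kCVReturnSuccess) {
    NSLog(@"CVOpenGLESTextureCacheCreate failed (error: %d)", cacheErr);
}

// Per-frame cleanup before creating textures from the next pixel buffer.
if (lumaTexture)   { CFRelease(lumaTexture);   lumaTexture = NULL; }
if (chromaTexture) { CFRelease(chromaTexture); chromaTexture = NULL; }
CVOpenGLESTextureCacheFlush(videoTextureCache, 0);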
On iOS 4.0, the following code works well:
CVPixelBufferLockBaseAddress(pixelBuffer, 0);

// Y (luma) plane -> texture unit 0
void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, spriteTexture[0]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, videoWidth, videoHeight, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, baseAddress);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

// CbCr (chroma) plane -> texture unit 1
void *baseAddress1 = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, spriteTexture[1]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RG_EXT, videoWidth / 2, videoHeight / 2, 0, GL_RG_EXT, GL_UNSIGNED_BYTE, baseAddress1);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
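Both variants leave the luma plane on texture unit 0 and the chroma plane on texture unit 1, and the fragment shader samples them through two uniforms. A minimal sketch of that binding (program, SamplerY and SamplerUV are placeholder names, not necessarily the ones used in my shader):

// Point the two sampler uniforms at the texture units used above.
glUseProgram(program);
glUniform1i(glGetUniformLocation(program, "SamplerY"), 0);   // luma   -> unit 0
glUniform1i(glGetUniformLocation(program, "SamplerUV"), 1);  // chroma -> unit 1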
However, when the video being played is read from a local mp4 file, the second approach fails while the first one still works fine. This is strange and has me baffled. How can it be fixed, and why does it happen?
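In case it helps narrow this down, here is a small diagnostic sketch (using the same pixelBuffer as above) that logs the pixel format and per-plane geometry of each buffer, so camera-sourced frames can be compared with the ones read from the mp4 file (which might, for example, differ in format or row padding):

// Diagnostic sketch: log what the CVPixelBuffer actually contains.
OSType fmt = CVPixelBufferGetPixelFormatType(pixelBuffer);
size_t planes = CVPixelBufferGetPlaneCount(pixelBuffer);
NSLog(@"pixel format: %08x, planes: %zu", (unsigned int)fmt, planes);
for (size_t i = 0; i < planes; i++) {
    NSLog(@"plane %zu: %zux%zu, bytesPerRow: %zu",
          i,
          CVPixelBufferGetWidthOfPlane(pixelBuffer, i),
          CVPixelBufferGetHeightOfPlane(pixelBuffer, i),
          CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, i));
}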