I am trying to render a yuv420p encoded video to an OpenGL ES2 texture using Swift 3 on an iPhone 6S with iOS 10.3.3.
Texture Setup:
var formatType = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
var lumaTexture: CVOpenGLESTexture?
var chromaTexture: CVOpenGLESTexture?
var mediapixelBuffer: CVPixelBuffer?
var ioSurfaceBuffer: CVPixelBuffer?
media.videoSamplesBuffer = media.assetReaderOutput?.copyNextSampleBuffer()
mediapixelBuffer = CMSampleBufferGetImageBuffer(media.videoSamplesBuffer!)!
CVPixelBufferLockBaseAddress(mediapixelBuffer!, .readOnly)
let bufferWidth0: Int = CVPixelBufferGetWidthOfPlane(mediapixelBuffer!, 0)
let bufferWidth1: Int = CVPixelBufferGetWidthOfPlane(mediapixelBuffer!, 1)
let bufferHeight0: Int = CVPixelBufferGetHeightOfPlane(mediapixelBuffer!, 0)
let bufferHeight1: Int = CVPixelBufferGetHeightOfPlane(mediapixelBuffer!, 1)
let bytesPerRow0: Int = CVPixelBufferGetBytesPerRowOfPlane(mediapixelBuffer!, 0)
let bytesPerRow1: Int = CVPixelBufferGetBytesPerRowOfPlane(mediapixelBuffer!, 1)
let pixelBufferBaseAddress = CVPixelBufferGetBaseAddress(mediapixelBuffer!)
let pixelBufferPlaneAddress0 = CVPixelBufferGetBaseAddressOfPlane(mediapixelBuffer!, 0)
let pixelBufferPlaneAddress1 = CVPixelBufferGetBaseAddressOfPlane(mediapixelBuffer!, 1)
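// NOTE: `attr` below is a sketch of my pixel-buffer attributes dictionary (the
// real one isn't shown here). The important part, as far as I understand, is the
// IOSurface-properties entry, since the texture cache needs an IOSurface-backed buffer.
let attrDict: [String: Any] = [
    kCVPixelBufferIOSurfacePropertiesKey as String: [String: Any](),
    kCVPixelBufferOpenGLESCompatibilityKey as String: true
]
let attr = attrDict as CFDictionary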
let ioBufferRet = CVPixelBufferCreate(kCFAllocatorDefault,
                                      bufferWidth0,
                                      bufferHeight0,
                                      self.formatType,
                                      attr,
                                      &ioSurfaceBuffer)
if ioBufferRet != kCVReturnSuccess { print("error at `CVPixelBufferCreate`", ioBufferRet) }
CVPixelBufferLockBaseAddress(ioSurfaceBuffer!, []) // read-write lock, since we copy into this buffer
let copyBufferPlaneAddress0 = CVPixelBufferGetBaseAddressOfPlane(ioSurfaceBuffer!, 0)
let copyBufferPlaneAddress1 = CVPixelBufferGetBaseAddressOfPlane(ioSurfaceBuffer!, 1)
// Assumes source and destination planes share the same bytesPerRow.
memcpy(copyBufferPlaneAddress0, pixelBufferPlaneAddress0, bufferHeight0 * bytesPerRow0) // Y plane
memcpy(copyBufferPlaneAddress1, pixelBufferPlaneAddress1, bufferHeight1 * bytesPerRow1) // interleaved CbCr plane
glActiveTexture(GLenum(GL_TEXTURE0))
if nil != ioSurfaceBuffer && nil != media.vidTexCachePtr {
let cvRet = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                         media.vidTexCachePtr!,
                                                         ioSurfaceBuffer!,
                                                         nil,
                                                         GLenum(GL_TEXTURE_2D),
                                                         GLint(GL_RED_EXT),
                                                         GLsizei(bufferWidth0),
                                                         GLsizei(bufferHeight0),
                                                         GLenum(GL_RED_EXT),
                                                         GLenum(GL_UNSIGNED_BYTE),
                                                         0, // plane index 0: Y
                                                         &lumaTexture)
if cvRet != kCVReturnSuccess { print("0 error at `CVOpenGLESTextureCacheCreateTextureFromImage`", cvRet) }
}
if nil != lumaTexture {
glBindTexture(CVOpenGLESTextureGetTarget(lumaTexture!), CVOpenGLESTextureGetName(lumaTexture!))
}
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE)
glActiveTexture(GLenum(GL_TEXTURE1))
if nil != ioSurfaceBuffer && nil != media.vidTexCachePtr {
let cvRet = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                         media.vidTexCachePtr!,
                                                         ioSurfaceBuffer!,
                                                         nil,
                                                         GLenum(GL_TEXTURE_2D),
                                                         GLint(GL_RG_EXT),
                                                         GLsizei(bufferWidth1),
                                                         GLsizei(bufferHeight1),
                                                         GLenum(GL_RG_EXT),
                                                         GLenum(GL_UNSIGNED_BYTE),
                                                         1, // plane index 1: interleaved CbCr
                                                         &chromaTexture)
if cvRet != kCVReturnSuccess { print("1 error at `CVOpenGLESTextureCacheCreateTextureFromImage`", cvRet) }
}
if nil != chromaTexture {
glBindTexture(CVOpenGLESTextureGetTarget(chromaTexture!), CVOpenGLESTextureGetName(chromaTexture!))
}
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE)
CVPixelBufferUnlockBaseAddress(mediapixelBuffer!, .readOnly)
CVPixelBufferUnlockBaseAddress(ioSurfaceBuffer!, []) // flags must match the lock above
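For completeness, the shader samplers below are wired to those two texture units roughly like this (`program` stands in for my linked shader program, which isn't shown above):
// SamplerY -> GL_TEXTURE0, SamplerUV -> GL_TEXTURE1
glUseProgram(program)
glUniform1i(glGetUniformLocation(program, "SamplerY"), 0)
glUniform1i(glGetUniformLocation(program, "SamplerUV"), 1)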
Fragment Shader:
#version 100
precision mediump float;
varying vec2 vUV;
uniform sampler2D SamplerY;
uniform sampler2D SamplerUV;
void main() {
    mediump vec3 yuv;
    lowp vec3 rgb;
    yuv.x = texture2D(SamplerY, vUV).r;
    yuv.yz = texture2D(SamplerUV, vUV).rg - vec2(0.5, 0.5);
    // BT.709, the standard for HDTV. GLSL mat3 constructors are column-major,
    // so this reads as the transpose of the textbook matrix.
    rgb = mat3(      1,       1,      1,
                     0, -.18732, 1.8556,
               1.57481, -.46813,      0) * yuv;
    gl_FragColor = vec4(rgb, 1);
}
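Expanding that multiply by hand, the channels come out as:
r = y + 1.57481 * v                 // red comes only from Cr
g = y - 0.18732 * u - 0.46813 * v
b = y + 1.85560 * u                 // blue comes only from Cb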
The separate luminance texture looks right, but the separate chroma texture seems to contain only the Cr channel. I know that because the video is 4:2:0 the chroma planes are subsampled, so maybe I shouldn't expect to "see" the Cb channel directly, but the final result (which should be color-bar colors) looks like this. It is missing the red. (I assume this is because the output is BGRA; if it were RGBA, the blue would be missing.) How do I get the red back?
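To sanity-check the chroma data itself, I dumped the first few bytes of plane 1 of the source buffer with a quick sketch like this (plane 1 of a BiPlanar buffer interleaves Cb,Cr,Cb,Cr...; values near 128 mean "no color"):
CVPixelBufferLockBaseAddress(mediapixelBuffer!, .readOnly)
if let base = CVPixelBufferGetBaseAddressOfPlane(mediapixelBuffer!, 1) {
    let uv = base.assumingMemoryBound(to: UInt8.self)
    for i in stride(from: 0, to: 16, by: 2) {
        print("Cb:", uv[i], "Cr:", uv[i + 1])
    }
}
CVPixelBufferUnlockBaseAddress(mediapixelBuffer!, .readOnly)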
This post describes a similar issue to the one I am experiencing, but the solution there uses 3 planes (Y, U, and V separately), while I am trying to achieve this with 2 planes (Y, and interleaved UV). I tried the kCVPixelFormatType_420YpCbCr8Planar format type to get access to 3 planes, but then CVOpenGLESTextureCacheCreateTextureFromImage fails to create an IOSurface. I've also tried a few different YUV->RGB shader equations, and looked into using ffmpeg to supply the CVPixelBuffer, but I can't get it to build for my iPhone's architecture (arm64). Thank you in advance; any help would be much appreciated!