HARDWARE FRAMES: VIDEOTOOLBOX

unwrap to access iOS pixel buffer

    AVFrame *frame = av_frame_alloc();
    int ret = avcodec_receive_frame(avctx, frame);
    assert(frame->format == AV_PIX_FMT_VIDEOTOOLBOX);
    CVPixelBufferRef img = (CVPixelBufferRef)frame->data[3];

    // use pixel buffer to:
    // - render to UIImage on screen
    // - read video pixel data
    // - modify pixel data
    // - upload video frame to OpenGL tex
    //
    // or:
    // - convert back to generic software frame (sketched below)
    // - filter/render like with software decode
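The "convert back to a generic software frame" path isn't spelled out on the slide. A minimal sketch of one way to do it with FFmpeg's av_hwframe_transfer_data(), assuming the decoder was opened with a VideoToolbox hw_device_ctx so the frame carries a hw_frames_ctx; sw_frame and the error handling here are illustrative:

    #include <libavutil/frame.h>
    #include <libavutil/hwcontext.h>

    // Download the VideoToolbox surface into an ordinary software frame.
    AVFrame *sw_frame = av_frame_alloc();
    int err = av_hwframe_transfer_data(sw_frame, frame, 0);
    if (err < 0) {
        // transfer failed (e.g. the frame has no hw_frames_ctx attached)
        av_frame_free(&sw_frame);
    } else {
        // sw_frame->format is now a software pixel format (typically NV12
        // for VideoToolbox) and sw_frame->data[] points at CPU-readable
        // planes: filter, swscale, or render it like a software-decoded frame.
    }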
HARDWARE FRAMES: VIDEOTOOLBOX

transfer each plane to an OpenGL texture

    AVFrame *frame = av_frame_alloc();
    int ret = avcodec_receive_frame(avctx, frame);
    assert(frame->format == AV_PIX_FMT_VIDEOTOOLBOX);
    CVPixelBufferRef img = (CVPixelBufferRef)frame->data[3];

    int planes_nb = CVPixelBufferGetPlaneCount(img);
    for (int i = 0; i < planes_nb; i++) {
        CVOpenGLESTextureRef plane;
        CVOpenGLESTextureCacheCreateTextureFromImage( ... );  // args elided; filled-in sketch below
        GLuint tex = CVOpenGLESTextureGetName(plane);
        // pass to GL shader for rendering
    }
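Filled in, that loop could look like the following. This is a sketch under assumptions the slide doesn't state: the pixel buffer is biplanar NV12 (so two planes, luma then interleaved chroma), tex_cache is a CVOpenGLESTextureCacheRef created earlier with CVOpenGLESTextureCacheCreate() against the current EAGLContext, and plane_formats is a hypothetical per-plane format table:

    #include <CoreVideo/CVOpenGLESTextureCache.h>
    #include <OpenGLES/ES2/gl.h>

    // Per-plane GL formats for NV12 under GLES2:
    // plane 0 = Y (1 channel), plane 1 = CbCr (2 channels).
    static const GLenum plane_formats[2] = { GL_LUMINANCE, GL_LUMINANCE_ALPHA };

    size_t planes_nb = CVPixelBufferGetPlaneCount(img);
    CVOpenGLESTextureRef planes[2] = { NULL, NULL };

    for (size_t i = 0; i < planes_nb && i < 2; i++) {
        // Wrap plane i of the pixel buffer as a GLES texture (zero-copy).
        CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(
            kCFAllocatorDefault, tex_cache, img,
            NULL,                                       // texture attributes
            GL_TEXTURE_2D,
            plane_formats[i],                           // internal format
            (GLsizei)CVPixelBufferGetWidthOfPlane(img, i),
            (GLsizei)CVPixelBufferGetHeightOfPlane(img, i),
            plane_formats[i],                           // format
            GL_UNSIGNED_BYTE,
            i,                                          // plane index
            &planes[i]);
        if (err != kCVReturnSuccess)
            break;

        glActiveTexture(GL_TEXTURE0 + (GLenum)i);
        glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(planes[i]));
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        // sample plane i in the fragment shader; do YUV -> RGB there
    }

    // after drawing: drop the texture refs and flush the cache
    for (size_t i = 0; i < 2; i++)
        if (planes[i]) CFRelease(planes[i]);
    CVOpenGLESTextureCacheFlush(tex_cache, 0);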