applemedia: rework GL texture sharing

Use YUV instead of RGB textures, then convert with the new Apple-specific
shader in GstGLColorConvert. Also use GLMemory directly instead of the GL
texture upload meta, avoiding the extra texture copy we had before.
Alessandro Decina 2015-01-30 00:28:18 +11:00
parent 77d48abfab
commit f577b52e5d
4 changed files with 101 additions and 53 deletions
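
For reference, the reworked path is exercised whenever a GL sink negotiates the
memory:GLMemory caps feature with avfvideosrc or vtdec. A minimal test program
(a sketch only, not part of this commit; it assumes avfvideosrc, glimagesink and
a capture device are available):

/* Sketch: let avfvideosrc negotiate memory:GLMemory with a GL sink so that
 * the NV12 -> RGBA conversion runs in GstGLColorConvert on GstGLMemory. */
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch ("avfvideosrc ! glimagesink", NULL);
  if (pipeline == NULL)
    return 1;

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* run until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);

  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}

A decode pipeline along the lines of filesrc ! qtdemux ! h264parse ! vtdec !
glimagesink should go through the same GLMemory path on the vtdec side.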

File 1 of 4

@@ -56,7 +56,7 @@ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
         "height = " GST_VIDEO_SIZE_RANGE "; "
         GST_VIDEO_CAPS_MAKE_WITH_FEATURES
-        (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META,
+        (GST_CAPS_FEATURE_MEMORY_GL_MEMORY,
             "RGBA") "; "
 ));
@@ -398,7 +398,7 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   if (gst_format == GST_VIDEO_FORMAT_BGRA) {
     GstCaps *rgba_caps = GST_AVF_CAPS_NEW (GST_VIDEO_FORMAT_RGBA, dimensions.width, dimensions.height, fps_n, fps_d);
-    gst_caps_set_features (rgba_caps, 0, gst_caps_features_new (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, NULL));
+    gst_caps_set_features (rgba_caps, 0, gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, NULL));
     gst_caps_append (result, rgba_caps);
   }
 }
@@ -748,7 +748,11 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
     gst_query_parse_nth_allocation_meta (query, idx, &upload_meta_params);
     if (gst_structure_get (upload_meta_params, "gst.gl.GstGLContext",
         GST_GL_TYPE_CONTEXT, &context, NULL) && context) {
+      GstCaps *query_caps;
+      gst_query_parse_allocation (query, &query_caps, NULL);
       textureCache = gst_core_video_texture_cache_new (context);
+      gst_core_video_texture_cache_set_format (textureCache,
+          "NV12", query_caps);
       gst_object_unref (context);
     }
   }
@@ -875,13 +879,8 @@ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
   }
   CFRelease (sbuf);
-  if (textureCache != NULL) {
-    GstVideoGLTextureType texture_types[4] = {GST_VIDEO_GL_TEXTURE_TYPE_RGBA, 0};
-    gst_buffer_add_video_gl_texture_upload_meta (*buf,
-        GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_NORMAL,
-        1, texture_types,
-        gst_core_video_texture_cache_upload, textureCache, NULL, NULL);
-  }
+  if (textureCache != NULL)
+    *buf = gst_core_video_texture_cache_get_gl_buffer (textureCache, *buf);
   GST_BUFFER_OFFSET (*buf) = offset++;
   GST_BUFFER_OFFSET_END (*buf) = GST_BUFFER_OFFSET (*buf) + 1;

File 2 of 4

@@ -34,11 +34,18 @@ typedef struct _GstCoreVideoTextureCache
 #else
   CVOpenGLESTextureCacheRef cache;
 #endif
+  GstVideoInfo input_info;
+  GstVideoInfo output_info;
+  GstGLColorConvert *convert;
 } GstCoreVideoTextureCache;
 GstCoreVideoTextureCache *gst_core_video_texture_cache_new (GstGLContext * ctx);
 void gst_core_video_texture_cache_free (GstCoreVideoTextureCache * cache);
+void gst_core_video_texture_cache_set_format (GstCoreVideoTextureCache * cache,
+    const gchar * input_format, GstCaps * out_caps);
-gboolean gst_core_video_texture_cache_upload (GstVideoGLTextureUploadMeta * meta, guint texture_id[4]);
+GstBuffer * gst_core_video_texture_cache_get_gl_buffer (GstCoreVideoTextureCache * cache,
+    GstBuffer * cv_buffer);
 G_END_DECLS

File 3 of 4

@@ -36,6 +36,8 @@ gst_core_video_texture_cache_new (GstGLContext * ctx)
   GstCoreVideoTextureCache *cache = g_new0 (GstCoreVideoTextureCache, 1);
   cache->ctx = gst_object_ref (ctx);
+  gst_video_info_init (&cache->input_info);
+  cache->convert = gst_gl_color_convert_new (cache->ctx);
 #if !HAVE_IOS
   CGLPixelFormatObj pixelFormat =
@@ -62,30 +64,51 @@ gst_core_video_texture_cache_free (GstCoreVideoTextureCache * cache)
 #else
   /* FIXME: how do we release ->cache ? */
 #endif
+  gst_object_unref (cache->convert);
   gst_object_unref (cache->ctx);
   g_free (cache);
 }
-gboolean
-gst_core_video_texture_cache_upload (GstVideoGLTextureUploadMeta * meta,
-    guint texture_id[4])
+void
+gst_core_video_texture_cache_set_format (GstCoreVideoTextureCache * cache,
+    const gchar * input_format, GstCaps * out_caps)
 {
-  g_return_val_if_fail (meta != NULL, FALSE);
+  GstCaps *in_caps;
+  GstCapsFeatures *features;
-  GstCoreVideoTextureCache *cache =
-      (GstCoreVideoTextureCache *) meta->user_data;
-  const GstGLFuncs *gl = cache->ctx->gl_vtable;
+  g_return_if_fail (gst_caps_is_fixed (out_caps));
+  out_caps = gst_caps_copy (out_caps);
+  features = gst_caps_get_features (out_caps, 0);
+  gst_caps_features_add (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
+  gst_video_info_from_caps (&cache->output_info, out_caps);
+  in_caps = gst_caps_copy (out_caps);
+  gst_caps_set_simple (in_caps, "format", G_TYPE_STRING, input_format, NULL);
+  features = gst_caps_get_features (in_caps, 0);
+  gst_caps_features_add (features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
+  gst_video_info_from_caps (&cache->input_info, in_caps);
+  gst_gl_color_convert_set_caps (cache->convert, in_caps, out_caps);
+  gst_caps_unref (out_caps);
+  gst_caps_unref (in_caps);
+}
+CFTypeRef
+texture_from_buffer (GstCoreVideoTextureCache * cache,
+    GstBuffer * buffer, GLuint * texture_id, GLuint * texture_target)
+{
 #if !HAVE_IOS
   CVOpenGLTextureRef texture = NULL;
 #else
   CVOpenGLESTextureRef texture = NULL;
 #endif
-  GstVideoMeta *video_meta = gst_buffer_get_video_meta (meta->buffer);
   GstCoreMediaMeta *cm_meta =
-      (GstCoreMediaMeta *) gst_buffer_get_meta (meta->buffer,
+      (GstCoreMediaMeta *) gst_buffer_get_meta (buffer,
       gst_core_media_meta_api_get_type ());
   GstCoreVideoMeta *cv_meta =
-      (GstCoreVideoMeta *) gst_buffer_get_meta (meta->buffer,
+      (GstCoreVideoMeta *) gst_buffer_get_meta (buffer,
       gst_core_video_meta_api_get_type ());
   CVPixelBufferRef pixel_buf;
   if (cm_meta)
@@ -95,29 +118,41 @@ gst_core_video_texture_cache_upload (GstVideoGLTextureUploadMeta * meta,
 #if !HAVE_IOS
   CVOpenGLTextureCacheCreateTextureFromImage (kCFAllocatorDefault,
       cache->cache, pixel_buf, NULL, &texture);
+  *texture_id = CVOpenGLTextureGetName (texture);
+  *texture_target = CVOpenGLTextureGetTarget (texture);
+  CVOpenGLTextureCacheFlush (cache->cache, 0);
 #else
   CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault,
-      cache->cache, cm_meta->image_buf, NULL, GL_TEXTURE_2D,
-      GL_RGBA, video_meta->width, video_meta->height, GL_RGBA, GL_UNSIGNED_BYTE,
-      0, &texture);
+      cache->cache, cm_meta->image_buf, NULL, GL_TEXTURE_2D, GL_RGBA,
+      GST_VIDEO_INFO_WIDTH (&cache->input_info),
+      GST_VIDEO_INFO_HEIGHT (&cache->input_info),
+      GL_RGBA, GL_UNSIGNED_BYTE, 0, &texture);
+  *texture_id = CVOpenGLESTextureGetName (texture);
+  *texture_target = CVOpenGLESTextureGetTarget (texture);
+  CVOpenGLESTextureCacheFlush (cache->cache, 0);
 #endif
-  GLuint fboId;
-  gl->GenFramebuffers (1, &fboId);
-  gl->BindFramebuffer (GL_FRAMEBUFFER, fboId);
-  gl->FramebufferTexture2D (GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
-#if !HAVE_IOS
-      CVOpenGLTextureGetTarget (texture), CVOpenGLTextureGetName (texture),
-#else
-      CVOpenGLESTextureGetTarget (texture), CVOpenGLESTextureGetName (texture),
-#endif
-      0);
-  gl->BindTexture (GL_TEXTURE_2D, texture_id[0]);
-  gl->CopyTexImage2D (GL_TEXTURE_2D, 0, GL_RGBA8, 0, 0, video_meta->width,
-      video_meta->height, 0);
-  gl->BindTexture (GL_TEXTURE_2D, 0);
-  gl->BindFramebuffer (GL_FRAMEBUFFER, 0);
-  gl->DeleteFramebuffers (1, &fboId);
-  return TRUE;
+  return (CFTypeRef) texture;
 }
+GstBuffer *
+gst_core_video_texture_cache_get_gl_buffer (GstCoreVideoTextureCache * cache,
+    GstBuffer * cv_buffer)
+{
+  const GstGLFuncs *gl;
+  GstBuffer *rgb_buffer;
+  CFTypeRef texture;
+  GLuint texture_id, texture_target;
+  GstMemory *memory;
+  gl = cache->ctx->gl_vtable;
+  texture = texture_from_buffer (cache, cv_buffer, &texture_id, &texture_target);
+  memory = (GstMemory *) gst_gl_memory_wrapped_texture (cache->ctx, texture_id, texture_target,
+      &cache->input_info, 0, NULL, NULL, NULL);
+  gst_buffer_append_memory (cv_buffer, memory);
+  rgb_buffer = gst_gl_color_convert_perform (cache->convert, cv_buffer);
+  gst_buffer_unref (cv_buffer);
+  CFRelease (texture);
+  return rgb_buffer;
+}
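
Taken together, the cache is now driven as: _new() with the GL context from the
allocation query, _set_format() with the CoreVideo input format and the
negotiated output caps, then _get_gl_buffer() per frame. A condensed sketch (not
part of this commit; the convert_one_frame helper is hypothetical, declarations
from the header above are assumed visible, error handling and renegotiation are
omitted):

/* Condensed sketch of the new flow; real elements keep the cache for the
 * whole stream and only call set_format() again on renegotiation. */
static GstBuffer *
convert_one_frame (GstGLContext * ctx, GstCaps * out_caps, GstBuffer * cv_buffer)
{
  GstCoreVideoTextureCache *cache;
  GstBuffer *gl_buffer;

  /* ctx comes from the downstream allocation query, out_caps from the
   * negotiated src caps; "NV12" is what avfvideosrc passes as input format */
  cache = gst_core_video_texture_cache_new (ctx);
  gst_core_video_texture_cache_set_format (cache, "NV12", out_caps);

  /* wraps the CoreVideo texture as GstGLMemory and lets GstGLColorConvert
   * produce the RGBA buffer; cv_buffer is unreffed by the call */
  gl_buffer = gst_core_video_texture_cache_get_gl_buffer (cache, cv_buffer);

  gst_core_video_texture_cache_free (cache);
  return gl_buffer;
}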

File 4 of 4

@@ -107,7 +107,7 @@ CFSTR ("EnableHardwareAcceleratedVideoDecoder");
 #define VIDEO_SRC_CAPS \
     GST_VIDEO_CAPS_MAKE(GST_VTDEC_VIDEO_FORMAT_STR) ";" \
     GST_VIDEO_CAPS_MAKE_WITH_FEATURES \
-    (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, \
+    (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, \
     "RGBA") ";"
 G_DEFINE_TYPE_WITH_CODE (GstVtdec, gst_vtdec, GST_TYPE_VIDEO_DECODER,
@@ -301,8 +301,7 @@ gst_vtdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
   output_state->caps = gst_video_info_to_caps (&output_state->info);
   if (output_state->info.finfo->format == GST_VIDEO_FORMAT_RGBA) {
     gst_caps_set_features (output_state->caps, 0,
-        gst_caps_features_new
-        (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, NULL));
+        gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, NULL));
   }
   return TRUE;
@@ -397,7 +396,11 @@ gst_vtdec_create_session (GstVtdec * vtdec, GstVideoFormat format)
       cv_format = kCVPixelFormatType_422YpCbCr8;
       break;
     case GST_VIDEO_FORMAT_RGBA:
-      cv_format = kCVPixelFormatType_32BGRA;
+#ifdef HAVE_IOS
+      cv_format = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
+#else
+      cv_format = kCVPixelFormatType_422YpCbCr8;
+#endif
       break;
     default:
       g_warn_if_reached ();
@@ -679,8 +682,16 @@ gst_vtdec_push_frames_if_needed (GstVtdec * vtdec, gboolean drain,
   /* negotiate now so that we know whether we need to use the GL upload meta or
    * not */
-  if (gst_pad_check_reconfigure (decoder->srcpad))
+  if (gst_pad_check_reconfigure (decoder->srcpad)) {
     gst_video_decoder_negotiate (decoder);
+    if (vtdec->texture_cache) {
+      GstVideoCodecState *output_state =
+          gst_video_decoder_get_output_state (decoder);
+      gst_core_video_texture_cache_set_format (vtdec->texture_cache,
+          GST_VTDEC_VIDEO_FORMAT_STR, output_state->caps);
+      gst_video_codec_state_unref (output_state);
+    }
+  }
   if (drain)
     VTDecompressionSessionWaitForAsynchronousFrames (vtdec->session);
@@ -691,14 +702,10 @@
   while ((g_async_queue_length (vtdec->reorder_queue) >=
       vtdec->reorder_queue_length) || drain || flush) {
     frame = (GstVideoCodecFrame *) g_async_queue_try_pop (vtdec->reorder_queue);
-    if (vtdec->texture_cache != NULL) {
-      GstVideoGLTextureType texture_types[4] =
-          { GST_VIDEO_GL_TEXTURE_TYPE_RGBA, 0 };
-      gst_buffer_add_video_gl_texture_upload_meta (frame->output_buffer,
-          GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_NORMAL, 1, texture_types,
-          gst_core_video_texture_cache_upload, vtdec->texture_cache, NULL,
-          NULL);
-    }
+    if (vtdec->texture_cache != NULL)
+      frame->output_buffer =
+          gst_core_video_texture_cache_get_gl_buffer (vtdec->texture_cache,
+          frame->output_buffer);
     /* we need to check this in case reorder_queue_length=0 (jpeg for
      * example) or we're draining/flushing