applemedia: vtdec: improve negotiation

Rework negotiation implementing GstVideoDecoder::negotiate. Make it possible to
switch texture sharing on and off at runtime. Useful to (eventually) turn
texture sharing on in pipelines where glimagesink is linked only after
decoding has already started (for example OWR).
This commit is contained in:
Alessandro Decina 2015-11-17 11:21:27 +11:00
parent 8f14882b44
commit 119e09eac3
2 changed files with 111 additions and 102 deletions

View file

@@ -59,8 +59,7 @@ static void gst_vtdec_finalize (GObject * object);
static gboolean gst_vtdec_start (GstVideoDecoder * decoder); static gboolean gst_vtdec_start (GstVideoDecoder * decoder);
static gboolean gst_vtdec_stop (GstVideoDecoder * decoder); static gboolean gst_vtdec_stop (GstVideoDecoder * decoder);
static gboolean gst_vtdec_decide_allocation (GstVideoDecoder * decoder, static gboolean gst_vtdec_negotiate (GstVideoDecoder * decoder);
GstQuery * query);
static gboolean gst_vtdec_set_format (GstVideoDecoder * decoder, static gboolean gst_vtdec_set_format (GstVideoDecoder * decoder,
GstVideoCodecState * state); GstVideoCodecState * state);
static gboolean gst_vtdec_flush (GstVideoDecoder * decoder); static gboolean gst_vtdec_flush (GstVideoDecoder * decoder);
@@ -156,8 +155,7 @@ gst_vtdec_class_init (GstVtdecClass * klass)
gobject_class->finalize = gst_vtdec_finalize; gobject_class->finalize = gst_vtdec_finalize;
video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_vtdec_start); video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_vtdec_start);
video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_vtdec_stop); video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_vtdec_stop);
video_decoder_class->decide_allocation = video_decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_vtdec_negotiate);
GST_DEBUG_FUNCPTR (gst_vtdec_decide_allocation);
video_decoder_class->set_format = GST_DEBUG_FUNCPTR (gst_vtdec_set_format); video_decoder_class->set_format = GST_DEBUG_FUNCPTR (gst_vtdec_set_format);
video_decoder_class->flush = GST_DEBUG_FUNCPTR (gst_vtdec_flush); video_decoder_class->flush = GST_DEBUG_FUNCPTR (gst_vtdec_flush);
video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_vtdec_finish); video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_vtdec_finish);
@@ -199,6 +197,10 @@ gst_vtdec_stop (GstVideoDecoder * decoder)
{ {
GstVtdec *vtdec = GST_VTDEC (decoder); GstVtdec *vtdec = GST_VTDEC (decoder);
if (vtdec->input_state)
gst_video_codec_state_unref (vtdec->input_state);
vtdec->input_state = NULL;
if (vtdec->session) if (vtdec->session)
gst_vtdec_invalidate_session (vtdec); gst_vtdec_invalidate_session (vtdec);
@@ -211,46 +213,34 @@ gst_vtdec_stop (GstVideoDecoder * decoder)
return TRUE; return TRUE;
} }
static gboolean static GstGLContext *
gst_vtdec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query) query_gl_context (GstVtdec * vtdec)
{ {
gboolean ret;
GstCaps *caps;
GstCapsFeatures *features;
GstVtdec *vtdec = GST_VTDEC (decoder);
ret =
GST_VIDEO_DECODER_CLASS (gst_vtdec_parent_class)->decide_allocation
(decoder, query);
if (!ret)
goto out;
gst_query_parse_allocation (query, &caps, NULL);
if (caps) {
GstGLContext *gl_context = NULL; GstGLContext *gl_context = NULL;
features = gst_caps_get_features (caps, 0);
if (gst_caps_features_contains (features,
GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
GstContext *context = NULL; GstContext *context = NULL;
GstQuery *query = gst_query_new_context ("gst.gl.local_context"); GstQuery *query;
if (gst_pad_peer_query (GST_VIDEO_DECODER_SRC_PAD (decoder), query)) {
query = gst_query_new_context ("gst.gl.local_context");
if (gst_pad_peer_query (GST_VIDEO_DECODER_SRC_PAD (vtdec), query)) {
gst_query_parse_context (query, &context); gst_query_parse_context (query, &context);
if (context) { if (context) {
const GstStructure *s = gst_context_get_structure (context); const GstStructure *s = gst_context_get_structure (context);
gst_structure_get (s, "context", GST_GL_TYPE_CONTEXT, &gl_context, gst_structure_get (s, "context", GST_GL_TYPE_CONTEXT, &gl_context, NULL);
NULL);
} }
} }
gst_query_unref (query); gst_query_unref (query);
if (context) { return gl_context;
}
static void
setup_texture_cache (GstVtdec * vtdec, GstGLContext * context)
{
GstVideoFormat internal_format; GstVideoFormat internal_format;
GstVideoCodecState *output_state = GstVideoCodecState *output_state =
gst_video_decoder_get_output_state (decoder); gst_video_decoder_get_output_state (GST_VIDEO_DECODER (vtdec));
GST_INFO_OBJECT (decoder, "pushing textures. GL context %p", context); GST_INFO_OBJECT (vtdec, "pushing textures. GL context %p", context);
if (vtdec->texture_cache) if (vtdec->texture_cache)
gst_core_video_texture_cache_free (vtdec->texture_cache); gst_core_video_texture_cache_free (vtdec->texture_cache);
@@ -259,64 +249,76 @@ gst_vtdec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
#else #else
internal_format = GST_VIDEO_FORMAT_UYVY; internal_format = GST_VIDEO_FORMAT_UYVY;
#endif #endif
vtdec->texture_cache = gst_core_video_texture_cache_new (gl_context); vtdec->texture_cache = gst_core_video_texture_cache_new (context);
gst_core_video_texture_cache_set_format (vtdec->texture_cache, gst_core_video_texture_cache_set_format (vtdec->texture_cache,
internal_format, output_state->caps); internal_format, output_state->caps);
gst_video_codec_state_unref (output_state); gst_video_codec_state_unref (output_state);
gst_object_unref (gl_context);
} else {
GST_WARNING_OBJECT (decoder,
"got memory:GLMemory caps but not GL context from downstream element");
}
}
}
out:
return ret;
} }
static gboolean static gboolean
gst_vtdec_negotiate_output_format (GstVtdec * vtdec, caps_filter_out_gl_memory (GstCapsFeatures * features, GstStructure * structure,
GstVideoCodecState * input_state) gpointer user_data)
{
return !gst_caps_features_contains (features,
GST_CAPS_FEATURE_MEMORY_GL_MEMORY);
}
static gboolean
gst_vtdec_negotiate (GstVideoDecoder * decoder)
{ {
GstCaps *caps = NULL, *peercaps = NULL, *templcaps;
GstVideoFormat output_format;
GstVideoCodecState *output_state = NULL; GstVideoCodecState *output_state = NULL;
GstCapsFeatures *features; GstCaps *caps = NULL, *prevcaps = NULL;
GstVideoFormat format;
GstStructure *structure; GstStructure *structure;
const gchar *s; const gchar *s;
GstGLContext *context;
GstVtdec *vtdec;
gboolean ret = TRUE;
peercaps = gst_pad_peer_query_caps (GST_VIDEO_DECODER_SRC_PAD (vtdec), NULL); vtdec = GST_VTDEC (decoder);
/* Check if output supports GL caps by preference */
templcaps = gst_pad_get_pad_template_caps (GST_VIDEO_DECODER_SRC_PAD (vtdec));
caps = caps =
gst_caps_intersect_full (templcaps, peercaps, GST_CAPS_INTERSECT_FIRST); gst_caps_make_writable (gst_pad_query_caps (GST_VIDEO_DECODER_SRC_PAD
(vtdec), NULL));
gst_caps_unref (peercaps); context = query_gl_context (vtdec);
gst_caps_unref (templcaps); if (!context)
gst_caps_filter_and_map_in_place (caps, caps_filter_out_gl_memory, NULL);
caps = gst_caps_truncate (caps); caps = gst_caps_truncate (caps);
structure = gst_caps_get_structure (caps, 0); structure = gst_caps_get_structure (caps, 0);
s = gst_structure_get_string (structure, "format"); s = gst_structure_get_string (structure, "format");
output_format = gst_video_format_from_string (s); format = gst_video_format_from_string (s);
features = gst_caps_features_copy (gst_caps_get_features (caps, 0));
gst_caps_unref (caps); gst_caps_unref (caps);
if (!gst_vtdec_create_session (vtdec, output_format)) {
gst_caps_features_free (features);
return FALSE;
}
output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (vtdec), output_state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (vtdec),
output_format, vtdec->video_info.width, vtdec->video_info.height, format, vtdec->video_info.width, vtdec->video_info.height,
input_state); vtdec->input_state);
output_state->caps = gst_video_info_to_caps (&output_state->info); output_state->caps = gst_video_info_to_caps (&output_state->info);
gst_caps_set_features (output_state->caps, 0, features); if (output_state->info.finfo->format == GST_VIDEO_FORMAT_RGBA) {
setup_texture_cache (vtdec, context);
gst_caps_set_features (output_state->caps, 0,
gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, NULL));
}
if (context)
gst_object_unref (context);
return TRUE; GST_INFO_OBJECT (vtdec, "negotiated output format %" GST_PTR_FORMAT,
output_state->caps);
prevcaps = gst_pad_get_current_caps (decoder->srcpad);
if (!prevcaps || !gst_caps_is_equal (prevcaps, output_state->caps)) {
if (vtdec->session) {
gst_vtdec_push_frames_if_needed (vtdec, TRUE, FALSE);
gst_vtdec_invalidate_session (vtdec);
}
ret = gst_vtdec_create_session (vtdec, format);
}
if (prevcaps)
gst_caps_unref (prevcaps);
if (!ret)
return ret;
return GST_VIDEO_DECODER_CLASS (gst_vtdec_parent_class)->negotiate (decoder);
} }
static gboolean static gboolean
@@ -345,8 +347,10 @@ gst_vtdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
return TRUE; return TRUE;
} }
if (vtdec->session) if (vtdec->session) {
gst_vtdec_push_frames_if_needed (vtdec, TRUE, FALSE);
gst_vtdec_invalidate_session (vtdec); gst_vtdec_invalidate_session (vtdec);
}
gst_video_info_from_caps (&vtdec->video_info, state->caps); gst_video_info_from_caps (&vtdec->video_info, state->caps);
@@ -366,8 +370,9 @@ gst_vtdec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
CFRelease (vtdec->format_description); CFRelease (vtdec->format_description);
vtdec->format_description = format_description; vtdec->format_description = format_description;
if (!gst_vtdec_negotiate_output_format (vtdec, state)) if (vtdec->input_state)
return FALSE; gst_video_codec_state_unref (vtdec->input_state);
vtdec->input_state = gst_video_codec_state_ref (state);
return TRUE; return TRUE;
} }
@@ -444,6 +449,16 @@ error:
goto out; goto out;
} }
static void
gst_vtdec_invalidate_session (GstVtdec * vtdec)
{
g_return_if_fail (vtdec->session);
VTDecompressionSessionInvalidate (vtdec->session);
CFRelease (vtdec->session);
vtdec->session = NULL;
}
static gboolean static gboolean
gst_vtdec_create_session (GstVtdec * vtdec, GstVideoFormat format) gst_vtdec_create_session (GstVtdec * vtdec, GstVideoFormat format)
{ {
@@ -453,6 +468,8 @@ gst_vtdec_create_session (GstVtdec * vtdec, GstVideoFormat format)
OSStatus status; OSStatus status;
guint32 cv_format = 0; guint32 cv_format = 0;
g_return_val_if_fail (vtdec->session == NULL, FALSE);
switch (format) { switch (format) {
case GST_VIDEO_FORMAT_NV12: case GST_VIDEO_FORMAT_NV12:
cv_format = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; cv_format = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
@@ -514,16 +531,6 @@ gst_vtdec_create_session (GstVtdec * vtdec, GstVideoFormat format)
return TRUE; return TRUE;
} }
static void
gst_vtdec_invalidate_session (GstVtdec * vtdec)
{
g_return_if_fail (vtdec->session);
VTDecompressionSessionInvalidate (vtdec->session);
CFRelease (vtdec->session);
vtdec->session = NULL;
}
static CMFormatDescriptionRef static CMFormatDescriptionRef
create_format_description (GstVtdec * vtdec, CMVideoCodecType cm_format) create_format_description (GstVtdec * vtdec, CMVideoCodecType cm_format)
{ {
@@ -793,8 +800,9 @@ gst_vtdec_push_frames_if_needed (GstVtdec * vtdec, gboolean drain,
*/ */
/* negotiate now so that we know whether we need to use the GL upload meta or /* negotiate now so that we know whether we need to use the GL upload meta or
* not */ * not */
if (gst_pad_check_reconfigure (decoder->srcpad)) if (gst_pad_check_reconfigure (decoder->srcpad)) {
gst_video_decoder_negotiate (decoder); gst_video_decoder_negotiate (decoder);
}
if (drain) if (drain)
VTDecompressionSessionWaitForAsynchronousFrames (vtdec->session); VTDecompressionSessionWaitForAsynchronousFrames (vtdec->session);

View file

@@ -41,6 +41,7 @@ typedef struct _GstVtdecClass GstVtdecClass;
struct _GstVtdec struct _GstVtdec
{ {
GstVideoDecoder base_vtdec; GstVideoDecoder base_vtdec;
GstVideoCodecState *input_state;
GstVideoInfo video_info; GstVideoInfo video_info;
CMFormatDescriptionRef format_description; CMFormatDescriptionRef format_description;
VTDecompressionSessionRef session; VTDecompressionSessionRef session;