nvdecoder: Handle GstContext in helper object

... and move common code to helper object

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/5409>
Seungha Yang authored on 2023-09-27 23:02:43 +09:00, committed by GStreamer Marge Bot
parent 91e0c3aafa
commit 57e0a0bd61
7 changed files with 247 additions and 321 deletions
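For orientation before the per-file diffs, here is a condensed, hypothetical sketch of the subclass pattern this commit converges on. GstNvFooDec, GstNvFooDecClass, GST_NV_FOO_DEC_GET_CLASS and the omitted GObject/parent_class wiring are illustrative placeholders, not code from this merge request, and the real subclasses derive from the codec base classes (GstAV1Decoder, GstH264Decoder, ...) rather than GstVideoDecoder directly; only the gst_nv_decoder_*() calls are the helper API introduced below. The codec element now just holds a GstNvDecoder created from the class's CUDA device id and forwards context handling, queries and open/close to it.

/* Hypothetical condensed subclass -- class_init, properties and
 * parent_class assignment omitted for brevity */
typedef struct _GstNvFooDec
{
  GstVideoDecoder parent;
  GstNvDecoder *decoder;        /* helper now owns the CUDA context/stream */
} GstNvFooDec;

static void
gst_nv_foo_dec_init (GstNvFooDec * self)
{
  GstNvFooDecClass *klass = GST_NV_FOO_DEC_GET_CLASS (self);

  /* created eagerly from the per-class device id; no CUDA context needed yet */
  self->decoder = gst_nv_decoder_new (klass->cuda_device_id);
}

static void
gst_nv_foo_dec_set_context (GstElement * element, GstContext * context)
{
  GstNvFooDec *self = (GstNvFooDec *) element;

  /* CUDA and GL contexts are stored/forwarded by the helper object */
  gst_nv_decoder_handle_set_context (self->decoder, element, context);

  GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}

static gboolean
gst_nv_foo_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
  GstNvFooDec *self = (GstNvFooDec *) decoder;

  /* context (and GL) queries are answered by the helper; sink_query is identical */
  if (gst_nv_decoder_handle_query (self->decoder, GST_ELEMENT (decoder), query))
    return TRUE;

  return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
}

static gboolean
gst_nv_foo_dec_open (GstVideoDecoder * decoder)
{
  GstNvFooDec *self = (GstNvFooDec *) decoder;

  /* CUDA context/stream creation moved into gst_nv_decoder_open() */
  return gst_nv_decoder_open (self->decoder, GST_ELEMENT (decoder));
}

static gboolean
gst_nv_foo_dec_close (GstVideoDecoder * decoder)
{
  GstNvFooDec *self = (GstNvFooDec *) decoder;

  /* releases the context, stream and GL objects held by the helper */
  return gst_nv_decoder_close (self->decoder);
}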


@ -48,7 +48,6 @@ typedef struct _GstNvAV1Dec
{
GstAV1Decoder parent;
GstCudaContext *context;
GstNvDecoder *decoder;
GstAV1SequenceHeaderOBU seq_hdr;
@ -103,6 +102,7 @@ static GTypeClass *parent_class = nullptr;
#define GST_NV_AV1_DEC_GET_CLASS(object) \
(G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstNvAV1DecClass))
static void gst_nv_av1_dec_finalize (GObject * object);
static void gst_nv_av1_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_nv_av1_dec_get_property (GObject * object, guint prop_id,
@ -116,6 +116,8 @@ static gboolean gst_nv_av1_dec_stop (GstVideoDecoder * decoder);
static gboolean gst_nv_av1_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_nv_av1_dec_decide_allocation (GstVideoDecoder *
decoder, GstQuery * query);
static gboolean gst_nv_av1_dec_sink_query (GstVideoDecoder * decoder,
GstQuery * query);
static gboolean gst_nv_av1_dec_src_query (GstVideoDecoder * decoder,
GstQuery * query);
static gboolean gst_nv_av1_dec_sink_event (GstVideoDecoder * decoder,
@ -147,6 +149,7 @@ gst_nv_av1_dec_class_init (GstNvAV1DecClass * klass,
GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
GstAV1DecoderClass *av1decoder_class = GST_AV1_DECODER_CLASS (klass);
object_class->finalize = gst_nv_av1_dec_finalize;
object_class->set_property = gst_nv_av1_dec_set_property;
object_class->get_property = gst_nv_av1_dec_get_property;
@ -237,6 +240,7 @@ gst_nv_av1_dec_class_init (GstNvAV1DecClass * klass,
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_nv_av1_dec_negotiate);
decoder_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_nv_av1_dec_decide_allocation);
decoder_class->sink_query = GST_DEBUG_FUNCPTR (gst_nv_av1_dec_sink_query);
decoder_class->src_query = GST_DEBUG_FUNCPTR (gst_nv_av1_dec_src_query);
decoder_class->sink_event = GST_DEBUG_FUNCPTR (gst_nv_av1_dec_sink_event);
@ -269,10 +273,24 @@ gst_nv_av1_dec_class_init (GstNvAV1DecClass * klass,
static void
gst_nv_av1_dec_init (GstNvAV1Dec * self)
{
GstNvAV1DecClass *klass = GST_NV_AV1_DEC_GET_CLASS (self);
self->decoder = gst_nv_decoder_new (klass->cuda_device_id);
self->num_output_surfaces = DEFAULT_NUM_OUTPUT_SURFACES;
self->max_display_delay = DEFAULT_MAX_DISPLAY_DELAY;
}
static void
gst_nv_av1_dec_finalize (GObject * object)
{
GstNvAV1Dec *self = GST_NV_AV1_DEC (object);
gst_object_unref (self->decoder);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_nv_av1_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
@ -331,20 +349,9 @@ static void
gst_nv_av1_dec_set_context (GstElement * element, GstContext * context)
{
GstNvAV1Dec *self = GST_NV_AV1_DEC (element);
GstNvAV1DecClass *klass = GST_NV_AV1_DEC_GET_CLASS (self);
GST_DEBUG_OBJECT (self, "set context %s",
gst_context_get_context_type (context));
gst_nv_decoder_handle_set_context (self->decoder, element, context);
if (gst_cuda_handle_set_context (element, context, klass->cuda_device_id,
&self->context)) {
goto done;
}
if (self->decoder)
gst_nv_decoder_handle_set_context (self->decoder, element, context);
done:
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
@ -352,23 +359,8 @@ static gboolean
gst_nv_av1_dec_open (GstVideoDecoder * decoder)
{
GstNvAV1Dec *self = GST_NV_AV1_DEC (decoder);
GstNvAV1DecClass *klass = GST_NV_AV1_DEC_GET_CLASS (self);
if (!gst_cuda_ensure_element_context (GST_ELEMENT (self),
klass->cuda_device_id, &self->context)) {
GST_ERROR_OBJECT (self, "Required element data is unavailable");
return FALSE;
}
self->decoder = gst_nv_decoder_new (self->context);
if (!self->decoder) {
GST_ERROR_OBJECT (self, "Failed to create decoder object");
gst_clear_object (&self->context);
return FALSE;
}
return TRUE;
return gst_nv_decoder_open (self->decoder, GST_ELEMENT (decoder));
}
static void
@ -388,9 +380,6 @@ gst_nv_av1_dec_close (GstVideoDecoder * decoder)
{
GstNvAV1Dec *self = GST_NV_AV1_DEC (decoder);
gst_clear_object (&self->decoder);
gst_clear_object (&self->context);
gst_nv_av1_dec_reset_bitstream_params (self);
g_free (self->bitstream_buffer);
@ -402,7 +391,7 @@ gst_nv_av1_dec_close (GstVideoDecoder * decoder)
self->bitstream_buffer_alloc_size = 0;
self->tile_offsets_alloc_len = 0;
return TRUE;
return gst_nv_decoder_close (self->decoder);
}
static gboolean
@ -413,8 +402,7 @@ gst_nv_av1_dec_stop (GstVideoDecoder * decoder)
ret = GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder);
if (self->decoder)
gst_nv_decoder_reset (self->decoder);
gst_nv_decoder_reset (self->decoder);
return ret;
}
@ -427,7 +415,8 @@ gst_nv_av1_dec_negotiate (GstVideoDecoder * decoder)
GST_DEBUG_OBJECT (self, "negotiate");
gst_nv_decoder_negotiate (self->decoder, decoder, av1dec->input_state);
if (!gst_nv_decoder_negotiate (self->decoder, decoder, av1dec->input_state))
return FALSE;
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
}
@ -446,24 +435,24 @@ gst_nv_av1_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
(decoder, query);
}
static gboolean
gst_nv_av1_dec_sink_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstNvAV1Dec *self = GST_NV_AV1_DEC (decoder);
if (gst_nv_decoder_handle_query (self->decoder, GST_ELEMENT (decoder), query))
return TRUE;
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
}
static gboolean
gst_nv_av1_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstNvAV1Dec *self = GST_NV_AV1_DEC (decoder);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_cuda_handle_context_query (GST_ELEMENT (decoder), query,
self->context)) {
return TRUE;
} else if (self->decoder &&
gst_nv_decoder_handle_context_query (self->decoder, decoder, query)) {
return TRUE;
}
break;
default:
break;
}
if (gst_nv_decoder_handle_query (self->decoder, GST_ELEMENT (decoder), query))
return TRUE;
return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
}
@ -473,9 +462,6 @@ gst_nv_av1_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstNvAV1Dec *self = GST_NV_AV1_DEC (decoder);
if (!self->decoder)
goto done;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
gst_nv_decoder_set_flushing (self->decoder, TRUE);
@ -487,7 +473,6 @@ gst_nv_av1_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
break;
}
done:
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
}


@ -81,6 +81,8 @@ struct _GstNvDecoder
{
GstObject parent;
guint device_id;
GstNvDecObject *object;
GstCudaContext *context;
GstCudaStream *stream;
@ -107,7 +109,6 @@ struct _GstNvDecoder
static void gst_nv_decoder_dispose (GObject * object);
static void gst_nv_decoder_finalize (GObject * object);
static void gst_nv_decoder_reset_unlocked (GstNvDecoder * self);
#define parent_class gst_nv_decoder_parent_class
G_DEFINE_TYPE (GstNvDecoder, gst_nv_decoder, GST_TYPE_OBJECT);
@ -132,13 +133,7 @@ gst_nv_decoder_dispose (GObject * object)
{
GstNvDecoder *self = GST_NV_DECODER (object);
gst_nv_decoder_reset_unlocked (self);
gst_clear_cuda_stream (&self->stream);
gst_clear_object (&self->context);
gst_clear_object (&self->gl_display);
gst_clear_object (&self->gl_context);
gst_clear_object (&self->other_gl_context);
gst_nv_decoder_close (self);
G_OBJECT_CLASS (parent_class)->dispose (object);
}
@ -200,31 +195,30 @@ output_format_from_video_format (GstVideoFormat format)
}
GstNvDecoder *
gst_nv_decoder_new (GstCudaContext * context)
gst_nv_decoder_new (guint device_id)
{
GstNvDecoder *self;
g_return_val_if_fail (GST_IS_CUDA_CONTEXT (context), nullptr);
self = (GstNvDecoder *) g_object_new (GST_TYPE_NV_DECODER, nullptr);
self->context = (GstCudaContext *) gst_object_ref (context);
self->device_id = device_id;
gst_object_ref_sink (self);
self->stream = gst_cuda_stream_new (self->context);
if (!self->stream) {
GST_WARNING_OBJECT (self,
"Could not create CUDA stream, will use default stream");
}
return self;
}
gboolean
gst_nv_decoder_is_configured (GstNvDecoder * decoder)
gst_nv_decoder_open (GstNvDecoder * decoder, GstElement * element)
{
g_return_val_if_fail (GST_IS_NV_DECODER (decoder), FALSE);
if (!gst_cuda_ensure_element_context (element,
decoder->device_id, &decoder->context)) {
GST_ERROR_OBJECT (element, "Couldn't create CUDA context");
return FALSE;
}
return decoder->configured;
gst_clear_cuda_stream (&decoder->stream);
decoder->stream = gst_cuda_stream_new (decoder->context);
return TRUE;
}
static void
@ -241,6 +235,28 @@ gst_nv_decoder_reset_unlocked (GstNvDecoder * self)
self->num_output_surfaces = 0;
}
gboolean
gst_nv_decoder_close (GstNvDecoder * decoder)
{
gst_nv_decoder_reset_unlocked (decoder);
gst_clear_cuda_stream (&decoder->stream);
gst_clear_object (&decoder->context);
gst_clear_object (&decoder->gl_display);
gst_clear_object (&decoder->gl_context);
gst_clear_object (&decoder->other_gl_context);
return TRUE;
}
gboolean
gst_nv_decoder_is_configured (GstNvDecoder * decoder)
{
g_return_val_if_fail (GST_IS_NV_DECODER (decoder), FALSE);
return decoder->configured;
}
gboolean
gst_nv_decoder_configure (GstNvDecoder * decoder, cudaVideoCodec codec,
GstVideoInfo * info, gint coded_width, gint coded_height,
@ -1331,33 +1347,33 @@ gst_cuda_video_codec_to_string (cudaVideoCodec codec)
return "unknown";
}
gboolean
void
gst_nv_decoder_handle_set_context (GstNvDecoder * decoder,
GstElement * videodec, GstContext * context)
GstElement * element, GstContext * context)
{
g_return_val_if_fail (GST_IS_NV_DECODER (decoder), FALSE);
g_return_val_if_fail (GST_IS_ELEMENT (videodec), FALSE);
#ifdef HAVE_CUDA_GST_GL
if (gst_gl_handle_set_context (videodec, context,
(GstGLDisplay **) & decoder->gl_display,
(GstGLContext **) & decoder->other_gl_context)) {
return TRUE;
if (gst_cuda_handle_set_context (element, context, decoder->device_id,
&decoder->context)) {
return;
}
#ifdef HAVE_CUDA_GST_GL
gst_gl_handle_set_context (element, context,
(GstGLDisplay **) & decoder->gl_display,
(GstGLContext **) & decoder->other_gl_context);
#endif
return FALSE;
}
gboolean
gst_nv_decoder_handle_context_query (GstNvDecoder * decoder,
GstVideoDecoder * videodec, GstQuery * query)
gst_nv_decoder_handle_query (GstNvDecoder * decoder, GstElement * element,
GstQuery * query)
{
g_return_val_if_fail (GST_IS_NV_DECODER (decoder), FALSE);
g_return_val_if_fail (GST_IS_ELEMENT (videodec), FALSE);
if (GST_QUERY_TYPE (query) != GST_QUERY_CONTEXT)
return FALSE;
if (gst_cuda_handle_context_query (element, query, decoder->context))
return TRUE;
#ifdef HAVE_CUDA_GST_GL
if (gst_gl_handle_context_query (GST_ELEMENT (videodec), query,
if (gst_gl_handle_context_query (element, query,
(GstGLDisplay *) decoder->gl_display,
(GstGLContext *) decoder->gl_context,
(GstGLContext *) decoder->other_gl_context)) {


@ -42,7 +42,12 @@ typedef struct _GstNvDecoderClassData
guint max_height;
} GstNvDecoderClassData;
GstNvDecoder * gst_nv_decoder_new (GstCudaContext * context);
GstNvDecoder * gst_nv_decoder_new (guint device_id);
gboolean gst_nv_decoder_open (GstNvDecoder * decoder,
GstElement * element);
gboolean gst_nv_decoder_close (GstNvDecoder * decoder);
gboolean gst_nv_decoder_is_configured (GstNvDecoder * decoder);
@ -84,12 +89,12 @@ gboolean gst_nv_decoder_check_device_caps (CUcontext cuda_ctx,
const gchar * gst_cuda_video_codec_to_string (cudaVideoCodec codec);
/* helper methods */
gboolean gst_nv_decoder_handle_set_context (GstNvDecoder * decoder,
GstElement * videodec,
void gst_nv_decoder_handle_set_context (GstNvDecoder * decoder,
GstElement * element,
GstContext * context);
gboolean gst_nv_decoder_handle_context_query (GstNvDecoder * decoder,
GstVideoDecoder * videodec,
gboolean gst_nv_decoder_handle_query (GstNvDecoder * decoder,
GstElement * element,
GstQuery * query);
gboolean gst_nv_decoder_negotiate (GstNvDecoder * decoder,


@ -101,7 +101,6 @@ typedef struct _GstNvH264Dec
{
GstH264Decoder parent;
GstCudaContext *context;
GstNvDecoder *decoder;
CUVIDPICPARAMS params;
@ -159,7 +158,7 @@ static GTypeClass *parent_class = nullptr;
#define GST_NV_H264_DEC_GET_CLASS(object) \
(G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstNvH264DecClass))
static void gst_nv_h264_decoder_dispose (GObject * object);
static void gst_nv_h264_decoder_finalize (GObject * object);
static void gst_nv_h264_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_nv_h264_dec_get_property (GObject * object, guint prop_id,
@ -173,6 +172,8 @@ static gboolean gst_nv_h264_dec_stop (GstVideoDecoder * decoder);
static gboolean gst_nv_h264_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_nv_h264_dec_decide_allocation (GstVideoDecoder *
decoder, GstQuery * query);
static gboolean gst_nv_h264_dec_sink_query (GstVideoDecoder * decoder,
GstQuery * query);
static gboolean gst_nv_h264_dec_src_query (GstVideoDecoder * decoder,
GstQuery * query);
static gboolean gst_nv_h264_dec_sink_event (GstVideoDecoder * decoder,
@ -208,7 +209,7 @@ gst_nv_h264_dec_class_init (GstNvH264DecClass * klass,
GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
GstH264DecoderClass *h264decoder_class = GST_H264_DECODER_CLASS (klass);
object_class->dispose = gst_nv_h264_decoder_dispose;
object_class->finalize = gst_nv_h264_decoder_finalize;
object_class->set_property = gst_nv_h264_dec_set_property;
object_class->get_property = gst_nv_h264_dec_get_property;
@ -307,6 +308,7 @@ gst_nv_h264_dec_class_init (GstNvH264DecClass * klass,
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_nv_h264_dec_negotiate);
decoder_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_nv_h264_dec_decide_allocation);
decoder_class->sink_query = GST_DEBUG_FUNCPTR (gst_nv_h264_dec_sink_query);
decoder_class->src_query = GST_DEBUG_FUNCPTR (gst_nv_h264_dec_src_query);
decoder_class->sink_event = GST_DEBUG_FUNCPTR (gst_nv_h264_dec_sink_event);
@ -339,6 +341,9 @@ gst_nv_h264_dec_class_init (GstNvH264DecClass * klass,
static void
gst_nv_h264_dec_init (GstNvH264Dec * self)
{
GstNvH264DecClass *klass = GST_NV_H264_DEC_GET_CLASS (self);
self->decoder = gst_nv_decoder_new (klass->cuda_device_id);
self->ref_list = g_array_sized_new (FALSE, TRUE,
sizeof (GstH264Picture *), 16);
g_array_set_clear_func (self->ref_list,
@ -349,13 +354,14 @@ gst_nv_h264_dec_init (GstNvH264Dec * self)
}
static void
gst_nv_h264_decoder_dispose (GObject * object)
gst_nv_h264_decoder_finalize (GObject * object)
{
GstNvH264Dec *self = GST_NV_H264_DEC (object);
g_clear_pointer (&self->ref_list, g_array_unref);
g_array_unref (self->ref_list);
gst_object_unref (self->decoder);
G_OBJECT_CLASS (parent_class)->dispose (object);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
@ -416,20 +422,9 @@ static void
gst_nv_h264_dec_set_context (GstElement * element, GstContext * context)
{
GstNvH264Dec *self = GST_NV_H264_DEC (element);
GstNvH264DecClass *klass = GST_NV_H264_DEC_GET_CLASS (self);
GST_DEBUG_OBJECT (self, "set context %s",
gst_context_get_context_type (context));
gst_nv_decoder_handle_set_context (self->decoder, element, context);
if (gst_cuda_handle_set_context (element, context, klass->cuda_device_id,
&self->context)) {
goto done;
}
if (self->decoder)
gst_nv_decoder_handle_set_context (self->decoder, element, context);
done:
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
@ -451,25 +446,10 @@ static gboolean
gst_nv_h264_dec_open (GstVideoDecoder * decoder)
{
GstNvH264Dec *self = GST_NV_H264_DEC (decoder);
GstNvH264DecClass *klass = GST_NV_H264_DEC_GET_CLASS (self);
if (!gst_cuda_ensure_element_context (GST_ELEMENT (self),
klass->cuda_device_id, &self->context)) {
GST_ERROR_OBJECT (self, "Required element data is unavailable");
return FALSE;
}
self->decoder = gst_nv_decoder_new (self->context);
if (!self->decoder) {
GST_ERROR_OBJECT (self, "Failed to create decoder object");
gst_clear_object (&self->context);
return FALSE;
}
gst_d3d11_h264_dec_reset (self);
return TRUE;
return gst_nv_decoder_open (self->decoder, GST_ELEMENT (decoder));
}
static gboolean
@ -477,16 +457,13 @@ gst_nv_h264_dec_close (GstVideoDecoder * decoder)
{
GstNvH264Dec *self = GST_NV_H264_DEC (decoder);
gst_clear_object (&self->decoder);
gst_clear_object (&self->context);
g_clear_pointer (&self->bitstream_buffer, g_free);
g_clear_pointer (&self->slice_offsets, g_free);
self->bitstream_buffer_alloc_size = 0;
self->slice_offsets_alloc_len = 0;
return TRUE;
return gst_nv_decoder_close (self->decoder);
}
static gboolean
@ -497,8 +474,7 @@ gst_nv_h264_dec_stop (GstVideoDecoder * decoder)
ret = GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder);
if (self->decoder)
gst_nv_decoder_reset (self->decoder);
gst_nv_decoder_reset (self->decoder);
return ret;
}
@ -511,9 +487,8 @@ gst_nv_h264_dec_negotiate (GstVideoDecoder * decoder)
GST_DEBUG_OBJECT (self, "negotiate");
gst_nv_decoder_negotiate (self->decoder, decoder, h264dec->input_state);
/* TODO: add support D3D11 memory */
if (!gst_nv_decoder_negotiate (self->decoder, decoder, h264dec->input_state))
return FALSE;
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
}
@ -532,24 +507,24 @@ gst_nv_h264_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
(decoder, query);
}
static gboolean
gst_nv_h264_dec_sink_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstNvH264Dec *self = GST_NV_H264_DEC (decoder);
if (gst_nv_decoder_handle_query (self->decoder, GST_ELEMENT (decoder), query))
return TRUE;
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
}
static gboolean
gst_nv_h264_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstNvH264Dec *self = GST_NV_H264_DEC (decoder);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_cuda_handle_context_query (GST_ELEMENT (decoder), query,
self->context)) {
return TRUE;
} else if (self->decoder &&
gst_nv_decoder_handle_context_query (self->decoder, decoder, query)) {
return TRUE;
}
break;
default:
break;
}
if (gst_nv_decoder_handle_query (self->decoder, GST_ELEMENT (decoder), query))
return TRUE;
return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
}
@ -559,9 +534,6 @@ gst_nv_h264_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstNvH264Dec *self = GST_NV_H264_DEC (decoder);
if (!self->decoder)
goto done;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
gst_nv_decoder_set_flushing (self->decoder, TRUE);
@ -573,7 +545,6 @@ gst_nv_h264_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
break;
}
done:
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
}


@ -102,7 +102,6 @@ typedef struct _GstNvH265Dec
{
GstH265Decoder parent;
GstCudaContext *context;
GstNvDecoder *decoder;
CUVIDPICPARAMS params;
@ -158,6 +157,7 @@ static GTypeClass *parent_class = nullptr;
#define GST_NV_H265_DEC_GET_CLASS(object) \
(G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstNvH265DecClass))
static void gst_nv_h265_dec_finalize (GObject * object);
static void gst_nv_h265_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_nv_h265_dec_get_property (GObject * object, guint prop_id,
@ -171,6 +171,8 @@ static gboolean gst_nv_h265_dec_stop (GstVideoDecoder * decoder);
static gboolean gst_nv_h265_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_nv_h265_dec_decide_allocation (GstVideoDecoder *
decoder, GstQuery * query);
static gboolean gst_nv_h265_dec_sink_query (GstVideoDecoder * decoder,
GstQuery * query);
static gboolean gst_nv_h265_dec_src_query (GstVideoDecoder * decoder,
GstQuery * query);
static gboolean gst_nv_h265_dec_sink_event (GstVideoDecoder * decoder,
@ -203,6 +205,7 @@ gst_nv_h265_dec_class_init (GstNvH265DecClass * klass,
GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
GstH265DecoderClass *h265decoder_class = GST_H265_DECODER_CLASS (klass);
object_class->finalize = gst_nv_h265_dec_finalize;
object_class->set_property = gst_nv_h265_dec_set_property;
object_class->get_property = gst_nv_h265_dec_get_property;
@ -301,6 +304,7 @@ gst_nv_h265_dec_class_init (GstNvH265DecClass * klass,
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_nv_h265_dec_negotiate);
decoder_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_nv_h265_dec_decide_allocation);
decoder_class->sink_query = GST_DEBUG_FUNCPTR (gst_nv_h265_dec_sink_query);
decoder_class->src_query = GST_DEBUG_FUNCPTR (gst_nv_h265_dec_src_query);
decoder_class->sink_event = GST_DEBUG_FUNCPTR (gst_nv_h265_dec_sink_event);
@ -331,10 +335,24 @@ gst_nv_h265_dec_class_init (GstNvH265DecClass * klass,
static void
gst_nv_h265_dec_init (GstNvH265Dec * self)
{
GstNvH265DecClass *klass = GST_NV_H265_DEC_GET_CLASS (self);
self->decoder = gst_nv_decoder_new (klass->cuda_device_id);
self->num_output_surfaces = DEFAULT_NUM_OUTPUT_SURFACES;
self->max_display_delay = DEFAULT_MAX_DISPLAY_DELAY;
}
static void
gst_nv_h265_dec_finalize (GObject * object)
{
GstNvH265Dec *self = GST_NV_H265_DEC (object);
gst_object_unref (self->decoder);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_nv_h265_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
@ -393,20 +411,9 @@ static void
gst_nv_h265_dec_set_context (GstElement * element, GstContext * context)
{
GstNvH265Dec *self = GST_NV_H265_DEC (element);
GstNvH265DecClass *klass = GST_NV_H265_DEC_GET_CLASS (self);
GST_DEBUG_OBJECT (self, "set context %s",
gst_context_get_context_type (context));
gst_nv_decoder_handle_set_context (self->decoder, element, context);
if (gst_cuda_handle_set_context (element, context, klass->cuda_device_id,
&self->context)) {
goto done;
}
if (self->decoder)
gst_nv_decoder_handle_set_context (self->decoder, element, context);
done:
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
@ -426,25 +433,10 @@ static gboolean
gst_nv_h265_dec_open (GstVideoDecoder * decoder)
{
GstNvH265Dec *self = GST_NV_H265_DEC (decoder);
GstNvH265DecClass *klass = GST_NV_H265_DEC_GET_CLASS (self);
if (!gst_cuda_ensure_element_context (GST_ELEMENT (self),
klass->cuda_device_id, &self->context)) {
GST_ERROR_OBJECT (self, "Required element data is unavailable");
return FALSE;
}
self->decoder = gst_nv_decoder_new (self->context);
if (!self->decoder) {
GST_ERROR_OBJECT (self, "Failed to create decoder object");
gst_clear_object (&self->context);
return FALSE;
}
gst_nv_h265_dec_reset (self);
return TRUE;
return gst_nv_decoder_open (self->decoder, GST_ELEMENT (decoder));
}
static gboolean
@ -452,16 +444,13 @@ gst_nv_h265_dec_close (GstVideoDecoder * decoder)
{
GstNvH265Dec *self = GST_NV_H265_DEC (decoder);
gst_clear_object (&self->decoder);
gst_clear_object (&self->context);
g_clear_pointer (&self->bitstream_buffer, g_free);
g_clear_pointer (&self->slice_offsets, g_free);
self->bitstream_buffer_alloc_size = 0;
self->slice_offsets_alloc_len = 0;
return TRUE;
return gst_nv_decoder_close (self->decoder);
}
static gboolean
@ -472,8 +461,7 @@ gst_nv_h265_dec_stop (GstVideoDecoder * decoder)
ret = GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder);
if (self->decoder)
gst_nv_decoder_reset (self->decoder);
gst_nv_decoder_reset (self->decoder);
return ret;
}
@ -486,9 +474,8 @@ gst_nv_h265_dec_negotiate (GstVideoDecoder * decoder)
GST_DEBUG_OBJECT (self, "negotiate");
gst_nv_decoder_negotiate (self->decoder, decoder, h265dec->input_state);
/* TODO: add support D3D11 memory */
if (!gst_nv_decoder_negotiate (self->decoder, decoder, h265dec->input_state))
return FALSE;
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
}
@ -507,24 +494,24 @@ gst_nv_h265_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
(decoder, query);
}
static gboolean
gst_nv_h265_dec_sink_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstNvH265Dec *self = GST_NV_H265_DEC (decoder);
if (gst_nv_decoder_handle_query (self->decoder, GST_ELEMENT (decoder), query))
return TRUE;
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
}
static gboolean
gst_nv_h265_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstNvH265Dec *self = GST_NV_H265_DEC (decoder);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_cuda_handle_context_query (GST_ELEMENT (decoder), query,
self->context)) {
return TRUE;
} else if (self->decoder &&
gst_nv_decoder_handle_context_query (self->decoder, decoder, query)) {
return TRUE;
}
break;
default:
break;
}
if (gst_nv_decoder_handle_query (self->decoder, GST_ELEMENT (decoder), query))
return TRUE;
return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
}
@ -534,9 +521,6 @@ gst_nv_h265_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstNvH265Dec *self = GST_NV_H265_DEC (decoder);
if (!self->decoder)
goto done;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
gst_nv_decoder_set_flushing (self->decoder, TRUE);
@ -548,7 +532,6 @@ gst_nv_h265_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
break;
}
done:
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
}


@ -48,7 +48,6 @@ typedef struct _GstNvVp8Dec
{
GstVp8Decoder parent;
GstCudaContext *context;
GstNvDecoder *decoder;
CUVIDPICPARAMS params;
@ -87,6 +86,7 @@ static GTypeClass *parent_class = nullptr;
#define GST_NV_VP8_DEC_GET_CLASS(object) \
(G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstNvVp8DecClass))
static void gst_nv_vp8_dec_finalize (GObject * object);
static void gst_nv_vp8_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_nv_vp8_dec_get_property (GObject * object, guint prop_id,
@ -100,6 +100,8 @@ static gboolean gst_nv_vp8_dec_stop (GstVideoDecoder * decoder);
static gboolean gst_nv_vp8_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_nv_vp8_dec_decide_allocation (GstVideoDecoder *
decoder, GstQuery * query);
static gboolean gst_nv_vp8_dec_sink_query (GstVideoDecoder * decoder,
GstQuery * query);
static gboolean gst_nv_vp8_dec_src_query (GstVideoDecoder * decoder,
GstQuery * query);
static gboolean gst_nv_vp8_dec_sink_event (GstVideoDecoder * decoder,
@ -126,6 +128,7 @@ gst_nv_vp8_dec_class_init (GstNvVp8DecClass * klass,
GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
GstVp8DecoderClass *vp8decoder_class = GST_VP8_DECODER_CLASS (klass);
object_class->finalize = gst_nv_vp8_dec_finalize;
object_class->set_property = gst_nv_vp8_dec_set_property;
object_class->get_property = gst_nv_vp8_dec_get_property;
@ -224,6 +227,7 @@ gst_nv_vp8_dec_class_init (GstNvVp8DecClass * klass,
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_nv_vp8_dec_negotiate);
decoder_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_nv_vp8_dec_decide_allocation);
decoder_class->sink_query = GST_DEBUG_FUNCPTR (gst_nv_vp8_dec_sink_query);
decoder_class->src_query = GST_DEBUG_FUNCPTR (gst_nv_vp8_dec_src_query);
decoder_class->sink_event = GST_DEBUG_FUNCPTR (gst_nv_vp8_dec_sink_event);
@ -250,10 +254,24 @@ gst_nv_vp8_dec_class_init (GstNvVp8DecClass * klass,
static void
gst_nv_vp8_dec_init (GstNvVp8Dec * self)
{
GstNvVp8DecClass *klass = GST_NV_VP8_DEC_GET_CLASS (self);
self->decoder = gst_nv_decoder_new (klass->cuda_device_id);
self->num_output_surfaces = DEFAULT_NUM_OUTPUT_SURFACES;
self->max_display_delay = DEFAULT_MAX_DISPLAY_DELAY;
}
static void
gst_nv_vp8_dec_finalize (GObject * object)
{
GstNvVp8Dec *self = GST_NV_VP8_DEC (object);
gst_object_unref (self->decoder);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_nv_vp8_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
@ -312,20 +330,9 @@ static void
gst_nv_vp8_dec_set_context (GstElement * element, GstContext * context)
{
GstNvVp8Dec *self = GST_NV_VP8_DEC (element);
GstNvVp8DecClass *klass = GST_NV_VP8_DEC_GET_CLASS (self);
GST_DEBUG_OBJECT (self, "set context %s",
gst_context_get_context_type (context));
gst_nv_decoder_handle_set_context (self->decoder, element, context);
if (gst_cuda_handle_set_context (element, context, klass->cuda_device_id,
&self->context)) {
goto done;
}
if (self->decoder)
gst_nv_decoder_handle_set_context (self->decoder, element, context);
done:
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
@ -333,23 +340,8 @@ static gboolean
gst_nv_vp8_dec_open (GstVideoDecoder * decoder)
{
GstNvVp8Dec *self = GST_NV_VP8_DEC (decoder);
GstNvVp8DecClass *klass = GST_NV_VP8_DEC_GET_CLASS (self);
if (!gst_cuda_ensure_element_context (GST_ELEMENT (self),
klass->cuda_device_id, &self->context)) {
GST_ERROR_OBJECT (self, "Required element data is unavailable");
return FALSE;
}
self->decoder = gst_nv_decoder_new (self->context);
if (!self->decoder) {
GST_ERROR_OBJECT (self, "Failed to create decoder object");
gst_clear_object (&self->context);
return FALSE;
}
return TRUE;
return gst_nv_decoder_open (self->decoder, GST_ELEMENT (decoder));
}
static gboolean
@ -357,10 +349,7 @@ gst_nv_vp8_dec_close (GstVideoDecoder * decoder)
{
GstNvVp8Dec *self = GST_NV_VP8_DEC (decoder);
gst_clear_object (&self->decoder);
gst_clear_object (&self->context);
return TRUE;
return gst_nv_decoder_close (self->decoder);
}
static gboolean
@ -371,8 +360,7 @@ gst_nv_vp8_dec_stop (GstVideoDecoder * decoder)
ret = GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder);
if (self->decoder)
gst_nv_decoder_reset (self->decoder);
gst_nv_decoder_reset (self->decoder);
return ret;
}
@ -385,9 +373,8 @@ gst_nv_vp8_dec_negotiate (GstVideoDecoder * decoder)
GST_DEBUG_OBJECT (self, "negotiate");
gst_nv_decoder_negotiate (self->decoder, decoder, vp8dec->input_state);
/* TODO: add support D3D11 memory */
if (!gst_nv_decoder_negotiate (self->decoder, decoder, vp8dec->input_state))
return FALSE;
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
}
@ -406,24 +393,24 @@ gst_nv_vp8_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
(decoder, query);
}
static gboolean
gst_nv_vp8_dec_sink_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstNvVp8Dec *self = GST_NV_VP8_DEC (decoder);
if (gst_nv_decoder_handle_query (self->decoder, GST_ELEMENT (decoder), query))
return TRUE;
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
}
static gboolean
gst_nv_vp8_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstNvVp8Dec *self = GST_NV_VP8_DEC (decoder);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_cuda_handle_context_query (GST_ELEMENT (decoder), query,
self->context)) {
return TRUE;
} else if (self->decoder &&
gst_nv_decoder_handle_context_query (self->decoder, decoder, query)) {
return TRUE;
}
break;
default:
break;
}
if (gst_nv_decoder_handle_query (self->decoder, GST_ELEMENT (decoder), query))
return TRUE;
return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
}
@ -433,9 +420,6 @@ gst_nv_vp8_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstNvVp8Dec *self = GST_NV_VP8_DEC (decoder);
if (!self->decoder)
goto done;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
gst_nv_decoder_set_flushing (self->decoder, TRUE);
@ -447,7 +431,6 @@ gst_nv_vp8_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
break;
}
done:
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
}


@ -48,7 +48,6 @@ typedef struct _GstNvVp9Dec
{
GstVp9Decoder parent;
GstCudaContext *context;
GstNvDecoder *decoder;
CUVIDPICPARAMS params;
@ -88,6 +87,7 @@ static GTypeClass *parent_class = nullptr;
#define GST_NV_VP9_DEC_GET_CLASS(object) \
(G_TYPE_INSTANCE_GET_CLASS ((object),G_TYPE_FROM_INSTANCE (object),GstNvVp9DecClass))
static void gst_nv_vp9_dec_finalize (GObject * object);
static void gst_nv_vp9_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_nv_vp9_dec_get_property (GObject * object, guint prop_id,
@ -101,6 +101,8 @@ static gboolean gst_nv_vp9_dec_stop (GstVideoDecoder * decoder);
static gboolean gst_nv_vp9_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_nv_vp9_dec_decide_allocation (GstVideoDecoder *
decoder, GstQuery * query);
static gboolean gst_nv_vp9_dec_sink_query (GstVideoDecoder * decoder,
GstQuery * query);
static gboolean gst_nv_vp9_dec_src_query (GstVideoDecoder * decoder,
GstQuery * query);
static gboolean gst_nv_vp9_dec_sink_event (GstVideoDecoder * decoder,
@ -129,6 +131,7 @@ gst_nv_vp9_dec_class_init (GstNvVp9DecClass * klass,
GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
GstVp9DecoderClass *vp9decoder_class = GST_VP9_DECODER_CLASS (klass);
object_class->finalize = gst_nv_vp9_dec_finalize;
object_class->set_property = gst_nv_vp9_dec_set_property;
object_class->get_property = gst_nv_vp9_dec_get_property;
@ -227,6 +230,7 @@ gst_nv_vp9_dec_class_init (GstNvVp9DecClass * klass,
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_nv_vp9_dec_negotiate);
decoder_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_nv_vp9_dec_decide_allocation);
decoder_class->sink_query = GST_DEBUG_FUNCPTR (gst_nv_vp9_dec_sink_query);
decoder_class->src_query = GST_DEBUG_FUNCPTR (gst_nv_vp9_dec_src_query);
decoder_class->sink_event = GST_DEBUG_FUNCPTR (gst_nv_vp9_dec_sink_event);
@ -255,10 +259,24 @@ gst_nv_vp9_dec_class_init (GstNvVp9DecClass * klass,
static void
gst_nv_vp9_dec_init (GstNvVp9Dec * self)
{
GstNvVp9DecClass *klass = GST_NV_VP9_DEC_GET_CLASS (self);
self->decoder = gst_nv_decoder_new (klass->cuda_device_id);
self->num_output_surfaces = DEFAULT_NUM_OUTPUT_SURFACES;
self->max_display_delay = DEFAULT_MAX_DISPLAY_DELAY;
}
static void
gst_nv_vp9_dec_finalize (GObject * object)
{
GstNvVp9Dec *self = GST_NV_VP9_DEC (object);
gst_object_unref (self->decoder);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_nv_vp9_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
@ -317,20 +335,9 @@ static void
gst_nv_vp9_dec_set_context (GstElement * element, GstContext * context)
{
GstNvVp9Dec *self = GST_NV_VP9_DEC (element);
GstNvVp9DecClass *klass = GST_NV_VP9_DEC_GET_CLASS (self);
GST_DEBUG_OBJECT (self, "set context %s",
gst_context_get_context_type (context));
gst_nv_decoder_handle_set_context (self->decoder, element, context);
if (gst_cuda_handle_set_context (element, context, klass->cuda_device_id,
&self->context)) {
goto done;
}
if (self->decoder)
gst_nv_decoder_handle_set_context (self->decoder, element, context);
done:
GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
}
@ -339,27 +346,12 @@ gst_nv_vp9_dec_open (GstVideoDecoder * decoder)
{
GstVp9Decoder *vp9dec = GST_VP9_DECODER (decoder);
GstNvVp9Dec *self = GST_NV_VP9_DEC (decoder);
GstNvVp9DecClass *klass = GST_NV_VP9_DEC_GET_CLASS (self);
if (!gst_cuda_ensure_element_context (GST_ELEMENT (self),
klass->cuda_device_id, &self->context)) {
GST_ERROR_OBJECT (self, "Required element data is unavailable");
return FALSE;
}
self->decoder = gst_nv_decoder_new (self->context);
if (!self->decoder) {
GST_ERROR_OBJECT (self, "Failed to create decoder object");
gst_clear_object (&self->context);
return FALSE;
}
/* NVDEC doesn't support non-keyframe resolution change and it will result
* in outputting broken frames */
gst_vp9_decoder_set_non_keyframe_format_change_support (vp9dec, FALSE);
return TRUE;
return gst_nv_decoder_open (self->decoder, GST_ELEMENT (decoder));
}
static gboolean
@ -367,10 +359,7 @@ gst_nv_vp9_dec_close (GstVideoDecoder * decoder)
{
GstNvVp9Dec *self = GST_NV_VP9_DEC (decoder);
gst_clear_object (&self->decoder);
gst_clear_object (&self->context);
return TRUE;
return gst_nv_decoder_close (self->decoder);
}
static gboolean
@ -381,8 +370,7 @@ gst_nv_vp9_dec_stop (GstVideoDecoder * decoder)
ret = GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder);
if (self->decoder)
gst_nv_decoder_reset (self->decoder);
gst_nv_decoder_reset (self->decoder);
return ret;
}
@ -395,9 +383,8 @@ gst_nv_vp9_dec_negotiate (GstVideoDecoder * decoder)
GST_DEBUG_OBJECT (self, "negotiate");
gst_nv_decoder_negotiate (self->decoder, decoder, vp9dec->input_state);
/* TODO: add support D3D11 memory */
if (!gst_nv_decoder_negotiate (self->decoder, decoder, vp9dec->input_state))
return FALSE;
return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
}
@ -416,24 +403,24 @@ gst_nv_vp9_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
(decoder, query);
}
static gboolean
gst_nv_vp9_dec_sink_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstNvVp9Dec *self = GST_NV_VP9_DEC (decoder);
if (gst_nv_decoder_handle_query (self->decoder, GST_ELEMENT (decoder), query))
return TRUE;
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_query (decoder, query);
}
static gboolean
gst_nv_vp9_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
{
GstNvVp9Dec *self = GST_NV_VP9_DEC (decoder);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
if (gst_cuda_handle_context_query (GST_ELEMENT (decoder), query,
self->context)) {
return TRUE;
} else if (self->decoder &&
gst_nv_decoder_handle_context_query (self->decoder, decoder, query)) {
return TRUE;
}
break;
default:
break;
}
if (gst_nv_decoder_handle_query (self->decoder, GST_ELEMENT (decoder), query))
return TRUE;
return GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
}
@ -443,9 +430,6 @@ gst_nv_vp9_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
{
GstNvVp9Dec *self = GST_NV_VP9_DEC (decoder);
if (!self->decoder)
goto done;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_START:
gst_nv_decoder_set_flushing (self->decoder, TRUE);
@ -457,7 +441,6 @@ gst_nv_vp9_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
break;
}
done:
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
}