basevideodecoder: Don't reorder serialized src events
Also allow the subclass to drop EOS if ::finish returns DROPPED; in that case the subclass is required to send EOS manually later. Fixes bug #653544.
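For context: with this change a subclass whose ::finish still has data to drain (for example frames queued in a hardware codec) can hold EOS back and emit it itself once draining is done. The sketch below is illustrative only: my_decoder_finish, start_asynchronous_drain and on_drain_complete are hypothetical names, and the usual GObject/element boilerplate is omitted.

static GstFlowReturn
my_decoder_finish (GstBaseVideoDecoder * base_video_decoder)
{
  /* Hypothetical helper that starts draining whatever the subclass
   * still has queued (e.g. frames sitting in a hardware decoder). */
  start_asynchronous_drain (base_video_decoder);

  /* Tell the base class to drop the EOS event; the subclass is now
   * responsible for sending EOS downstream later. */
  return GST_BASE_VIDEO_DECODER_FLOW_DROPPED;
}

static void
on_drain_complete (GstBaseVideoDecoder * base_video_decoder)
{
  /* Hypothetical drain-complete callback: push the EOS that was
   * held back in ::finish. */
  gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
      gst_event_new_eos ());
}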
parent 525875727a
commit f341386b64

4 changed files with 86 additions and 15 deletions
@@ -185,6 +185,9 @@ gst_base_video_codec_free_frame (GstVideoFrame * frame)
     gst_buffer_unref (frame->src_buffer);
   }
 
+  g_list_foreach (frame->events, (GFunc) gst_event_unref, NULL);
+  g_list_free (frame->events);
+
   if (frame->coder_hook_destroy_notify && frame->coder_hook)
     frame->coder_hook_destroy_notify (frame->coder_hook);
 
@@ -131,6 +131,10 @@ struct _GstVideoFrame
   GstClockTime deadline;
 
   gboolean force_keyframe;
+
+  /* Events that should be pushed downstream *before*
+   * the next src_buffer */
+  GList *events;
 };
 
 struct _GstBaseVideoCodec
@@ -237,6 +237,32 @@ gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder,
   base_video_decoder->sink_clipping = TRUE;
 }
 
+static gboolean
+gst_base_video_decoder_push_src_event (GstBaseVideoDecoder * decoder,
+    GstEvent * event)
+{
+  /* Forward non-serialized events and EOS/FLUSH_STOP immediately.
+   * For EOS this is required because no buffer or serialized event
+   * will come after EOS and nothing could trigger another
+   * _finish_frame() call. *
+   * If the subclass handles sending of EOS manually it can return
+   * _DROPPED from ::finish() and all other subclasses should have
+   * decoded/flushed all remaining data before this
+   *
+   * For FLUSH_STOP this is required because it is expected
+   * to be forwarded immediately and no buffers are queued anyway.
+   */
+  if (!GST_EVENT_IS_SERIALIZED (event)
+      || GST_EVENT_TYPE (event) == GST_EVENT_EOS
+      || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP)
+    return gst_pad_push_event (decoder->base_video_codec.srcpad, event);
+
+  decoder->current_frame_events =
+      g_list_prepend (decoder->current_frame_events, event);
+
+  return TRUE;
+}
+
 static gboolean
 gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
 {
@@ -338,6 +364,9 @@ gst_base_video_decoder_flush (GstBaseVideoDecoder * dec, gboolean hard)
         GST_FORMAT_UNDEFINED);
     gst_base_video_decoder_clear_queues (dec);
     dec->error_count = 0;
+    g_list_foreach (dec->current_frame_events, (GFunc) gst_event_unref, NULL);
+    g_list_free (dec->current_frame_events);
+    dec->current_frame_events = NULL;
   }
   /* and get (re)set for the sequel */
   gst_base_video_decoder_reset (dec, FALSE);
@@ -368,9 +397,9 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
   switch (GST_EVENT_TYPE (event)) {
     case GST_EVENT_EOS:
     {
+      GstFlowReturn flow_ret;
+
       if (!base_video_decoder->packetized) {
-        GstFlowReturn flow_ret;
-
         do {
           flow_ret =
               base_video_decoder_class->parse_data (base_video_decoder, TRUE);
@@ -378,12 +407,13 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
       }
 
       if (base_video_decoder_class->finish) {
-        base_video_decoder_class->finish (base_video_decoder);
+        flow_ret = base_video_decoder_class->finish (base_video_decoder);
+      } else {
+        flow_ret = GST_FLOW_OK;
       }
 
-      ret =
-          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
-          event);
+      if (flow_ret == GST_FLOW_OK)
+        ret = gst_base_video_decoder_push_src_event (base_video_decoder, event);
     }
       break;
     case GST_EVENT_NEWSEGMENT:
@@ -445,9 +475,7 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
       gst_segment_set_newsegment_full (segment,
           update, rate, arate, format, start, stop, pos);
 
-      ret =
-          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
-          event);
+      ret = gst_base_video_decoder_push_src_event (base_video_decoder, event);
       break;
     }
     case GST_EVENT_FLUSH_STOP:
@@ -457,9 +485,7 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
     }
     default:
      /* FIXME this changes the order of events */
-      ret =
-          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
-          event);
+      ret = gst_base_video_decoder_push_src_event (base_video_decoder, event);
      break;
  }
 
@@ -1207,9 +1233,7 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
     event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
         GST_CLOCK_TIME_NONE, 0);
 
-    ret =
-        gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
-        event);
+    ret = gst_base_video_decoder_push_src_event (base_video_decoder, event);
     if (!ret) {
       GST_ERROR_OBJECT (base_video_decoder, "new segment event ret=%d", ret);
       return GST_FLOW_ERROR;
@@ -1272,6 +1296,10 @@ gst_base_video_decoder_change_state (GstElement * element,
        base_video_decoder_class->stop (base_video_decoder);
      }
      gst_base_video_decoder_reset (base_video_decoder, TRUE);
+      g_list_foreach (base_video_decoder->current_frame_events,
+          (GFunc) gst_event_unref, NULL);
+      g_list_free (base_video_decoder->current_frame_events);
+      base_video_decoder->current_frame_events = NULL;
      break;
    default:
      break;
@@ -1292,6 +1320,9 @@ gst_base_video_decoder_free_frame (GstVideoFrame * frame)
     gst_buffer_unref (frame->src_buffer);
   }
 
+  g_list_foreach (frame->events, (GFunc) gst_event_unref, NULL);
+  g_list_free (frame->events);
+
   if (frame->coder_hook_destroy_notify && frame->coder_hook)
     frame->coder_hook_destroy_notify (frame->coder_hook);
 
@@ -1317,6 +1348,9 @@ gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder)
   frame->presentation_duration = GST_CLOCK_TIME_NONE;
   frame->n_fields = 2;
 
+  frame->events = base_video_decoder->current_frame_events;
+  base_video_decoder->current_frame_events = NULL;
+
   return frame;
 }
 
@@ -1339,6 +1373,7 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
   GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state;
   GstBuffer *src_buffer;
   GstFlowReturn ret = GST_FLOW_OK;
+  GList *l;
 
   GST_LOG_OBJECT (base_video_decoder, "finish frame");
   GST_LOG_OBJECT (base_video_decoder, "n %d in %d out %d",
@@ -1350,6 +1385,26 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
       "finish frame sync=%d pts=%" GST_TIME_FORMAT, frame->is_sync_point,
       GST_TIME_ARGS (frame->presentation_timestamp));
 
+  /* Push all pending events that arrived before this frame */
+  for (l = base_video_decoder->base_video_codec.frames; l; l = l->next) {
+    GstVideoFrame *tmp = l->data;
+
+    if (tmp->events) {
+      GList *k, *events;
+
+      events = tmp->events;
+      tmp->events = NULL;
+
+      for (k = g_list_last (events); k; k = k->prev)
+        gst_pad_push_event (base_video_decoder->base_video_codec.srcpad,
+            k->data);
+      g_list_free (events);
+    }
+
+    if (tmp == frame)
+      break;
+  }
+
   if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
     if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) {
       GST_DEBUG_OBJECT (base_video_decoder,
@@ -65,6 +65,13 @@ G_BEGIN_DECLS
  **/
 #define GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
 
+/**
+ * GST_BASE_VIDEO_DECODER_FLOW_DROPPED:
+ *
+ * Returned when the event/buffer should be dropped.
+ */
+#define GST_BASE_VIDEO_DECODER_FLOW_DROPPED GST_FLOW_CUSTOM_SUCCESS_1
+
 typedef struct _GstBaseVideoDecoder GstBaseVideoDecoder;
 typedef struct _GstBaseVideoDecoderClass GstBaseVideoDecoderClass;
 
@@ -136,6 +143,8 @@ struct _GstBaseVideoDecoder
   * only available during parsing */
   /* FIXME remove and add parameter to method */
   GstVideoFrame *current_frame;
+  /* events that should apply to the current frame */
+  GList *current_frame_events;
   /* relative offset of input data */
   guint64 input_offset;
   /* relative offset of frame */
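As a standalone illustration of the ordering used above (plain GLib, not part of the patch): serialized events are queued with g_list_prepend(), i.e. newest first, and later replayed from g_list_last() backwards, so downstream still sees them in the order they originally arrived.

#include <glib.h>
#include <stdio.h>

int
main (void)
{
  GList *queued = NULL;
  GList *k;
  const char *arrivals[] = { "tag", "custom-downstream", "newsegment" };
  guint i;

  /* queue in arrival order, prepending like the decoder does */
  for (i = 0; i < G_N_ELEMENTS (arrivals); i++)
    queued = g_list_prepend (queued, (gpointer) arrivals[i]);

  /* replay from the tail so the original arrival order is preserved */
  for (k = g_list_last (queued); k; k = k->prev)
    printf ("push: %s\n", (const char *) k->data);

  g_list_free (queued);
  return 0;
}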