diff --git a/ext/vp8/gst/video/gstbasevideodecoder.c b/ext/vp8/gst/video/gstbasevideodecoder.c
index 640717bd34..a565f496f9 100644
--- a/ext/vp8/gst/video/gstbasevideodecoder.c
+++ b/ext/vp8/gst/video/gstbasevideodecoder.c
@@ -904,9 +904,9 @@ gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder)
   frame->decode_frame_number = frame->system_frame_number -
       base_video_decoder->reorder_depth;
 
-  frame->decode_timestamp = -1;
-  frame->presentation_timestamp = -1;
-  frame->presentation_duration = -1;
+  frame->decode_timestamp = GST_CLOCK_TIME_NONE;
+  frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
+  frame->presentation_duration = GST_CLOCK_TIME_NONE;
   frame->n_fields = 2;
 
   return frame;
@@ -1010,8 +1010,8 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
 
   GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
   GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
-  GST_BUFFER_OFFSET (frame->src_buffer) = -1;
-  GST_BUFFER_OFFSET_END (frame->src_buffer) = -1;
+  GST_BUFFER_OFFSET (frame->src_buffer) = GST_BUFFER_OFFSET_NONE;
+  GST_BUFFER_OFFSET_END (frame->src_buffer) = GST_BUFFER_OFFSET_NONE;
 
   GST_DEBUG ("pushing frame %" GST_TIME_FORMAT,
       GST_TIME_ARGS (frame->presentation_timestamp));
@@ -1270,6 +1270,8 @@ gst_base_video_decoder_have_frame_2 (GstBaseVideoDecoder * base_video_decoder)
   GstVideoFrame *frame = base_video_decoder->current_frame;
   GstBaseVideoDecoderClass *base_video_decoder_class;
   GstFlowReturn ret = GST_FLOW_OK;
+  GstClockTime running_time;
+  GstClockTimeDiff deadline;
 
   base_video_decoder_class =
       GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
@@ -1288,8 +1290,17 @@ gst_base_video_decoder_have_frame_2 (GstBaseVideoDecoder * base_video_decoder)
   base_video_decoder->frames = g_list_append (base_video_decoder->frames,
       frame);
 
+  running_time = gst_segment_to_running_time (&base_video_decoder->segment,
+      GST_FORMAT_TIME, frame->presentation_timestamp);
+
+  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->earliest_time))
+    deadline = GST_CLOCK_DIFF (base_video_decoder->earliest_time, running_time);
+  else
+    deadline = 0;
+
   /* do something with frame */
-  ret = base_video_decoder_class->handle_frame (base_video_decoder, frame);
+  ret = base_video_decoder_class->handle_frame (base_video_decoder, frame,
+      deadline);
   if (!GST_FLOW_IS_SUCCESS (ret)) {
     GST_DEBUG ("flow error!");
   }
diff --git a/ext/vp8/gst/video/gstbasevideodecoder.h b/ext/vp8/gst/video/gstbasevideodecoder.h
index 30b3c6c0b4..13b588605c 100644
--- a/ext/vp8/gst/video/gstbasevideodecoder.h
+++ b/ext/vp8/gst/video/gstbasevideodecoder.h
@@ -127,7 +127,8 @@ struct _GstBaseVideoDecoderClass
       int offset, int n);
   GstFlowReturn (*parse_data) (GstBaseVideoDecoder *decoder, gboolean at_eos);
   GstFlowReturn (*finish) (GstBaseVideoDecoder *coder);
-  GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrame *frame);
+  GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrame *frame,
+      GstClockTimeDiff deadline);
   GstFlowReturn (*shape_output) (GstBaseVideoDecoder *coder, GstVideoFrame *frame);
   GstCaps *(*get_caps) (GstBaseVideoDecoder *coder);
 
diff --git a/ext/vp8/gst/video/gstbasevideoutils.h b/ext/vp8/gst/video/gstbasevideoutils.h
index 3a4981093e..8b80b168a6 100644
--- a/ext/vp8/gst/video/gstbasevideoutils.h
+++ b/ext/vp8/gst/video/gstbasevideoutils.h
@@ -54,9 +54,9 @@ struct _GstVideoState
 
 struct _GstVideoFrame
 {
-  guint64 decode_timestamp;
-  guint64 presentation_timestamp;
-  guint64 presentation_duration;
+  GstClockTime decode_timestamp;
+  GstClockTime presentation_timestamp;
+  GstClockTime presentation_duration;
 
   gint system_frame_number;
   gint decode_frame_number;
diff --git a/ext/vp8/gstvp8dec.c b/ext/vp8/gstvp8dec.c
index 9d6e1d9eb9..87e2b18ff1 100644
--- a/ext/vp8/gstvp8dec.c
+++ b/ext/vp8/gstvp8dec.c
@@ -145,7 +145,7 @@ static gboolean gst_vp8_dec_reset (GstBaseVideoDecoder * decoder);
 static GstFlowReturn gst_vp8_dec_parse_data (GstBaseVideoDecoder * decoder,
     gboolean at_eos);
 static GstFlowReturn gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder,
-    GstVideoFrame * frame);
+    GstVideoFrame * frame, GstClockTimeDiff deadline);
 
 GType gst_vp8_dec_get_type (void);
 
@@ -416,7 +416,8 @@ gst_vp8_dec_image_to_buffer (GstVP8Dec * dec, const vpx_image_t * img,
 }
 
 static GstFlowReturn
-gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame)
+gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame,
+    GstClockTimeDiff deadline)
 {
   GstVP8Dec *dec;
   GstFlowReturn ret = GST_FLOW_OK;
@@ -518,13 +519,19 @@ gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame)
 
     img = vpx_codec_get_frame (&dec->decoder, &iter);
     if (img) {
-      ret = gst_base_video_decoder_alloc_src_frame (decoder, frame);
-
-      if (ret == GST_FLOW_OK) {
-        gst_vp8_dec_image_to_buffer (dec, img, frame->src_buffer);
-        gst_base_video_decoder_finish_frame (decoder, frame);
-      } else {
+      if (deadline < 0) {
+        GST_LOG_OBJECT (dec, "Skipping late frame (%f s past deadline)",
+            (double) -deadline / GST_SECOND);
         gst_base_video_decoder_skip_frame (decoder, frame);
+      } else {
+        ret = gst_base_video_decoder_alloc_src_frame (decoder, frame);
+
+        if (ret == GST_FLOW_OK) {
+          gst_vp8_dec_image_to_buffer (dec, img, frame->src_buffer);
+          gst_base_video_decoder_finish_frame (decoder, frame);
+        } else {
+          gst_base_video_decoder_skip_frame (decoder, frame);
+        }
       }
 
       vpx_img_free (img);
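
Note on the API change (not part of the patch itself): every GstBaseVideoDecoderClass::handle_frame implementation must grow the new GstClockTimeDiff argument; a negative deadline means the frame's running time is already past base_video_decoder->earliest_time. The sketch below shows how another subclass might use it, modelled on the gstvp8dec.c hunk above. It is a minimal illustration, not code from the patch: gst_foo_dec_handle_frame is a made-up name and the codec-specific steps are left as comments.

/* Hypothetical subclass vfunc mirroring the vp8dec pattern above: drop
 * frames whose deadline has already passed instead of pushing them. */
#include <gst/gst.h>
#include "gstbasevideodecoder.h"        /* the in-tree copy patched above */

static GstFlowReturn
gst_foo_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame,
    GstClockTimeDiff deadline)
{
  GstFlowReturn ret;

  /* ... feed the encoded input for this frame to the codec here ... */

  if (deadline < 0) {
    /* Frame is late: skip it rather than allocating and pushing output */
    gst_base_video_decoder_skip_frame (decoder, frame);
    return GST_FLOW_OK;
  }

  ret = gst_base_video_decoder_alloc_src_frame (decoder, frame);
  if (ret != GST_FLOW_OK) {
    gst_base_video_decoder_skip_frame (decoder, frame);
    return ret;
  }

  /* ... copy the decoded image into frame->src_buffer here ... */

  gst_base_video_decoder_finish_frame (decoder, frame);
  return GST_FLOW_OK;
}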