From 08e6a65f0837c62ae35fc9ad9392c9d2f44b2e20 Mon Sep 17 00:00:00 2001
From: Carl-Anton Ingmarsson
Date: Wed, 1 Sep 2010 22:03:05 +0200
Subject: [PATCH] vdpau: rework GstBaseVideoDecoder timestamping

Timestamps are now chosen in the following order:
upstream -> parsed by decoder -> calculated from the timestamp offset.

We also check the timestamps supplied from upstream/decoder to make sure
they are at least increasing.
---
 .../basevideodecoder/gstbasevideodecoder.c | 224 +++++++-----------
 .../basevideodecoder/gstbasevideodecoder.h |  23 +-
 sys/vdpau/basevideodecoder/gstvideoframe.c |   8 +-
 sys/vdpau/basevideodecoder/gstvideoframe.h |  15 +-
 4 files changed, 96 insertions(+), 174 deletions(-)

diff --git a/sys/vdpau/basevideodecoder/gstbasevideodecoder.c b/sys/vdpau/basevideodecoder/gstbasevideodecoder.c
index d49cf715b8..8dadf44510 100644
--- a/sys/vdpau/basevideodecoder/gstbasevideodecoder.c
+++ b/sys/vdpau/basevideodecoder/gstbasevideodecoder.c
@@ -118,27 +118,6 @@ gst_base_video_decoder_get_timestamp_at_offset (GstBaseVideoDecoder *
       offset, GST_TIME_ARGS (*timestamp));
 }
 
-static guint64
-gst_base_video_decoder_get_timestamp (GstBaseVideoDecoder * base_video_decoder,
-    gint picture_number)
-{
-  if (base_video_decoder->state.fps_d == 0) {
-    return -1;
-  }
-  if (picture_number < base_video_decoder->base_picture_number) {
-    return base_video_decoder->timestamp_offset -
-        (gint64) gst_util_uint64_scale (base_video_decoder->base_picture_number
-        - picture_number, base_video_decoder->state.fps_d * GST_SECOND,
-        base_video_decoder->state.fps_n);
-  } else {
-    return base_video_decoder->timestamp_offset +
-        gst_util_uint64_scale (picture_number -
-        base_video_decoder->base_picture_number,
-        base_video_decoder->state.fps_d * GST_SECOND,
-        base_video_decoder->state.fps_n);
-  }
-}
-
 static guint64
 gst_base_video_decoder_get_field_timestamp (GstBaseVideoDecoder *
     base_video_decoder, gint field_offset)
@@ -185,12 +164,6 @@ gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder)
   else
     frame = gst_video_frame_new ();
 
-  frame->system_frame_number = base_video_decoder->system_frame_number;
-  base_video_decoder->system_frame_number++;
-
-  frame->decode_frame_number = frame->system_frame_number -
-      base_video_decoder->reorder_depth;
-
   return frame;
 }
 
@@ -203,9 +176,6 @@ gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder)
   base_video_decoder->have_sync = FALSE;
 
   base_video_decoder->timestamp_offset = GST_CLOCK_TIME_NONE;
-  base_video_decoder->system_frame_number = 0;
-  base_video_decoder->presentation_frame_number = 0;
-  base_video_decoder->base_picture_number = 0;
   base_video_decoder->last_timestamp = GST_CLOCK_TIME_NONE;
 
   base_video_decoder->input_offset = 0;
@@ -962,37 +932,54 @@ gst_base_video_decoder_change_state (GstElement * element,
   return ret;
 }
 
-GstFlowReturn
-gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
-    GstVideoFrame * frame)
+static gboolean
+gst_base_video_decoder_check_timestamp (GstBaseVideoDecoder *
+    base_video_decoder, GstClockTime timestamp)
 {
-  GstBaseVideoDecoderClass *base_video_decoder_class;
-  GstBuffer *src_buffer;
+  if (timestamp == GST_CLOCK_TIME_NONE)
+    return FALSE;
 
-  GST_DEBUG ("finish frame");
+  if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp))
+    return timestamp > base_video_decoder->last_timestamp;
 
-  base_video_decoder_class =
-      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
+  return TRUE;
+}
 
-  GST_DEBUG ("finish frame sync=%d pts=%" GST_TIME_FORMAT,
+static void
+gst_base_video_decoder_calculate_timestamps (GstBaseVideoDecoder * + base_video_decoder, GstVideoFrame * frame, + GstClockTime * presentation_timestamp, GstClockTime * presentation_duration) +{ + GST_DEBUG ("calculate timestamps sync=%d upstream timestamp: %" + GST_TIME_FORMAT " parsed timestamp: %" GST_TIME_FORMAT, GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT), - GST_TIME_ARGS (frame->presentation_timestamp)); + GST_TIME_ARGS (frame->upstream_timestamp), + GST_TIME_ARGS (frame->parsed_timestamp)); - if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) { - if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) { - GST_DEBUG ("sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT, - GST_TIME_ARGS (frame->presentation_timestamp), - GST_TIME_ARGS (frame->presentation_timestamp - - base_video_decoder->segment.start)); - base_video_decoder->timestamp_offset = frame->presentation_timestamp; - base_video_decoder->field_index = 0; - } else { - /* This case is for one initial timestamp and no others, e.g., - * filesrc ! decoder ! xvimagesink */ - GST_WARNING ("sync timestamp didn't change, ignoring"); - frame->presentation_timestamp = GST_CLOCK_TIME_NONE; - } - } else { + *presentation_timestamp = GST_CLOCK_TIME_NONE; + *presentation_duration = GST_CLOCK_TIME_NONE; + + if (gst_base_video_decoder_check_timestamp (base_video_decoder, + frame->upstream_timestamp)) { + *presentation_timestamp = frame->upstream_timestamp; + *presentation_duration = frame->upstream_duration; + } + + else if (gst_base_video_decoder_check_timestamp (base_video_decoder, + frame->parsed_timestamp)) + *presentation_timestamp = frame->parsed_timestamp; + + + if (GST_CLOCK_TIME_IS_VALID (*presentation_timestamp)) { + GST_DEBUG ("sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT, + GST_TIME_ARGS (*presentation_timestamp), + GST_TIME_ARGS (*presentation_timestamp - + base_video_decoder->segment.start)); + base_video_decoder->timestamp_offset = *presentation_timestamp; + base_video_decoder->field_index = 0; + } + + else { if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) { GST_WARNING ("sync point doesn't have timestamp"); if (!GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) { @@ -1003,33 +990,40 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, base_video_decoder->field_index = 0; } } - } - frame->field_index = base_video_decoder->field_index; - base_video_decoder->field_index += frame->n_fields; - if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) { - frame->presentation_timestamp = + *presentation_timestamp = gst_base_video_decoder_get_field_timestamp (base_video_decoder, - frame->field_index); - frame->presentation_duration = GST_CLOCK_TIME_NONE; - frame->decode_timestamp = - gst_base_video_decoder_get_timestamp (base_video_decoder, - frame->decode_frame_number); + base_video_decoder->field_index); } - if (frame->presentation_duration == GST_CLOCK_TIME_NONE) { - frame->presentation_duration = + + if (*presentation_duration == GST_CLOCK_TIME_NONE) { + *presentation_duration = gst_base_video_decoder_get_field_duration (base_video_decoder, frame->n_fields); } - if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp)) { - if (frame->presentation_timestamp < base_video_decoder->last_timestamp) { - GST_WARNING ("decreasing timestamp (%" GST_TIME_FORMAT " < %" - GST_TIME_FORMAT ")", GST_TIME_ARGS (frame->presentation_timestamp), - GST_TIME_ARGS 
(base_video_decoder->last_timestamp)); - } - } - base_video_decoder->last_timestamp = frame->presentation_timestamp; + base_video_decoder->field_index += frame->n_fields; + base_video_decoder->last_timestamp = *presentation_timestamp; +} + +GstFlowReturn +gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, + GstVideoFrame * frame) +{ + GstBaseVideoDecoderClass *base_video_decoder_class; + + GstClockTime presentation_timestamp; + GstClockTime presentation_duration; + + GstBuffer *src_buffer; + + GST_DEBUG ("finish frame"); + + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + gst_base_video_decoder_calculate_timestamps (base_video_decoder, frame, + &presentation_timestamp, &presentation_duration); src_buffer = frame->src_buffer; @@ -1063,13 +1057,13 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, base_video_decoder->discont = FALSE; } - GST_BUFFER_TIMESTAMP (src_buffer) = frame->presentation_timestamp; - GST_BUFFER_DURATION (src_buffer) = frame->presentation_duration; + GST_BUFFER_TIMESTAMP (src_buffer) = presentation_timestamp; + GST_BUFFER_DURATION (src_buffer) = presentation_duration; GST_BUFFER_OFFSET (src_buffer) = GST_BUFFER_OFFSET_NONE; GST_BUFFER_OFFSET_END (src_buffer) = GST_BUFFER_OFFSET_NONE; GST_DEBUG ("pushing frame %" GST_TIME_FORMAT, - GST_TIME_ARGS (frame->presentation_timestamp)); + GST_TIME_ARGS (presentation_timestamp)); gst_base_video_decoder_set_src_caps (base_video_decoder); @@ -1125,63 +1119,19 @@ gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder, { GstBaseVideoDecoderClass *base_video_decoder_class; + GstClockTime presentation_timestamp; + GstClockTime presentation_duration; + GST_DEBUG ("skip frame"); base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); - GST_DEBUG ("skip frame sync=%d pts=%" GST_TIME_FORMAT, - GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT), - GST_TIME_ARGS (frame->presentation_timestamp)); - - if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) { - if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) { - GST_DEBUG ("sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT, - GST_TIME_ARGS (frame->presentation_timestamp), - GST_TIME_ARGS (frame->presentation_timestamp - - base_video_decoder->segment.start)); - base_video_decoder->timestamp_offset = frame->presentation_timestamp; - base_video_decoder->field_index = 0; - } else { - /* This case is for one initial timestamp and no others, e.g., - * filesrc ! decoder ! xvimagesink */ - GST_WARNING ("sync timestamp didn't change, ignoring"); - frame->presentation_timestamp = GST_CLOCK_TIME_NONE; - } - } else { - if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) { - GST_WARNING ("sync point doesn't have timestamp"); - if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) { - GST_WARNING - ("No base timestamp. 
Assuming frames start at segment start"); - base_video_decoder->timestamp_offset = - base_video_decoder->segment.start; - base_video_decoder->field_index = 0; - } - } - } - frame->field_index = base_video_decoder->field_index; - base_video_decoder->field_index += frame->n_fields; - - if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) { - frame->presentation_timestamp = - gst_base_video_decoder_get_field_timestamp (base_video_decoder, - frame->field_index); - frame->presentation_duration = GST_CLOCK_TIME_NONE; - frame->decode_timestamp = - gst_base_video_decoder_get_timestamp (base_video_decoder, - frame->decode_frame_number); - } - if (frame->presentation_duration == GST_CLOCK_TIME_NONE) { - frame->presentation_duration = - gst_base_video_decoder_get_field_duration (base_video_decoder, - frame->n_fields); - } - - base_video_decoder->last_timestamp = frame->presentation_timestamp; + gst_base_video_decoder_calculate_timestamps (base_video_decoder, frame, + &presentation_timestamp, &presentation_duration); GST_DEBUG ("skipping frame %" GST_TIME_FORMAT, - GST_TIME_ARGS (frame->presentation_timestamp)); + GST_TIME_ARGS (presentation_timestamp)); gst_video_frame_unref (frame); } @@ -1209,22 +1159,14 @@ gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder, gst_base_video_decoder_get_timestamp_at_offset (base_video_decoder, frame_end_offset, ×tamp, &duration); - frame->presentation_timestamp = timestamp; - frame->presentation_duration = duration; + frame->upstream_timestamp = timestamp; + frame->upstream_duration = duration; - if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT)) - base_video_decoder->distance_from_sync = 0; - - frame->distance_from_sync = base_video_decoder->distance_from_sync; - base_video_decoder->distance_from_sync++; - - GST_DEBUG ("pts %" GST_TIME_FORMAT, - GST_TIME_ARGS (frame->presentation_timestamp)); - GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp)); - GST_DEBUG ("dist %d", frame->distance_from_sync); + GST_DEBUG ("upstream timestamp %" GST_TIME_FORMAT, + GST_TIME_ARGS (frame->upstream_timestamp)); running_time = gst_segment_to_running_time (&base_video_decoder->segment, - GST_FORMAT_TIME, frame->presentation_timestamp); + GST_FORMAT_TIME, frame->upstream_timestamp); if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->earliest_time)) deadline = GST_CLOCK_DIFF (base_video_decoder->earliest_time, running_time); diff --git a/sys/vdpau/basevideodecoder/gstbasevideodecoder.h b/sys/vdpau/basevideodecoder/gstbasevideodecoder.h index 5f0e78d7b2..e1c4c4572f 100644 --- a/sys/vdpau/basevideodecoder/gstbasevideodecoder.h +++ b/sys/vdpau/basevideodecoder/gstbasevideodecoder.h @@ -95,19 +95,14 @@ struct _GstBaseVideoDecoder GstVideoState state; GstSegment segment; - guint64 presentation_frame_number; - guint64 system_frame_number; - GstCaps *caps; gboolean have_src_caps; GstVideoFrame *current_frame; - gint distance_from_sync; - gint reorder_depth; - - GstClockTime buffer_timestamp; - + GList *timestamps; + guint64 field_index; + GstClockTime last_timestamp; GstClockTime timestamp_offset; gdouble proportion; @@ -116,22 +111,12 @@ struct _GstBaseVideoDecoder guint64 input_offset; guint64 current_buf_offset; guint64 prev_buf_offset; - - GstClockTime last_timestamp; - guint64 base_picture_number; - - gint field_index; - - gboolean is_delta_unit; - - GList *timestamps; gboolean have_segment; /* properties */ gboolean sink_clipping; - gboolean packetized; - + gboolean packetized; }; struct _GstBaseVideoDecoderClass 
diff --git a/sys/vdpau/basevideodecoder/gstvideoframe.c b/sys/vdpau/basevideodecoder/gstvideoframe.c index 8eecc1933f..704c22ddc7 100644 --- a/sys/vdpau/basevideodecoder/gstvideoframe.c +++ b/sys/vdpau/basevideodecoder/gstvideoframe.c @@ -57,9 +57,11 @@ gst_video_frame_finalize (GstVideoFrame * frame) static void gst_video_frame_init (GstVideoFrame * frame, gpointer g_class) { - frame->decode_timestamp = GST_CLOCK_TIME_NONE; - frame->presentation_timestamp = GST_CLOCK_TIME_NONE; - frame->presentation_duration = GST_CLOCK_TIME_NONE; + frame->upstream_timestamp = GST_CLOCK_TIME_NONE; + frame->upstream_duration = GST_CLOCK_TIME_NONE; + + frame->parsed_timestamp = GST_CLOCK_TIME_NONE; + frame->n_fields = 2; frame->sink_buffer = NULL; diff --git a/sys/vdpau/basevideodecoder/gstvideoframe.h b/sys/vdpau/basevideodecoder/gstvideoframe.h index 8c9e12c65e..ba63c4bf04 100644 --- a/sys/vdpau/basevideodecoder/gstvideoframe.h +++ b/sys/vdpau/basevideodecoder/gstvideoframe.h @@ -65,22 +65,15 @@ struct _GstVideoFrame { GstMiniObject mini_object; - GstClockTime decode_timestamp; - GstClockTime presentation_timestamp; - GstClockTime presentation_duration; + GstClockTime upstream_timestamp; + GstClockTime upstream_duration; - gint system_frame_number; - gint decode_frame_number; - gint presentation_frame_number; + GstClockTime parsed_timestamp; - gint distance_from_sync; + guint n_fields; GstBuffer *sink_buffer; GstBuffer *src_buffer; - - gint field_index; - gint n_fields; - }; struct _GstVideoFrameClass
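As a quick illustration of the policy described in the commit message (prefer the
upstream timestamp, then the timestamp parsed by the decoder, and only then
interpolate from the last trusted timestamp), here is a minimal, self-contained
sketch. The DecoderState struct, the timestamp_is_usable() and select_timestamp()
helpers, and the two-fields-per-frame interpolation are assumptions made for this
sketch only; the patch itself implements the policy in
gst_base_video_decoder_check_timestamp() and
gst_base_video_decoder_calculate_timestamps() above.

/* Hypothetical, simplified sketch of the timestamp selection order.
 * Names and the DecoderState struct are illustrative, not the actual
 * GstBaseVideoDecoder API. Build with:
 *   gcc sketch.c $(pkg-config --cflags --libs gstreamer-0.10)
 */
#include <gst/gst.h>

typedef struct
{
  GstClockTime last_timestamp;  /* last PTS handed downstream */
  GstClockTime timestamp_offset;        /* last trusted PTS */
  guint64 field_index;          /* fields output since timestamp_offset */
  gint fps_n, fps_d;            /* negotiated frame rate */
} DecoderState;

/* A candidate timestamp is only usable if it is valid and strictly
 * greater than what was already output. */
static gboolean
timestamp_is_usable (const DecoderState * state, GstClockTime ts)
{
  if (!GST_CLOCK_TIME_IS_VALID (ts))
    return FALSE;
  if (GST_CLOCK_TIME_IS_VALID (state->last_timestamp))
    return ts > state->last_timestamp;
  return TRUE;
}

/* Pick a presentation timestamp for a frame made up of n_fields fields. */
static GstClockTime
select_timestamp (DecoderState * state, GstClockTime upstream_ts,
    GstClockTime parsed_ts, guint n_fields)
{
  GstClockTime pts;

  if (timestamp_is_usable (state, upstream_ts))
    pts = upstream_ts;
  else if (timestamp_is_usable (state, parsed_ts))
    pts = parsed_ts;
  else
    pts = GST_CLOCK_TIME_NONE;

  if (GST_CLOCK_TIME_IS_VALID (pts)) {
    /* A trusted timestamp restarts interpolation from here. */
    state->timestamp_offset = pts;
    state->field_index = 0;
  } else if (GST_CLOCK_TIME_IS_VALID (state->timestamp_offset)) {
    /* Interpolate from the last trusted timestamp, assuming two fields
     * per frame at the negotiated frame rate. The real base class also
     * falls back to the segment start for sync points without any
     * usable timestamp. */
    pts = state->timestamp_offset +
        gst_util_uint64_scale (state->field_index,
        state->fps_d * GST_SECOND, state->fps_n * 2);
  }

  state->field_index += n_fields;
  state->last_timestamp = pts;
  return pts;
}

int
main (int argc, char *argv[])
{
  DecoderState state = { GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, 0, 25, 1 };
  GstClockTime pts;
  gint i;

  gst_init (&argc, &argv);

  /* Frame 0 carries an upstream timestamp of 1s; frames 1 and 2 carry none
   * and come out interpolated 40 ms apart (25 fps, two fields per frame). */
  for (i = 0; i < 3; i++) {
    pts = select_timestamp (&state,
        i == 0 ? 1 * GST_SECOND : GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, 2);
    g_print ("frame %d: %" GST_TIME_FORMAT "\n", i, GST_TIME_ARGS (pts));
  }

  return 0;
}

Because a candidate timestamp is only accepted when it is strictly greater than
the last one pushed downstream, a bogus upstream or parsed timestamp is simply
ignored and the frame falls back to interpolation instead of producing a
decreasing PTS.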