videodecoder: Improve timestamp handling.

Fix problems with timestamp calculations when the incoming
buffers have sparse timestamps (as with theora) and during
reverse playback.

Fixes #675773
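
As a reference, the interpolation that the reworked
gst_video_decoder_get_timestamp () performs can be sketched roughly as
follows (a standalone illustration only, not the patched function itself;
the interpolate_ts () helper name is made up):

  #include <gst/gst.h>

  /* Derive a PTS for picture 'pic' from the last known (base_ts, base_pic)
   * pair and the fps_n/fps_d frame rate. gst_util_uint64_scale () keeps the
   * picture-delta * GST_SECOND * fps_d product from overflowing. */
  static GstClockTime
  interpolate_ts (GstClockTime base_ts, guint64 base_pic, guint64 pic,
      gint fps_n, gint fps_d)
  {
    GstClockTime off;

    if (!GST_CLOCK_TIME_IS_VALID (base_ts) || fps_n == 0 || fps_d == 0)
      return GST_CLOCK_TIME_NONE;

    if (pic < base_pic) {
      /* reverse playback walks backwards; refuse to wrap below zero */
      off = gst_util_uint64_scale (base_pic - pic, fps_d * GST_SECOND, fps_n);
      return off <= base_ts ? base_ts - off : GST_CLOCK_TIME_NONE;
    }
    return base_ts + gst_util_uint64_scale (pic - base_pic,
        fps_d * GST_SECOND, fps_n);
  }
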
commit ffd0f28460
parent 1935cf0022
Author: Jan Schmidt
Date:   2012-06-20 00:36:38 +10:00

@@ -345,11 +345,8 @@ struct _GstVideoDecoderPrivate
/* tracking ts and offsets */
GList *timestamps;
- /* combine to yield (presentation) ts */
- GstClockTime timestamp_offset;
/* last outgoing ts */
- GstClockTime last_timestamp;
+ GstClockTime last_timestamp_out;
/* reverse playback */
/* collect input */
@@ -363,8 +360,12 @@ struct _GstVideoDecoderPrivate
/* collected output - of buffer objects, not frames */
GList *output_queued;
- /* FIXME : base_picture_number is never set */
+ /* base_picture_number is the picture number of the reference picture */
guint64 base_picture_number;
+ /* combine with base_picture_number, framerate and calcs to yield (presentation) ts */
+ GstClockTime base_timestamp;
/* FIXME : reorder_depth is never set */
int reorder_depth;
int distance_from_sync;
@@ -422,10 +423,10 @@ static gboolean gst_video_decoder_set_src_caps (GstVideoDecoder * decoder);
static void gst_video_decoder_release_frame (GstVideoDecoder * dec,
GstVideoCodecFrame * frame);
- static guint64
- gst_video_decoder_get_timestamp (GstVideoDecoder * decoder, int picture_number);
- static guint64 gst_video_decoder_get_frame_duration (GstVideoDecoder * decoder,
+ static GstClockTime gst_video_decoder_get_timestamp (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame);
+ static GstClockTime gst_video_decoder_get_frame_duration (GstVideoDecoder *
+ decoder, GstVideoCodecFrame * frame);
static GstVideoCodecFrame *gst_video_decoder_new_frame (GstVideoDecoder *
decoder);
static GstFlowReturn gst_video_decoder_clip_and_push_buf (GstVideoDecoder *
@@ -973,7 +974,8 @@ gst_video_decoder_sink_event_default (GstVideoDecoder * decoder,
gst_video_decoder_flush (decoder, FALSE);
- priv->timestamp_offset = segment.start;
+ priv->base_timestamp = GST_CLOCK_TIME_NONE;
+ priv->base_picture_number = 0;
decoder->input_segment = segment;
@@ -1262,7 +1264,7 @@ gst_video_decoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
}
/* we start from the last seen time */
- time = dec->priv->last_timestamp;
+ time = dec->priv->last_timestamp_out;
/* correct for the segment values */
time = gst_segment_to_stream_time (&dec->output_segment,
GST_FORMAT_TIME, time);
@@ -1523,8 +1525,8 @@ gst_video_decoder_reset (GstVideoDecoder * decoder, gboolean full)
priv->discont = TRUE;
- priv->timestamp_offset = GST_CLOCK_TIME_NONE;
- priv->last_timestamp = GST_CLOCK_TIME_NONE;
+ priv->base_timestamp = GST_CLOCK_TIME_NONE;
+ priv->last_timestamp_out = GST_CLOCK_TIME_NONE;
priv->input_offset = 0;
priv->frame_offset = 0;
@@ -1739,15 +1741,15 @@ gst_video_decoder_flush_parse (GstVideoDecoder * dec)
/* Last chance to calculate a timestamp as we loop backwards
* through the list */
if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE)
- priv->last_timestamp = GST_BUFFER_TIMESTAMP (buf);
- else if (priv->last_timestamp != GST_CLOCK_TIME_NONE &&
+ priv->last_timestamp_out = GST_BUFFER_TIMESTAMP (buf);
+ else if (priv->last_timestamp_out != GST_CLOCK_TIME_NONE &&
GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE) {
GST_BUFFER_TIMESTAMP (buf) =
- priv->last_timestamp - GST_BUFFER_DURATION (buf);
- priv->last_timestamp = GST_BUFFER_TIMESTAMP (buf);
+ priv->last_timestamp_out - GST_BUFFER_DURATION (buf);
+ priv->last_timestamp_out = GST_BUFFER_TIMESTAMP (buf);
GST_LOG_OBJECT (dec,
"Calculated TS %" GST_TIME_FORMAT " working backwards",
- GST_TIME_ARGS (priv->last_timestamp));
+ GST_TIME_ARGS (priv->last_timestamp_out));
}
res = gst_video_decoder_clip_and_push_buf (dec, buf);
@@ -1834,7 +1836,7 @@ gst_video_decoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
GST_DEBUG_OBJECT (decoder, "received DISCONT buffer");
/* track present position */
- ts = priv->timestamp_offset;
+ ts = priv->base_timestamp;
/* buffer may claim DISCONT loudly, if it can't tell us where we are now,
* we'll stick to where we were ...
@@ -1842,7 +1844,7 @@ gst_video_decoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
if (decoder->input_segment.rate > 0.0
&& !GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
GST_DEBUG_OBJECT (decoder, "... but restoring previous ts tracking");
- priv->timestamp_offset = ts;
+ priv->base_timestamp = ts;
}
}
@@ -2013,46 +2015,48 @@ gst_video_decoder_prepare_finish_frame (GstVideoDecoder *
goto no_output_buffer;
if (GST_CLOCK_TIME_IS_VALID (frame->pts)) {
- if (frame->pts != priv->timestamp_offset) {
+ if (frame->pts != priv->base_timestamp) {
GST_DEBUG_OBJECT (decoder,
"sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->pts),
GST_TIME_ARGS (frame->pts - decoder->output_segment.start));
- priv->timestamp_offset = frame->pts;
- } else if (GST_CLOCK_TIME_IS_VALID (priv->last_timestamp)) {
- /* This case is for one initial timestamp and no others, e.g.,
- * filesrc ! decoder ! xvimagesink */
- GST_WARNING_OBJECT (decoder, "sync timestamp didn't change, ignoring");
- frame->pts = GST_CLOCK_TIME_NONE;
+ priv->base_timestamp = frame->pts;
+ priv->base_picture_number = frame->decode_frame_number;
}
- } else {
- if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
- GST_WARNING_OBJECT (decoder, "sync point doesn't have timestamp");
- if (!GST_CLOCK_TIME_IS_VALID (priv->timestamp_offset)) {
- GST_WARNING_OBJECT (decoder,
- "No base timestamp. Assuming frames start at segment start");
- priv->timestamp_offset = decoder->output_segment.start;
- }
- }
}
- if (frame->pts == GST_CLOCK_TIME_NONE) {
- frame->pts =
- gst_video_decoder_get_timestamp (decoder, frame->decode_frame_number);
- frame->duration = GST_CLOCK_TIME_NONE;
- }
- if (frame->duration == GST_CLOCK_TIME_NONE) {
- frame->duration = gst_video_decoder_get_frame_duration (decoder, frame);
- }
- if (GST_CLOCK_TIME_IS_VALID (priv->last_timestamp)) {
- if (frame->pts < priv->last_timestamp) {
+ if (frame->pts == GST_CLOCK_TIME_NONE) {
+ frame->pts = gst_video_decoder_get_timestamp (decoder, frame);
+ frame->duration = gst_video_decoder_get_frame_duration (decoder, frame);
+ /* Last ditch timestamp guess: Just add the duration to the previous
+ * frame */
+ if (frame->pts == GST_CLOCK_TIME_NONE &&
+ priv->last_timestamp_out != GST_CLOCK_TIME_NONE &&
+ frame->duration != GST_CLOCK_TIME_NONE) {
+ frame->pts = priv->last_timestamp_out + frame->duration;
+ GST_LOG_OBJECT (decoder,
+ "Guessing timestamp %" GST_TIME_FORMAT " for frame...",
+ GST_TIME_ARGS (frame->pts));
+ }
+ } else if (frame->duration == GST_CLOCK_TIME_NONE) {
+ frame->duration = gst_video_decoder_get_frame_duration (decoder, frame);
+ GST_LOG_OBJECT (decoder,
+ "Guessing duration %" GST_TIME_FORMAT " for frame...",
+ GST_TIME_ARGS (frame->duration));
+ }
+ if (GST_CLOCK_TIME_IS_VALID (priv->last_timestamp_out)) {
+ if (frame->pts < priv->last_timestamp_out) {
GST_WARNING_OBJECT (decoder,
"decreasing timestamp (%" GST_TIME_FORMAT " < %"
GST_TIME_FORMAT ")",
- GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (priv->last_timestamp));
+ GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (priv->last_timestamp_out));
}
}
- priv->last_timestamp = frame->pts;
+ if (GST_CLOCK_TIME_IS_VALID (frame->pts))
+ priv->last_timestamp_out = frame->pts;
return;
@@ -2218,6 +2222,7 @@ gst_video_decoder_clip_and_push_buf (GstVideoDecoder * decoder, GstBuffer * buf)
GstFlowReturn ret = GST_FLOW_OK;
GstVideoDecoderPrivate *priv = decoder->priv;
guint64 start, stop;
+ guint64 cstart, cstop;
GstSegment *segment;
GstClockTime duration;
@@ -2232,9 +2237,13 @@ gst_video_decoder_clip_and_push_buf (GstVideoDecoder * decoder, GstBuffer * buf)
}
segment = &decoder->output_segment;
- if (gst_segment_clip (segment, GST_FORMAT_TIME, start, stop, &start, &stop)) {
- GST_BUFFER_TIMESTAMP (buf) = start;
- GST_BUFFER_DURATION (buf) = stop - start;
+ if (gst_segment_clip (segment, GST_FORMAT_TIME, start, stop, &cstart, &cstop)) {
+ GST_BUFFER_TIMESTAMP (buf) = cstart;
+ if (stop != GST_CLOCK_TIME_NONE)
+ GST_BUFFER_DURATION (buf) = cstop - cstart;
GST_LOG_OBJECT (decoder,
"accepting buffer inside segment: %" GST_TIME_FORMAT " %"
GST_TIME_FORMAT " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
@@ -2317,25 +2326,40 @@ gst_video_decoder_add_to_frame (GstVideoDecoder * decoder, int n_bytes)
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
}
- static guint64
- gst_video_decoder_get_timestamp (GstVideoDecoder * decoder, int picture_number)
+ static GstClockTime
+ gst_video_decoder_get_timestamp (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame)
{
GstVideoDecoderPrivate *priv = decoder->priv;
GstVideoCodecState *state = priv->output_state;
+ guint64 picture_number = frame->decode_frame_number;
+ GstClockTime res = GST_CLOCK_TIME_NONE;
if (state->info.fps_d == 0 || state->info.fps_n == 0) {
- return -1;
+ return GST_CLOCK_TIME_NONE;
}
- if (picture_number < priv->base_picture_number) {
- return priv->timestamp_offset -
- (gint64) gst_util_uint64_scale (priv->base_picture_number
- - picture_number, state->info.fps_d * GST_SECOND, state->info.fps_n);
- } else {
- return priv->timestamp_offset +
- gst_util_uint64_scale (picture_number -
- priv->base_picture_number, state->info.fps_d * GST_SECOND,
- state->info.fps_n);
+ if (priv->base_timestamp != GST_CLOCK_TIME_NONE) {
+ if (picture_number < priv->base_picture_number) {
+ GstClockTime offset;
+ offset = gst_util_uint64_scale (priv->base_picture_number
+ - picture_number, state->info.fps_d * GST_SECOND, state->info.fps_n);
+ if (offset <= priv->base_timestamp)
+ res = priv->base_timestamp - offset;
+ } else {
+ res = priv->base_timestamp +
+ gst_util_uint64_scale (picture_number - priv->base_picture_number,
+ state->info.fps_d * GST_SECOND, state->info.fps_n);
+ }
+ GST_LOG_OBJECT (decoder, "Interpolated TS %" GST_TIME_FORMAT
+ " for picture %" G_GUINT64_FORMAT " from base time %" GST_TIME_FORMAT
+ " and pic %" G_GUINT64_FORMAT, GST_TIME_ARGS (res), picture_number,
+ GST_TIME_ARGS (priv->base_timestamp), priv->base_picture_number);
}
+ return res;
}
- static guint64