vdpau: slightly fix GstBaseVideoDecoder timestamping

Clear timestamps on flush and properly calculate the frame's end offset.
Carl-Anton Ingmarsson 2010-08-02 18:23:11 +02:00
parent a07cee756a
commit 008a049b20
4 changed files with 68 additions and 58 deletions
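In short: the chain function now records each input buffer's timestamp against the buffer's start offset before input_offset is advanced, the parse loop tracks the end offsets of the current and the previous buffer taken from the adapter, and gst_base_video_decoder_have_frame() picks one of those two as the frame's end offset via the new include_current_buf argument (TRUE when the buffer that completed the frame belongs to it, FALSE when that buffer already starts the next frame, as in the H.264 and MPEG parsers below). The following is a minimal, self-contained sketch in plain C that only models this bookkeeping; the names push_buffer, take_buffer and timestamp_at_frame_end and the simplified lookup are illustrative, not part of the GStreamer API.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_TS 16

typedef struct
{
  uint64_t offset;              /* input offset the timestamp was recorded at */
  double timestamp;             /* timestamp in seconds */
} Ts;

static Ts ts_list[MAX_TS];
static int ts_count;
static uint64_t input_offset;          /* total bytes accepted so far */
static uint64_t current_buf_offset;    /* end offset of the buffer being parsed */
static uint64_t prev_buf_offset;       /* end offset of the buffer before it */

/* chain step: record the buffer's timestamp at its *start* offset, then
 * advance input_offset (the order this commit switches to). */
static void
push_buffer (uint64_t size, double timestamp)
{
  if (ts_count < MAX_TS) {
    ts_list[ts_count].offset = input_offset;
    ts_list[ts_count].timestamp = timestamp;
    ts_count++;
  }
  input_offset += size;
}

/* parse step: remember how far the current and the previous parsed buffer
 * reach into the input stream. */
static void
take_buffer (uint64_t bytes_left_in_adapter)
{
  prev_buf_offset = current_buf_offset;
  current_buf_offset = input_offset - bytes_left_in_adapter;
}

/* have_frame step: the frame ends either with the current buffer or with
 * the previous one; use the newest timestamp recorded before that point. */
static double
timestamp_at_frame_end (bool include_current_buf)
{
  uint64_t end = include_current_buf ? current_buf_offset : prev_buf_offset;
  double ts = -1.0;
  int i;

  for (i = 0; i < ts_count && ts_list[i].offset < end; i++)
    ts = ts_list[i].timestamp;
  return ts;
}

int
main (void)
{
  /* Two 100-byte buffers, one frame per buffer.  The second buffer starts
   * the next frame, so a frame finished by its data must not pick up the
   * second buffer's timestamp (include_current_buf = false). */
  push_buffer (100, 0.00);
  take_buffer (0);
  push_buffer (100, 0.04);
  take_buffer (0);

  printf ("frame ends with previous buffer: %.2f s\n",
      timestamp_at_frame_end (false));  /* 0.00 */
  printf ("frame ends with current buffer:  %.2f s\n",
      timestamp_at_frame_end (true));   /* 0.04 */
  return 0;
}

Compiled on its own, the sketch prints 0.00 s for a frame that ends with the previous buffer and 0.04 s for one that includes the current buffer.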

gstbasevideodecoder.c

@@ -53,6 +53,20 @@ struct _Timestamp
   GstClockTime duration;
 };
 
+static void
+gst_base_video_decoder_clear_timestamps (GstBaseVideoDecoder *
+    base_video_decoder)
+{
+  GList *l;
+
+  for (l = base_video_decoder->timestamps; l;
+      l = base_video_decoder->timestamps) {
+    g_slice_free (Timestamp, l->data);
+    base_video_decoder->timestamps = l->next;
+    g_list_free1 (l);
+  }
+}
+
 static void
 gst_base_video_decoder_add_timestamp (GstBaseVideoDecoder * base_video_decoder,
     GstBuffer * buffer)
@@ -61,8 +75,8 @@ gst_base_video_decoder_add_timestamp (GstBaseVideoDecoder * base_video_decoder,
 
   ts = g_slice_new (Timestamp);
 
-  GST_DEBUG ("adding timestamp %" GST_TIME_FORMAT " %" GST_TIME_FORMAT,
-      GST_TIME_ARGS (base_video_decoder->input_offset),
+  GST_DEBUG ("adding timestamp %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT,
+      base_video_decoder->input_offset,
       GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
 
   ts->offset = base_video_decoder->input_offset;
@@ -100,8 +114,8 @@ gst_base_video_decoder_get_timestamp_at_offset (GstBaseVideoDecoder *
     }
   }
 
-  GST_DEBUG ("got timestamp %" GST_TIME_FORMAT " %" GST_TIME_FORMAT,
-      GST_TIME_ARGS (offset), GST_TIME_ARGS (*timestamp));
+  GST_DEBUG ("got timestamp %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT,
+      offset, GST_TIME_ARGS (*timestamp));
 }
 
 static guint64
@@ -195,18 +209,18 @@ gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder)
   base_video_decoder->last_timestamp = GST_CLOCK_TIME_NONE;
 
   base_video_decoder->input_offset = 0;
-  base_video_decoder->frame_offset = 0;
+  base_video_decoder->current_buf_offset = -1;
+  base_video_decoder->prev_buf_offset = -1;
 
-  /* This function could be called from finalize() */
-  if (base_video_decoder->input_adapter) {
-    gst_adapter_clear (base_video_decoder->input_adapter);
-  }
+  gst_adapter_clear (base_video_decoder->input_adapter);
 
   if (base_video_decoder->current_frame) {
     gst_video_frame_unref (base_video_decoder->current_frame);
     base_video_decoder->current_frame = NULL;
   }
 
+  gst_base_video_decoder_clear_timestamps (base_video_decoder);
+
   base_video_decoder->have_src_caps = FALSE;
 
   GST_OBJECT_LOCK (base_video_decoder);
@@ -335,12 +349,16 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
             (base_video_decoder), event);
       }
       break;
-    case GST_EVENT_FLUSH_STOP:{
-      GST_OBJECT_LOCK (base_video_decoder);
-      base_video_decoder->earliest_time = GST_CLOCK_TIME_NONE;
-      base_video_decoder->proportion = 0.5;
-      GST_OBJECT_UNLOCK (base_video_decoder);
-    }
+    case GST_EVENT_FLUSH_STOP:
+      gst_base_video_decoder_flush (base_video_decoder);
+      gst_segment_init (&base_video_decoder->segment, GST_FORMAT_TIME);
+
+      ret =
+          gst_pad_push_event (GST_BASE_VIDEO_DECODER_SRC_PAD
+          (base_video_decoder), event);
+      break;
+
     default:
       /* FIXME this changes the order of events */
       ret =
@@ -768,7 +786,9 @@ lost_sync:
     return GST_FLOW_OK;
 
   buf = gst_adapter_take_buffer (dec->input_adapter, size);
-  GST_BUFFER_OFFSET (buf) = dec->input_offset -
+
+  dec->prev_buf_offset = dec->current_buf_offset;
+  dec->current_buf_offset = dec->input_offset -
       gst_adapter_available (dec->input_adapter);
 
   ret = klass->parse_data (dec, buf, at_eos);
@@ -851,16 +871,16 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
     gst_base_video_decoder_new_frame (base_video_decoder);
   }
 
-  base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);
-
   if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
     gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
   }
 
+  base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);
+
   if (base_video_decoder->packetized) {
     base_video_decoder->current_frame->sink_buffer = buf;
 
-    ret = gst_base_video_decoder_have_frame (base_video_decoder, NULL);
+    ret = gst_base_video_decoder_have_frame (base_video_decoder, TRUE, NULL);
   } else {
 
     gst_adapter_push (base_video_decoder->input_adapter, buf);
@@ -882,6 +902,8 @@ gst_base_video_decoder_stop (GstBaseVideoDecoder * base_video_decoder)
 
   base_video_decoder_class =
       GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
 
+  gst_base_video_decoder_reset (base_video_decoder);
+
   if (base_video_decoder_class->stop)
     return base_video_decoder_class->stop (base_video_decoder);
@@ -1166,10 +1188,12 @@ gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder,
 
 GstFlowReturn
 gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder,
-    GstVideoFrame ** new_frame)
+    gboolean include_current_buf, GstVideoFrame ** new_frame)
 {
   GstVideoFrame *frame = base_video_decoder->current_frame;
   GstBaseVideoDecoderClass *klass;
+  guint64 frame_end_offset;
+
   GstClockTime timestamp, duration;
   GstClockTime running_time;
   GstClockTimeDiff deadline;
@@ -1177,8 +1201,13 @@ gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder,
 
   klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
 
+  if (include_current_buf)
+    frame_end_offset = base_video_decoder->current_buf_offset;
+  else
+    frame_end_offset = base_video_decoder->prev_buf_offset;
+
   gst_base_video_decoder_get_timestamp_at_offset (base_video_decoder,
-      base_video_decoder->frame_offset, &timestamp, &duration);
+      frame_end_offset, &timestamp, &duration);
 
   frame->presentation_timestamp = timestamp;
   frame->presentation_duration = duration;
@@ -1218,13 +1247,6 @@ gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder,
   return ret;
 }
 
-void
-gst_base_video_decoder_frame_start (GstBaseVideoDecoder * base_video_decoder,
-    GstBuffer * buf)
-{
-  base_video_decoder->frame_offset = GST_BUFFER_OFFSET (buf);
-}
-
 GstVideoState
 gst_base_video_decoder_get_state (GstBaseVideoDecoder * base_video_decoder)
 {
@@ -1311,12 +1333,7 @@ gst_base_video_decoder_finalize (GObject * object)
   base_video_decoder = GST_BASE_VIDEO_DECODER (object);
   base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (object);
 
-  gst_base_video_decoder_reset (base_video_decoder);
-
-  if (base_video_decoder->input_adapter) {
-    g_object_unref (base_video_decoder->input_adapter);
-    base_video_decoder->input_adapter = NULL;
-  }
+  g_object_unref (base_video_decoder->input_adapter);
 
   GST_DEBUG_OBJECT (object, "finalize");

gstbasevideodecoder.h

@@ -114,7 +114,9 @@ struct _GstBaseVideoDecoder
   GstClockTime earliest_time;
 
   guint64 input_offset;
-  guint64 frame_offset;
+  guint64 current_buf_offset;
+  guint64 prev_buf_offset;
+
   GstClockTime last_timestamp;
 
   guint64 base_picture_number;
@@ -178,12 +180,9 @@ GstFlowReturn gst_base_video_decoder_finish_frame (GstBaseVideoDecoder *base_vid
 void gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder,
     GstVideoFrame * frame);
 
-void
-gst_base_video_decoder_frame_start (GstBaseVideoDecoder *base_video_decoder,
-    GstBuffer *buf);
 GstFlowReturn
 gst_base_video_decoder_have_frame (GstBaseVideoDecoder *base_video_decoder,
-    GstVideoFrame **new_frame);
+    gboolean include_current_buf, GstVideoFrame **new_frame);
 
 GstVideoState gst_base_video_decoder_get_state (GstBaseVideoDecoder *base_video_decoder);
 void gst_base_video_decoder_set_state (GstBaseVideoDecoder *base_video_decoder,

gstvdph264dec.c

@@ -701,10 +701,9 @@ gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
   if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_H264_FRAME_GOT_PRIMARY)) {
     if (nal_unit.type == GST_NAL_SPS || nal_unit.type == GST_NAL_PPS ||
         nal_unit.type == GST_NAL_SEI || nal_unit.type == GST_NAL_AU_DELIMITER ||
-        (nal_unit.type >= 14 && nal_unit.type <= 18)) {
-      ret = gst_base_video_decoder_have_frame (base_video_decoder, &frame);
-      gst_base_video_decoder_frame_start (base_video_decoder, buf);
-    }
+        (nal_unit.type >= 14 && nal_unit.type <= 18))
+      ret =
+          gst_base_video_decoder_have_frame (base_video_decoder, FALSE, &frame);
   }
 
   if (nal_unit.type >= GST_NAL_SLICE && nal_unit.type <= GST_NAL_SLICE_IDR) {
@@ -749,10 +748,11 @@ gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
             p_slice->delta_pic_order_cnt[1]))
           finish_frame = TRUE;
 
-        if (finish_frame) {
-          ret = gst_base_video_decoder_have_frame (base_video_decoder, &frame);
-          gst_base_video_decoder_frame_start (base_video_decoder, buf);
-        }
+        if (finish_frame)
+          ret =
+              gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
+              &frame);
+
       }
 
       if (!GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_H264_FRAME_GOT_PRIMARY)) {

gstvdpmpegdec.c

@@ -432,11 +432,9 @@ gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
     case MPEG_PACKET_SEQUENCE:
       GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SEQUENCE");
 
-      if (mpeg_dec->prev_packet != -1) {
-        ret = gst_base_video_decoder_have_frame (base_video_decoder,
-            (GstVideoFrame **) & mpeg_frame);
-        gst_base_video_decoder_frame_start (base_video_decoder, buf);
-      }
+      if (mpeg_dec->prev_packet != -1)
+        ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
+            (GstVideoFrame **) & mpeg_frame);
 
       mpeg_frame->seq = buf;
       break;
@@ -445,11 +443,9 @@ gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
       GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_PICTURE");
 
       if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE &&
-          mpeg_dec->prev_packet != MPEG_PACKET_GOP) {
-        ret = gst_base_video_decoder_have_frame (base_video_decoder,
-            (GstVideoFrame **) & mpeg_frame);
-        gst_base_video_decoder_frame_start (base_video_decoder, buf);
-      }
+          mpeg_dec->prev_packet != MPEG_PACKET_GOP)
+        ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
+            (GstVideoFrame **) & mpeg_frame);
 
       mpeg_frame->pic = buf;
       break;
@@ -457,11 +453,9 @@ gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
     case MPEG_PACKET_GOP:
      GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_GOP");
 
-      if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE) {
-        ret = gst_base_video_decoder_have_frame (base_video_decoder,
-            (GstVideoFrame **) & mpeg_frame);
-        gst_base_video_decoder_frame_start (base_video_decoder, buf);
-      }
+      if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE)
+        ret = gst_base_video_decoder_have_frame (base_video_decoder, FALSE,
+            (GstVideoFrame **) & mpeg_frame);
 
       mpeg_frame->gop = buf;
       break;