vdpau: fixup decoder timestamping

Carl-Anton Ingmarsson 2010-06-22 15:49:15 +02:00
parent 3826b3b57b
commit 90077a6f2b
4 changed files with 112 additions and 64 deletions

gstbasevideodecoder.c

@@ -45,6 +45,65 @@ GST_BOILERPLATE (GstBaseVideoDecoder, gst_base_video_decoder,
typedef struct _Timestamp Timestamp;
struct _Timestamp
{
guint64 offset;
GstClockTime timestamp;
GstClockTime duration;
};
static void
gst_base_video_decoder_add_timestamp (GstBaseVideoDecoder * base_video_decoder,
GstBuffer * buffer)
{
Timestamp *ts;
ts = g_slice_new (Timestamp);
GST_DEBUG ("adding timestamp %" GST_TIME_FORMAT " %" GST_TIME_FORMAT,
GST_TIME_ARGS (base_video_decoder->input_offset),
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
ts->offset = base_video_decoder->input_offset;
ts->timestamp = GST_BUFFER_TIMESTAMP (buffer);
ts->duration = GST_BUFFER_DURATION (buffer);
base_video_decoder->timestamps =
g_list_append (base_video_decoder->timestamps, ts);
}
static void
gst_base_video_decoder_get_timestamp_at_offset (GstBaseVideoDecoder *
base_video_decoder, guint64 offset, GstClockTime * timestamp,
GstClockTime * duration)
{
GList *g;
*timestamp = GST_CLOCK_TIME_NONE;
*duration = GST_CLOCK_TIME_NONE;
g = base_video_decoder->timestamps;
while (g) {
Timestamp *ts;
ts = g->data;
if (ts->offset <= offset) {
*timestamp = ts->timestamp;
*duration = ts->duration;
g_slice_free (Timestamp, ts);
g = g_list_next (g);
base_video_decoder->timestamps =
g_list_remove (base_video_decoder->timestamps, ts);
} else {
break;
}
}
GST_DEBUG ("got timestamp %" GST_TIME_FORMAT " %" GST_TIME_FORMAT,
GST_TIME_ARGS (offset), GST_TIME_ARGS (*timestamp));
}
static guint64
gst_base_video_decoder_get_timestamp (GstBaseVideoDecoder * base_video_decoder,
gint picture_number)
@@ -701,39 +760,20 @@ lost_sync:
res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos);
while (res == GST_BASE_VIDEO_DECODER_SCAN_RESULT_OK) {
GstClockTime timestamp, duration;
guint64 offset;
gboolean preroll, gap;
GstBuffer *buf;
GstFlowReturn ret;
GST_DEBUG ("Packet size: %u", size);
if (size > gst_adapter_available (dec->input_adapter))
return GST_FLOW_OK;
timestamp = GST_BUFFER_TIMESTAMP (dec->input_adapter->buflist->data);
duration = GST_BUFFER_DURATION (dec->input_adapter->buflist->data);
offset = GST_BUFFER_OFFSET (dec->input_adapter->buflist->data);
preroll = GST_BUFFER_FLAG_IS_SET (dec->input_adapter->buflist->data,
GST_BUFFER_FLAG_PREROLL);
gap = GST_BUFFER_FLAG_IS_SET (dec->input_adapter->buflist->data,
GST_BUFFER_FLAG_GAP);
buf = gst_adapter_take_buffer (dec->input_adapter, size);
GST_BUFFER_TIMESTAMP (buf) = timestamp;
GST_BUFFER_DURATION (buf) = duration;
GST_BUFFER_OFFSET (buf) = offset;
GST_BUFFER_OFFSET (buf) = dec->input_offset -
gst_adapter_available (dec->input_adapter);
if (preroll)
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_PREROLL);
else
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_PREROLL);
if (gap)
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_GAP);
else
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_GAP);
ret = klass->parse_data (dec, buf, at_eos);
if (ret != GST_FLOW_OK)
return ret;
res = klass->scan_for_packet_end (dec, dec->input_adapter, &size, at_eos);
}
@@ -810,14 +850,12 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
base_video_decoder->current_frame =
gst_base_video_decoder_new_frame (base_video_decoder);
}
#if 0
if (base_video_decoder->timestamp_offset == GST_CLOCK_TIME_NONE &&
GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
GST_DEBUG ("got new offset %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
base_video_decoder->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
gst_base_video_decoder_add_timestamp (base_video_decoder, buf);
}
#endif
base_video_decoder->input_offset += GST_BUFFER_SIZE (buf);
if (base_video_decoder->packetized) {
base_video_decoder->current_frame->sink_buffer = buf;
@@ -1130,23 +1168,25 @@ gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder,
{
GstVideoFrame *frame = base_video_decoder->current_frame;
GstBaseVideoDecoderClass *klass;
GstClockTime timestamp, duration;
GstClockTime running_time;
GstClockTimeDiff deadline;
GstFlowReturn ret;
klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
gst_base_video_decoder_get_timestamp_at_offset (base_video_decoder,
base_video_decoder->frame_offset, &timestamp, &duration);
frame->presentation_timestamp = timestamp;
frame->presentation_duration = duration;
if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_SYNC_POINT))
base_video_decoder->distance_from_sync = 0;
frame->distance_from_sync = base_video_decoder->distance_from_sync;
base_video_decoder->distance_from_sync++;
if (frame->sink_buffer) {
frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (frame->sink_buffer);
frame->presentation_duration = GST_BUFFER_DURATION (frame->sink_buffer);
}
GST_DEBUG ("pts %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->presentation_timestamp));
GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp));
@@ -1176,6 +1216,13 @@ gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder,
return ret;
}
void
gst_base_video_decoder_frame_start (GstBaseVideoDecoder * base_video_decoder,
GstBuffer * buf)
{
base_video_decoder->frame_offset = GST_BUFFER_OFFSET (buf);
}
GstVideoState *
gst_base_video_decoder_get_state (GstBaseVideoDecoder * base_video_decoder)
{

gstbasevideodecoder.h

@@ -184,6 +184,9 @@ GstFlowReturn gst_base_video_decoder_finish_frame (GstBaseVideoDecoder *base_vid
void gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame);
void
gst_base_video_decoder_frame_start (GstBaseVideoDecoder *base_video_decoder,
GstBuffer *buf);
GstFlowReturn
gst_base_video_decoder_have_frame (GstBaseVideoDecoder *base_video_decoder,
GstVideoFrame **new_frame);

gstvdph264dec.c

@@ -594,7 +594,6 @@ gst_vdp_h264_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
return GST_FLOW_OK;
alloc_error:
GST_ERROR_OBJECT (h264_dec, "Could not allocate output buffer");
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return ret;
@@ -704,6 +703,7 @@ gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
gint i;
GstVideoFrame *frame;
GstFlowReturn ret = GST_FLOW_OK;
GST_MEMDUMP ("data", GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
@@ -746,14 +746,18 @@ gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
frame = gst_base_video_decoder_get_current_frame (base_video_decoder);
/* does this mark the beginning of a new access unit */
if (nal_unit.type == GST_NAL_AU_DELIMITER)
gst_base_video_decoder_have_frame (base_video_decoder, &frame);
if (nal_unit.type == GST_NAL_AU_DELIMITER) {
ret = gst_base_video_decoder_have_frame (base_video_decoder, &frame);
gst_base_video_decoder_frame_start (base_video_decoder, buf);
}
if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VDP_H264_FRAME_GOT_PRIMARY)) {
if (nal_unit.type == GST_NAL_SPS || nal_unit.type == GST_NAL_PPS ||
nal_unit.type == GST_NAL_SEI ||
(nal_unit.type >= 14 && nal_unit.type <= 18))
gst_base_video_decoder_have_frame (base_video_decoder, &frame);
(nal_unit.type >= 14 && nal_unit.type <= 18)) {
ret = gst_base_video_decoder_have_frame (base_video_decoder, &frame);
gst_base_video_decoder_frame_start (base_video_decoder, buf);
}
}
if (nal_unit.type >= GST_NAL_SLICE && nal_unit.type <= GST_NAL_SLICE_IDR) {
@@ -767,35 +771,41 @@ gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VDP_H264_FRAME_GOT_PRIMARY)) {
GstH264Slice *p_slice;
guint8 pic_order_cnt_type, p_pic_order_cnt_type;
gboolean finish_frame = FALSE;
p_slice = &(GST_VDP_H264_FRAME_CAST (frame)->slice_hdr);
pic_order_cnt_type = slice.picture->sequence->pic_order_cnt_type;
p_pic_order_cnt_type = p_slice->picture->sequence->pic_order_cnt_type;
if (slice.frame_num != p_slice->frame_num)
gst_base_video_decoder_have_frame (base_video_decoder, &frame);
finish_frame = TRUE;
else if (slice.picture != p_slice->picture)
gst_base_video_decoder_have_frame (base_video_decoder, &frame);
finish_frame = TRUE;
else if (slice.bottom_field_flag != p_slice->bottom_field_flag)
gst_base_video_decoder_have_frame (base_video_decoder, &frame);
finish_frame = TRUE;
else if (nal_unit.ref_idc != p_slice->nal_unit.ref_idc &&
(nal_unit.ref_idc == 0 || p_slice->nal_unit.ref_idc == 0))
gst_base_video_decoder_have_frame (base_video_decoder, &frame);
finish_frame = TRUE;
else if ((pic_order_cnt_type == 0 && p_pic_order_cnt_type == 0) &&
(slice.pic_order_cnt_lsb != p_slice->pic_order_cnt_lsb ||
slice.delta_pic_order_cnt_bottom !=
p_slice->delta_pic_order_cnt_bottom))
gst_base_video_decoder_have_frame (base_video_decoder, &frame);
finish_frame = TRUE;
else if ((pic_order_cnt_type == 1 && p_pic_order_cnt_type == 1) &&
(slice.delta_pic_order_cnt[0] != p_slice->delta_pic_order_cnt[0] ||
slice.delta_pic_order_cnt[1] !=
p_slice->delta_pic_order_cnt[1]))
gst_base_video_decoder_have_frame (base_video_decoder, &frame);
finish_frame = TRUE;
if (finish_frame) {
ret = gst_base_video_decoder_have_frame (base_video_decoder, &frame);
gst_base_video_decoder_frame_start (base_video_decoder, buf);
}
}
if (!GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VDP_H264_FRAME_GOT_PRIMARY)) {
@@ -820,22 +830,8 @@ gst_vdp_h264_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
goto invalid_packet;
}
if (nal_unit.type == GST_NAL_SEI) {
GstH264Sequence *seq;
GstH264SEIMessage sei;
if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VDP_H264_FRAME_GOT_PRIMARY))
seq = GST_VDP_H264_FRAME_CAST (frame)->slice_hdr.picture->sequence;
else
seq = NULL;
if (!gst_h264_parser_parse_sei_message (h264_dec->parser, seq, &sei, data,
size))
goto invalid_packet;
}
gst_buffer_unref (buf);
return GST_FLOW_OK;
return ret;
invalid_packet:
GST_WARNING ("Invalid packet size!");

gstvdpmpegdec.c

@@ -477,7 +477,6 @@ gst_vdp_mpeg_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
return GST_FLOW_OK;
alloc_error:
GST_ERROR_OBJECT (mpeg_dec, "Could not allocate output buffer");
gst_base_video_decoder_skip_frame (base_video_decoder, frame);
return ret;
@@ -546,6 +545,7 @@ gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
if (mpeg_dec->prev_packet != -1) {
ret = gst_base_video_decoder_have_frame (base_video_decoder,
(GstVideoFrame **) & mpeg_frame);
gst_base_video_decoder_frame_start (base_video_decoder, buf);
}
mpeg_frame->seq = buf;
@@ -559,6 +559,7 @@ gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
mpeg_dec->prev_packet != MPEG_PACKET_GOP) {
ret = gst_base_video_decoder_have_frame (base_video_decoder,
(GstVideoFrame **) & mpeg_frame);
gst_base_video_decoder_frame_start (base_video_decoder, buf);
}
mpeg_frame->pic = buf;
@@ -570,6 +571,7 @@ gst_vdp_mpeg_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
if (mpeg_dec->prev_packet != MPEG_PACKET_SEQUENCE) {
ret = gst_base_video_decoder_have_frame (base_video_decoder,
(GstVideoFrame **) & mpeg_frame);
gst_base_video_decoder_frame_start (base_video_decoder, buf);
}
mpeg_frame->gop = buf;
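
Taken together, the change moves timestamp bookkeeping into the base class: gst_base_video_decoder_chain () records each incoming buffer's timestamp against the running byte offset (input_offset), the packet loop stamps every buffer handed to parse_data with its offset in the input stream, subclasses call the new gst_base_video_decoder_frame_start () whenever a packet opens a new frame, and gst_base_video_decoder_have_frame () then looks up the timestamp recorded at that frame_offset. Below is a minimal sketch of how a subclass's parse_data is expected to use the new entry point; the my_dec_* names and the packet test are illustrative only, while the gst_base_video_decoder_* calls mirror the API shown in the diff above.

/* Sketch only: a hypothetical parse_data implementation showing the intended
 * calling sequence.  The my_dec_* helpers are made up; the base-class calls
 * match the declarations in gstbasevideodecoder.h. */
#include "gstbasevideodecoder.h"

static gboolean my_dec_packet_starts_new_picture (GstBuffer * buf);
static void my_dec_add_packet_to_frame (GstVideoFrame * frame, GstBuffer * buf);

static GstFlowReturn
my_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
    GstBuffer * buf, gboolean at_eos)
{
  GstVideoFrame *frame;
  GstFlowReturn ret = GST_FLOW_OK;

  /* buf is one packet taken from the adapter; the base class has already set
   * GST_BUFFER_OFFSET (buf) to the packet's byte offset in the input stream. */
  frame = gst_base_video_decoder_get_current_frame (base_video_decoder);

  if (my_dec_packet_starts_new_picture (buf)) {
    /* finish the frame collected so far ... */
    ret = gst_base_video_decoder_have_frame (base_video_decoder, &frame);
    /* ... and record the offset at which the new frame starts, so that
     * have_frame () can later fetch the timestamp stored for that offset
     * by gst_base_video_decoder_chain (). */
    gst_base_video_decoder_frame_start (base_video_decoder, buf);
  }

  my_dec_add_packet_to_frame (frame, buf);

  return ret;
}

The h264 and mpeg decoders in this commit follow exactly this pattern, calling gst_base_video_decoder_frame_start () right after each gst_base_video_decoder_have_frame () that closes the previous frame.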