openh264dec: Handle B frame decoding
Support out-of-order decoding in OpenH264 by tracking our internal frame reference in the OpenH264 timestamp field. Drain any pending frames at EOS.
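In outline, the change works like this (a minimal sketch using only calls that appear in the diff below; buffer mapping, error handling and decoder setup are omitted): the frame's system_frame_number rides through the decoder in OpenH264's input timestamp field and comes back attached to the decoded picture, so the original GstVideoCodecFrame can be looked up even when output order differs from input order.

    /* Sketch: track the GStreamer frame through OpenH264's reordering */
    memset (&dst_buf_info, 0, sizeof (SBufferInfo));
    /* stash our frame reference in the input timestamp field */
    dst_buf_info.uiInBsTimeStamp = frame->system_frame_number;
    ret = openh264dec->decoder->DecodeFrame2 (map_info.data, map_info.size,
        yuvdata, &dst_buf_info);

    /* when a (possibly reordered) picture comes out, the same value is
     * echoed back, keying the lookup of the original input frame */
    if (dst_buf_info.iBufferStatus == 1)
      frame = gst_video_decoder_get_frame (decoder,
          dst_buf_info.uiOutYuvTimeStamp);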
This commit is contained in:
parent
e75e637531
commit
d4594c5c75
1 changed file with 64 additions and 50 deletions
@@ -206,78 +206,92 @@ gst_openh264dec_handle_frame (GstVideoDecoder * decoder,
   guint i;
   guint8 *p;
   guint row_stride, component_width, component_height, src_width, row;
-  gboolean at_eos = (frame == NULL);
 
-  if (frame) {
+  if (frame == NULL) {
+#if OPENH264_VERSION_CHECK (1,9)
+    /* Called with no videoframe for EOS logic. Drain out */
+    int end_of_stream = 1;
+    memset (&dst_buf_info, 0, sizeof (SBufferInfo));
+
+    openh264dec->decoder->SetOption (DECODER_OPTION_END_OF_STREAM,
+        &end_of_stream);
+    ret = openh264dec->decoder->FlushFrame (yuvdata, &dst_buf_info);
+
+    if (ret != dsErrorFree || dst_buf_info.iBufferStatus != 1) {
+      GST_DEBUG_OBJECT (decoder, "No more frames to retrieve at EOS");
+      return GST_FLOW_EOS;
+    }
+#else
+    return GST_FLOW_EOS;
+#endif
+  } else {
     if (!gst_buffer_map (frame->input_buffer, &map_info, GST_MAP_READ)) {
       GST_ERROR_OBJECT (openh264dec, "Cannot map input buffer!");
       gst_video_codec_frame_unref (frame);
       return GST_FLOW_ERROR;
     }
 
-    GST_LOG_OBJECT (openh264dec, "handle frame, %d",
+    GST_LOG_OBJECT (openh264dec, "handle frame, 1st NAL type %d",
         map_info.size > 4 ? map_info.data[4] & 0x1f : -1);
 
     memset (&dst_buf_info, 0, sizeof (SBufferInfo));
+    /* Use the unsigned long long OpenH264 timestamp to store the system_frame_number
+     * to track the original frame through any OpenH264 reordering */
+    dst_buf_info.uiInBsTimeStamp = frame->system_frame_number;
+
+    GST_LOG_OBJECT (decoder, "Submitting frame with PTS %" GST_TIME_FORMAT
+        " and frame ref %" G_GUINT64_FORMAT,
+        GST_TIME_ARGS (frame->pts), (guint64) frame->system_frame_number);
+
     ret =
         openh264dec->decoder->DecodeFrame2 (map_info.data, map_info.size,
         yuvdata, &dst_buf_info);
 
-    if (ret == dsNoParamSets) {
-      GST_DEBUG_OBJECT (openh264dec, "Requesting a key unit");
-      gst_pad_push_event (GST_VIDEO_DECODER_SINK_PAD (decoder),
-          gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
-              FALSE, 0));
-    }
-
-    if (ret != dsErrorFree && ret != dsNoParamSets) {
-      GST_DEBUG_OBJECT (openh264dec, "Requesting a key unit");
-      gst_pad_push_event (GST_VIDEO_DECODER_SINK_PAD (decoder),
-          gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
-              FALSE, 0));
-      GST_LOG_OBJECT (openh264dec, "error decoding nal, return code: %d", ret);
-    }
-
     gst_buffer_unmap (frame->input_buffer, &map_info);
-    if (ret != dsErrorFree)
-      return gst_video_decoder_drop_frame (decoder, frame);
+    if (ret != dsErrorFree) {
+      /* Request a key unit from upstream */
+      GST_DEBUG_OBJECT (openh264dec, "Requesting a key unit");
+      gst_pad_push_event (GST_VIDEO_DECODER_SINK_PAD (decoder),
+          gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
+              FALSE, 0));
+
+      GST_LOG_OBJECT (openh264dec, "error decoding nal, return code: %d", ret);
+      gst_video_codec_frame_unref (frame);
+
+      /* Get back the frame that was reported as errored */
+      frame =
+          gst_video_decoder_get_frame (decoder, dst_buf_info.uiOutYuvTimeStamp);
+      if (frame) {
+        GST_LOG_OBJECT (decoder,
+            "Dropping errored frame ref %" G_GUINT64_FORMAT,
+            (guint64) dst_buf_info.uiOutYuvTimeStamp);
+        return gst_video_decoder_drop_frame (decoder, frame);
+      }
+      return GST_FLOW_OK;
+    }
 
     gst_video_codec_frame_unref (frame);
     frame = NULL;
-  } else {
-    memset (&dst_buf_info, 0, sizeof (SBufferInfo));
-    ret = openh264dec->decoder->DecodeFrame2 (NULL, 0, yuvdata, &dst_buf_info);
-    if (ret != dsErrorFree)
-      return GST_FLOW_EOS;
   }
 
   /* No output available yet */
   if (dst_buf_info.iBufferStatus != 1) {
-    if (at_eos)
-      return GST_FLOW_EOS;
-
     GST_LOG_OBJECT (decoder, "No buffer decoded yet");
     return GST_FLOW_OK;
   }
 
-  /* FIXME: openh264 has no way for us to get a connection
-   * between the input and output frames, we just have to
-   * guess based on the input. Fortunately openh264 can
-   * only do baseline profile. */
-  frame = gst_video_decoder_get_oldest_frame (decoder);
+  GST_LOG_OBJECT (decoder, "Got back frame with frame ref %" G_GUINT64_FORMAT,
+      (guint64) dst_buf_info.uiOutYuvTimeStamp);
+
+  /* OpenH264 lets us pass an int reference through
+   * so we can retrieve the input frame now */
+  frame =
+      gst_video_decoder_get_frame (decoder, dst_buf_info.uiOutYuvTimeStamp);
   if (!frame) {
-    /* Can only happen in finish() */
-    return GST_FLOW_EOS;
-  }
-
-  {
-    GstClockTime pts = dst_buf_info.uiOutYuvTimeStamp;
-    if (pts != frame->pts) {
-      GST_DEBUG_OBJECT (decoder, "Got output PTS %" GST_TIME_FORMAT
-          " but expected %" GST_TIME_FORMAT, GST_TIME_ARGS (pts),
-          GST_TIME_ARGS (frame->pts));
-    }
+    /* Where did our frame go? This is a reference tracking error. */
+    GST_WARNING_OBJECT (decoder,
+        "Failed to look up frame ref %" G_GUINT64_FORMAT,
+        (guint64) dst_buf_info.uiOutYuvTimeStamp);
+    return GST_FLOW_OK;
   }
 
   actual_width = dst_buf_info.UsrData.sSystemBuffer.iWidth;