omxh26xdec: videodecoder support subframe

Use the subframe API from the videodecoder base class. This allows
the decoder to process subframes as they arrive instead of waiting
for a whole frame. The same codec frame is reused over the whole
subframe passing process, and the decoder waits for a signal
indicating the last subframe. In this implementation,
GST_VIDEO_BUFFER_FLAG_MARKER marks the end of a batch of subframes.
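
As an illustration (not part of this commit), an upstream element could
tag the last NAL of each access unit as follows; the helper name and its
arguments are hypothetical:

#include <gst/gst.h>
#include <gst/video/video.h>

/* Hypothetical helper: push one NAL-aligned buffer (a "subframe") and
 * flag the last NAL of the access unit so a subframe-aware decoder
 * knows the batch of subframes is complete. */
static GstFlowReturn
push_nal (GstPad * srcpad, GstBuffer * nal, gboolean last_nal_of_au)
{
  if (last_nal_of_au)
    GST_BUFFER_FLAG_SET (nal, GST_VIDEO_BUFFER_FLAG_MARKER);

  return gst_pad_push (srcpad, nal);
}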

This implements subframe mode negotiation for the Zynq, based on caps
negotiation. This mode can be combined with low-latency mode in order
to reach the lowest possible latency (assuming the stream is within
the low-latency constraints of the HW).

... ! video/x-h264,alignment=nal ! omxh264dec ! ...
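
For illustration, such a pipeline could also be built programmatically.
In this sketch the RTSP source, its URL and the video sink are
placeholders; the capsfilter forcing NAL alignment is what lets
omxh264dec negotiate subframe mode:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  GError *error = NULL;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch ("rtspsrc location=rtsp://example/stream latency=0 "
      "! rtph264depay ! video/x-h264,alignment=nal,stream-format=byte-stream "
      "! omxh264dec ! queue ! autovideosink", &error);
  if (pipeline == NULL) {
    g_printerr ("Failed to create pipeline: %s\n", error->message);
    g_clear_error (&error);
    return 1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* A real application would run a GMainLoop and watch the bus. */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}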

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-omx/-/merge_requests/49>
Authored by Nicolas Dufresne on 2018-09-06 21:56:57 +00:00, committed by Stéphane Cerveau
parent 936c63b4bc
commit aa99c5387f
4 changed files with 82 additions and 17 deletions

omx/gstomx.c

@@ -3462,7 +3462,8 @@ gst_omx_port_set_subframe (GstOMXPort * port, gboolean enabled)
#ifdef USE_OMX_TARGET_ZYNQ_USCALE_PLUS
OMX_ALG_VIDEO_PARAM_SUBFRAME subframe_mode;
OMX_ERRORTYPE err;
GST_DEBUG_OBJECT (port->comp->parent, "%s subframe mode for Zynq",
enabled ? "Enable" : "Disable");
GST_OMX_INIT_STRUCT (&subframe_mode);
subframe_mode.nPortIndex = port->index;
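
For context, a sketch of how the remainder of such a helper would
typically apply the vendor parameter; the bEnableSubframe field and the
OMX_ALG_IndexParamVideoSubframe index name are assumptions here, not
taken from this diff:

/* Sketch only (assumes the gst-omx and Allegro vendor headers);
 * the field and index names below are assumed. */
subframe_mode.bEnableSubframe = enabled;
err = gst_omx_component_set_parameter (port->comp,
    (OMX_INDEXTYPE) OMX_ALG_IndexParamVideoSubframe, &subframe_mode);
if (err != OMX_ErrorNone) {
  GST_WARNING_OBJECT (port->comp->parent,
      "Failed to %s subframe mode: %s (0x%08x)",
      enabled ? "enable" : "disable", gst_omx_error_to_string (err),
      (guint) err);
  return FALSE;
}
return TRUE;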

omx/gstomxh264dec.c

@@ -50,6 +50,21 @@ enum
G_DEFINE_TYPE_WITH_CODE (GstOMXH264Dec, gst_omx_h264_dec,
GST_TYPE_OMX_VIDEO_DEC, DEBUG_INIT);
#define MAKE_CAPS(alignment) \
"video/x-h264, " \
"alignment=(string) " alignment ", " \
"stream-format=(string) byte-stream, " \
"width=(int) [1,MAX], height=(int) [1,MAX]"
/* The Zynq supports decoding subframes, though we want "au" to be the
* default, so we keep it prepended. This is the only way that it works with
* rtph264depay. */
#ifdef USE_OMX_TARGET_ZYNQ_USCALE_PLUS
#define SINK_CAPS MAKE_CAPS ("au") ";" MAKE_CAPS ("nal")
#else
#define SINK_CAPS MAKE_CAPS ("au")
#endif
static void
gst_omx_h264_dec_class_init (GstOMXH264DecClass * klass)
{
@@ -60,10 +75,7 @@ gst_omx_h264_dec_class_init (GstOMXH264DecClass * klass)
GST_DEBUG_FUNCPTR (gst_omx_h264_dec_is_format_change);
videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_omx_h264_dec_set_format);
- videodec_class->cdata.default_sink_template_caps = "video/x-h264, "
- "alignment=(string) au, "
- "stream-format=(string) byte-stream, "
- "width=(int) [1,MAX], " "height=(int) [1,MAX]";
+ videodec_class->cdata.default_sink_template_caps = SINK_CAPS;
gst_element_class_set_static_metadata (element_class,
"OpenMAX H.264 Video Decoder",
@@ -86,7 +98,8 @@ gst_omx_h264_dec_is_format_change (GstOMXVideoDec * dec,
GstCaps *old_caps = NULL;
GstCaps *new_caps = state->caps;
GstStructure *old_structure, *new_structure;
- const gchar *old_profile, *old_level, *new_profile, *new_level;
+ const gchar *old_profile, *old_level, *old_alignment, *new_profile,
+ *new_level, *new_alignment;
if (dec->input_state) {
old_caps = dec->input_state->caps;
@@ -100,11 +113,14 @@ gst_omx_h264_dec_is_format_change (GstOMXVideoDec * dec,
new_structure = gst_caps_get_structure (new_caps, 0);
old_profile = gst_structure_get_string (old_structure, "profile");
old_level = gst_structure_get_string (old_structure, "level");
+ old_alignment = gst_structure_get_string (old_structure, "alignment");
new_profile = gst_structure_get_string (new_structure, "profile");
new_level = gst_structure_get_string (new_structure, "level");
+ new_alignment = gst_structure_get_string (new_structure, "alignment");
if (g_strcmp0 (old_profile, new_profile) != 0
- || g_strcmp0 (old_level, new_level) != 0) {
+ || g_strcmp0 (old_level, new_level) != 0
+ || g_strcmp0 (old_alignment, new_alignment) != 0) {
return TRUE;
}
@@ -176,6 +192,7 @@ gst_omx_h264_dec_set_format (GstOMXVideoDec * dec, GstOMXPort * port,
GstOMXVideoDecClass *klass = GST_OMX_VIDEO_DEC_GET_CLASS (dec);
OMX_PARAM_PORTDEFINITIONTYPE port_def;
OMX_ERRORTYPE err;
const GstStructure *s;
gst_omx_port_get_port_definition (port, &port_def);
port_def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
@@ -188,5 +205,12 @@ gst_omx_h264_dec_set_format (GstOMXVideoDec * dec, GstOMXPort * port,
return FALSE;
}
/* Enable subframe mode if NAL aligned */
s = gst_caps_get_structure (state->caps, 0);
if (!g_strcmp0 (gst_structure_get_string (s, "alignment"), "nal")
&& gst_omx_port_set_subframe (dec->dec_in_port, TRUE)) {
gst_video_decoder_set_subframe_mode (GST_VIDEO_DECODER (dec), TRUE);
}
return TRUE;
}
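
For reference, on the Zynq the SINK_CAPS macro above expands (roughly,
ignoring whitespace) to two caps structures, with "au" listed first so
it stays the preferred alignment during negotiation:

video/x-h264, alignment=(string) au, stream-format=(string) byte-stream,
    width=(int) [1,MAX], height=(int) [1,MAX];
video/x-h264, alignment=(string) nal, stream-format=(string) byte-stream,
    width=(int) [1,MAX], height=(int) [1,MAX]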

omx/gstomxh265dec.c

@@ -51,6 +51,21 @@ enum
G_DEFINE_TYPE_WITH_CODE (GstOMXH265Dec, gst_omx_h265_dec,
GST_TYPE_OMX_VIDEO_DEC, DEBUG_INIT);
#define MAKE_CAPS(alignment) \
"video/x-h265, " \
"alignment=(string) " alignment ", " \
"stream-format=(string) byte-stream, " \
"width=(int) [1,MAX], height=(int) [1,MAX]"
/* The Zynq MPSoC supports decoding subframes though we want "au" to be the
* default, so we keep it prepended. This is the only way that it works with
* rtph265depay. */
#ifdef USE_OMX_TARGET_ZYNQ_USCALE_PLUS
#define SINK_CAPS MAKE_CAPS ("au") ";" MAKE_CAPS ("nal")
#else
#define SINK_CAPS MAKE_CAPS ("au")
#endif
static void
gst_omx_h265_dec_class_init (GstOMXH265DecClass * klass)
{
@@ -61,10 +76,7 @@ gst_omx_h265_dec_class_init (GstOMXH265DecClass * klass)
GST_DEBUG_FUNCPTR (gst_omx_h265_dec_is_format_change);
videodec_class->set_format = GST_DEBUG_FUNCPTR (gst_omx_h265_dec_set_format);
- videodec_class->cdata.default_sink_template_caps = "video/x-h265, "
- "alignment=(string) au, "
- "stream-format=(string) byte-stream, "
- "width=(int) [1,MAX], " "height=(int) [1,MAX]";
+ videodec_class->cdata.default_sink_template_caps = SINK_CAPS;
gst_element_class_set_static_metadata (element_class,
"OpenMAX H.265 Video Decoder",
@@ -87,8 +99,8 @@ gst_omx_h265_dec_is_format_change (GstOMXVideoDec * dec,
GstCaps *old_caps = NULL;
GstCaps *new_caps = state->caps;
GstStructure *old_structure, *new_structure;
- const gchar *old_profile, *old_level, *old_tier, *new_profile, *new_level,
- *new_tier;
+ const gchar *old_profile, *old_level, *old_tier, *old_alignment,
+ *new_profile, *new_level, *new_tier, *new_alignment;
if (dec->input_state) {
old_caps = dec->input_state->caps;
@@ -103,13 +115,16 @@ gst_omx_h265_dec_is_format_change (GstOMXVideoDec * dec,
old_profile = gst_structure_get_string (old_structure, "profile");
old_level = gst_structure_get_string (old_structure, "level");
old_tier = gst_structure_get_string (old_structure, "tier");
+ old_alignment = gst_structure_get_string (old_structure, "alignment");
new_profile = gst_structure_get_string (new_structure, "profile");
new_level = gst_structure_get_string (new_structure, "level");
new_tier = gst_structure_get_string (new_structure, "tier");
+ new_alignment = gst_structure_get_string (new_structure, "alignment");
if (g_strcmp0 (old_profile, new_profile) != 0
|| g_strcmp0 (old_level, new_level) != 0
- || g_strcmp0 (old_tier, new_tier)) {
+ || g_strcmp0 (old_tier, new_tier) != 0
+ || g_strcmp0 (old_alignment, new_alignment) != 0) {
return TRUE;
}
@@ -184,6 +199,7 @@ gst_omx_h265_dec_set_format (GstOMXVideoDec * dec, GstOMXPort * port,
GstOMXVideoDecClass *klass = GST_OMX_VIDEO_DEC_GET_CLASS (dec);
OMX_PARAM_PORTDEFINITIONTYPE port_def;
OMX_ERRORTYPE err;
const GstStructure *s;
gst_omx_port_get_port_definition (port, &port_def);
port_def.format.video.eCompressionFormat =
@@ -197,5 +213,12 @@ gst_omx_h265_dec_set_format (GstOMXVideoDec * dec, GstOMXPort * port,
return FALSE;
}
/* Enable subframe mode if NAL aligned */
s = gst_caps_get_structure (state->caps, 0);
if (!g_strcmp0 (gst_structure_get_string (s, "alignment"), "nal")
&& gst_omx_port_set_subframe (dec->dec_in_port, TRUE)) {
gst_video_decoder_set_subframe_mode (GST_VIDEO_DECODER (dec), TRUE);
}
return TRUE;
}

omx/gstomxvideodec.c

@@ -2976,10 +2976,17 @@ gst_omx_video_dec_handle_frame (GstVideoDecoder * decoder,
gboolean done = FALSE;
gboolean first_ouput_buffer = TRUE;
guint memory_idx = 0; /* only used in dynamic buffer mode */
+ gboolean last_subframe = GST_BUFFER_FLAG_IS_SET (frame->input_buffer,
+ GST_VIDEO_BUFFER_FLAG_MARKER);
+ gboolean header =
+ GST_BUFFER_FLAG_IS_SET (frame->input_buffer, GST_BUFFER_FLAG_HEADER);
+ gboolean subframe_mode = gst_video_decoder_get_subframe_mode (decoder);
self = GST_OMX_VIDEO_DEC (decoder);
GST_DEBUG_OBJECT (self, "Handling frame");
GST_DEBUG_OBJECT (self,
"Handling frame %p last_subframe=%d header %d subframes %d", frame,
last_subframe, header, frame->abidata.ABI.num_subframes);
if (self->downstream_flow_ret != GST_FLOW_OK) {
gst_video_codec_frame_unref (frame);
@@ -3203,8 +3210,18 @@ gst_omx_video_dec_handle_frame (GstVideoDecoder * decoder,
* the segment
*/
- if (done)
- buf->omx_buf->nFlags |= OMX_BUFFERFLAG_ENDOFFRAME;
+ if (done) {
+ /* If the input buffer is a subframe mark the OMX buffer as such */
+ if (subframe_mode && !last_subframe) {
+ #ifdef OMX_BUFFERFLAG_ENDOFSUBFRAME
+ buf->omx_buf->nFlags |= OMX_BUFFERFLAG_ENDOFSUBFRAME;
+ #endif
+ } else {
+ buf->omx_buf->nFlags |= OMX_BUFFERFLAG_ENDOFFRAME;
+ if (subframe_mode && last_subframe)
+ gst_video_decoder_have_last_subframe (decoder, frame);
+ }
+ }
self->started = TRUE;
err = gst_omx_port_release_buffer (port, buf);
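
To summarise the new flag handling (a hypothetical condensation, not
part of the commit): the OMX flags set on each input buffer depend on
whether subframe mode is active and whether the buffer carried
GST_VIDEO_BUFFER_FLAG_MARKER:

/* Sketch only (assumes the gst-omx headers); condenses the flag
 * selection shown above into one helper. */
static OMX_U32
end_flags_for_input (gboolean subframe_mode, gboolean last_subframe)
{
  if (subframe_mode && !last_subframe) {
#ifdef OMX_BUFFERFLAG_ENDOFSUBFRAME
    /* More NALs of this frame will follow: only end the subframe. */
    return OMX_BUFFERFLAG_ENDOFSUBFRAME;
#else
    return 0;
#endif
  }

  /* Whole-frame input, or the marker-flagged last subframe: close the
   * frame. In subframe mode gst_video_decoder_have_last_subframe() is
   * also called so the base class can finish the pending frame. */
  return OMX_BUFFERFLAG_ENDOFFRAME;
}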