h264decoder: Port to GstCodecPicture struct

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/5285>
Authored by Seungha Yang on 2023-09-05 21:27:30 +09:00; committed by GStreamer Marge Bot
parent d69bacb954
commit ea3dfadbed
8 changed files with 57 additions and 97 deletions


@@ -473,7 +473,7 @@ gst_h264_decoder_clear_dpb (GstH264Decoder * self, gboolean flush)
if (!flush) {
while ((picture = gst_h264_dpb_bump (priv->dpb, TRUE)) != NULL) {
GstVideoCodecFrame *frame = gst_video_decoder_get_frame (decoder,
-picture->system_frame_number);
+GST_CODEC_PICTURE_FRAME_NUMBER (picture));
if (frame)
gst_video_decoder_release_frame (decoder, frame);
@@ -796,7 +796,7 @@ gst_h264_decoder_split_frame (GstH264Decoder * self, GstH264Picture * picture)
other_field->frame_num = picture->frame_num;
other_field->ref = picture->ref;
other_field->nonexisting = picture->nonexisting;
-other_field->system_frame_number = picture->system_frame_number;
+GST_CODEC_PICTURE_COPY_FRAME_NUMBER (other_field, picture);
other_field->field_pic_flag = picture->field_pic_flag;
return other_field;
@@ -1034,11 +1034,12 @@ gst_h264_decoder_start_current_picture (GstH264Decoder * self)
g_assert (priv->active_sps != NULL);
g_assert (priv->active_pps != NULL);
+current_picture = priv->current_picture;
/* If subclass didn't update output state at this point,
* marking this picture as a discont and stores current input state */
if (priv->input_state_changed) {
-priv->current_picture->discont_state =
-gst_video_codec_state_ref (self->input_state);
+gst_h264_picture_set_discont_state (current_picture, self->input_state);
priv->input_state_changed = FALSE;
}
@@ -1056,8 +1057,6 @@ gst_h264_decoder_start_current_picture (GstH264Decoder * self)
if (!gst_h264_decoder_init_current_picture (self))
return GST_FLOW_ERROR;
-current_picture = priv->current_picture;
/* If the new picture is an IDR, flush DPB */
if (current_picture->idr) {
if (!current_picture->dec_ref_pic_marking.no_output_of_prior_pics_flag) {
@@ -1294,7 +1293,8 @@ gst_h264_decoder_parse_slice (GstH264Decoder * self, GstH264NalUnit * nalu)
}
/* This allows accessing the frame from the picture. */
-picture->system_frame_number = priv->current_frame->system_frame_number;
+GST_CODEC_PICTURE_FRAME_NUMBER (picture) =
+priv->current_frame->system_frame_number;
priv->current_picture = picture;
ret = gst_h264_decoder_start_current_picture (self);
@@ -1798,7 +1798,7 @@ gst_h264_decoder_do_output_picture (GstH264Decoder * self,
#endif
frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
-picture->system_frame_number);
+GST_CODEC_PICTURE_FRAME_NUMBER (picture));
if (!frame) {
/* The case where the end_picture() got failed and corresponding
@@ -1808,7 +1808,7 @@ gst_h264_decoder_do_output_picture (GstH264Decoder * self,
} else {
GST_ERROR_OBJECT (self,
"No available codec frame with frame number %d",
-picture->system_frame_number);
+GST_CODEC_PICTURE_FRAME_NUMBER (picture));
UPDATE_FLOW_RETURN (ret, GST_FLOW_ERROR);
}
@@ -2109,10 +2109,10 @@ gst_h264_decoder_finish_picture (GstH264Decoder * self,
* drop codec frame of the second field because we are consuming
* only the first codec frame via GstH264Decoder::output_picture() method */
if (picture->second_field && picture->other_field &&
-picture->system_frame_number !=
-picture->other_field->system_frame_number) {
+GST_CODEC_PICTURE_FRAME_NUMBER (picture) !=
+GST_CODEC_PICTURE_FRAME_NUMBER (picture->other_field)) {
GstVideoCodecFrame *frame = gst_video_decoder_get_frame (decoder,
-picture->system_frame_number);
+GST_CODEC_PICTURE_FRAME_NUMBER (picture));
gst_video_decoder_release_frame (decoder, frame);
}
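For context, the base-class changes above all boil down to the same accessor swap. A minimal sketch (not part of this commit; the helper name my_h264_dec_get_frame is illustrative) of how a subclass can resolve its GstVideoCodecFrame through the GstCodecPicture base struct:

/* Illustrative only (not from this commit): look up the codec frame that
 * belongs to a picture via the GstCodecPicture base-struct accessor. */
#include <gst/codecs/gsth264decoder.h>

static GstVideoCodecFrame *
my_h264_dec_get_frame (GstVideoDecoder * vdec, GstH264Picture * picture)
{
  /* system_frame_number now lives on the embedded GstCodecPicture */
  return gst_video_decoder_get_frame (vdec,
      GST_CODEC_PICTURE_FRAME_NUMBER (picture));
}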


@@ -132,7 +132,7 @@ struct _GstH264DecoderClass
*
* Optional. Called whenever new #GstH264Picture is created.
* Subclass can set implementation specific user data
-* on the #GstH264Picture via gst_h264_picture_set_user_data()
+* on the #GstH264Picture via gst_h264_picture_set_user_data
*/
GstFlowReturn (*new_picture) (GstH264Decoder * decoder,
GstVideoCodecFrame * frame,
@@ -146,7 +146,7 @@ struct _GstH264DecoderClass
*
* Called when a new field picture is created for interlaced field picture.
* Subclass can attach implementation specific user data on @second_field via
-* gst_h264_picture_set_user_data()
+* gst_h264_picture_set_user_data
*
* Since: 1.20
*/
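As a rough illustration of the documented flow, a hypothetical subclass could attach per-picture state from its new_picture() vfunc like the sketch below; MyPictureContext and my_h264_dec_new_picture are placeholder names, only gst_h264_picture_set_user_data() itself is the real API:

/* Hypothetical subclass vfunc; the attached data is released by the
 * GDestroyNotify when the picture is freed. */
#include <gst/codecs/gsth264decoder.h>

typedef struct
{
  guint surface_index;
} MyPictureContext;

static GstFlowReturn
my_h264_dec_new_picture (GstH264Decoder * decoder, GstVideoCodecFrame * frame,
    GstH264Picture * picture)
{
  MyPictureContext *ctx = g_new0 (MyPictureContext, 1);

  gst_h264_picture_set_user_data (picture, ctx, g_free);

  return GST_FLOW_OK;
}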


@@ -22,6 +22,7 @@
#endif
#include "gsth264picture-private.h"
+#include "gstcodecpicture-private.h"
#include <stdlib.h>
@@ -30,18 +31,6 @@ GST_DEBUG_CATEGORY_EXTERN (gst_h264_decoder_debug);
GST_DEFINE_MINI_OBJECT_TYPE (GstH264Picture, gst_h264_picture);
-static void
-_gst_h264_picture_free (GstH264Picture * picture)
-{
-if (picture->notify)
-picture->notify (picture->user_data);
-if (picture->discont_state)
-gst_video_codec_state_unref (picture->discont_state);
-g_free (picture);
-}
/**
* gst_h264_picture_new:
*
@@ -62,51 +51,11 @@ gst_h264_picture_new (void)
gst_mini_object_init (GST_MINI_OBJECT_CAST (pic), 0,
GST_TYPE_H264_PICTURE, NULL, NULL,
-(GstMiniObjectFreeFunction) _gst_h264_picture_free);
+(GstMiniObjectFreeFunction) gst_codec_picture_free);
return pic;
}
-/**
-* gst_h264_picture_set_user_data:
-* @picture: a #GstH264Picture
-* @user_data: (nullable): private data
-* @notify: (closure user_data): a #GDestroyNotify
-*
-* Sets @user_data on the picture and the #GDestroyNotify that will be called when
-* the picture is freed.
-*
-* If a @user_data was previously set, then the previous set @notify will be called
-* before the @user_data is replaced.
-*/
-void
-gst_h264_picture_set_user_data (GstH264Picture * picture, gpointer user_data,
-GDestroyNotify notify)
-{
-g_return_if_fail (GST_IS_H264_PICTURE (picture));
-if (picture->notify)
-picture->notify (picture->user_data);
-picture->user_data = user_data;
-picture->notify = notify;
-}
-/**
-* gst_h264_picture_get_user_data:
-* @picture: a #GstH264Picture
-*
-* Gets private data set on the picture via
-* gst_h264_picture_set_user_data() previously.
-*
-* Returns: (transfer none) (nullable): The previously set user_data
-*/
-gpointer
-gst_h264_picture_get_user_data (GstH264Picture * picture)
-{
-return picture->user_data;
-}
struct _GstH264Dpb
{
GArray *pic_list;
@@ -646,7 +595,7 @@ gst_h264_dpb_get_picture (GstH264Dpb * dpb, guint32 system_frame_number)
GstH264Picture *picture =
g_array_index (dpb->pic_list, GstH264Picture *, i);
-if (picture->system_frame_number == system_frame_number) {
+if (GST_CODEC_PICTURE_FRAME_NUMBER (picture) == system_frame_number) {
gst_h264_picture_ref (picture);
return picture;
}


@@ -21,6 +21,7 @@
#define __GST_H264_PICTURE_H__
#include <gst/codecs/codecs-prelude.h>
+#include <gst/codecs/gstcodecpicture.h>
#include <gst/codecparsers/gsth264parser.h>
#include <gst/video/video.h>
@@ -114,13 +115,10 @@ typedef enum
struct _GstH264Picture
{
/*< private >*/
-GstMiniObject parent;
+GstCodecPicture parent;
GstH264SliceType type;
-/* From GstVideoCodecFrame */
-guint32 system_frame_number;
guint8 pic_order_cnt_type; /* SPS */
gint32 top_field_order_cnt;
gint32 bottom_field_order_cnt;
@@ -160,12 +158,6 @@ struct _GstH264Picture
GstH264Picture * other_field;
GstVideoBufferFlags buffer_flags;
-/* decoder input state if this picture is discont point */
-GstVideoCodecState *discont_state;
-gpointer user_data;
-GDestroyNotify notify;
};
/**
@@ -218,13 +210,27 @@ gst_clear_h264_picture (GstH264Picture ** picture)
}
}
-GST_CODECS_API
-void gst_h264_picture_set_user_data (GstH264Picture * picture,
-gpointer user_data,
-GDestroyNotify notify);
+static inline void
+gst_h264_picture_set_user_data (GstH264Picture * picture, gpointer user_data,
+GDestroyNotify notify)
+{
+gst_codec_picture_set_user_data (GST_CODEC_PICTURE (picture),
+user_data, notify);
+}
-GST_CODECS_API
-gpointer gst_h264_picture_get_user_data (GstH264Picture * picture);
+static inline gpointer
+gst_h264_picture_get_user_data (GstH264Picture * picture)
+{
+return gst_codec_picture_get_user_data (GST_CODEC_PICTURE (picture));
+}
+static inline void
+gst_h264_picture_set_discont_state (GstH264Picture * picture,
+GstVideoCodecState * discont_state)
+{
+gst_codec_picture_set_discont_state (GST_CODEC_PICTURE (picture),
+discont_state);
+}
/*******************
* GstH264Dpb *
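The new static inline wrappers above simply forward to the GstCodecPicture helpers. As a hedged sketch (my_h264_dec_prepare_output is a made-up name), a decoder backend would now reach the discont state through the base struct, mirroring the backend changes later in this commit:

/* Hypothetical backend helper: per-picture state is reached through the
 * GstCodecPicture base struct instead of GstH264Picture fields. */
#include <gst/codecs/gsth264decoder.h>

static gboolean
my_h264_dec_prepare_output (GstVideoDecoder * vdec, GstH264Picture * picture)
{
  GstCodecPicture *codec_picture = GST_CODEC_PICTURE (picture);

  /* A set discont_state signals a pending input-state change; renegotiate
   * before pushing the corresponding output frame. */
  if (codec_picture->discont_state && !gst_video_decoder_negotiate (vdec))
    return FALSE;

  return TRUE;
}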


@@ -885,8 +885,8 @@ gst_d3d11_h264_dec_output_picture (GstH264Decoder * decoder,
}
if (!gst_d3d11_decoder_process_output (inner->d3d11_decoder, vdec,
-picture->discont_state, inner->width, inner->height, view_buffer,
-&frame->output_buffer)) {
+GST_CODEC_PICTURE (picture)->discont_state, inner->width,
+inner->height, view_buffer, &frame->output_buffer)) {
GST_ERROR_OBJECT (self, "Failed to copy buffer");
goto error;
}


@@ -750,7 +750,8 @@ gst_nv_h264_dec_output_picture (GstH264Decoder * decoder,
}
ret = gst_nv_decoder_finish_surface (self->decoder,
-vdec, picture->discont_state, surface, &frame->output_buffer);
+vdec, GST_CODEC_PICTURE (picture)->discont_state, surface,
+&frame->output_buffer);
if (ret != GST_FLOW_OK)
goto error;


@@ -628,7 +628,7 @@ gst_v4l2_codec_h264_dec_fill_decoder_params (GstV4l2CodecH264Dec * self,
* The reference is multiplied by 1000 because it's was set as micro
* seconds and this TS is nanosecond.
*/
-.reference_ts = (guint64) ref_pic->system_frame_number * 1000,
+.reference_ts = (guint64) GST_CODEC_PICTURE_FRAME_NUMBER (ref_pic) * 1000,
.frame_num = frame_num,
.pic_num = pic_num,
.flags = V4L2_H264_DPB_ENTRY_FLAG_VALID
@@ -779,7 +779,7 @@ lookup_dpb_index (struct v4l2_h264_dpb_entry dpb[16], GstH264Picture * ref_pic)
if (ref_pic->second_field && ref_pic->other_field)
ref_pic = ref_pic->other_field;
-ref_ts = (guint64) ref_pic->system_frame_number * 1000;
+ref_ts = (guint64) GST_CODEC_PICTURE_FRAME_NUMBER (ref_pic) * 1000;
for (i = 0; i < 16; i++) {
if (dpb[i].flags & V4L2_H264_DPB_ENTRY_FLAG_ACTIVE
&& dpb[i].reference_ts == ref_ts)
@@ -1056,21 +1056,23 @@ gst_v4l2_codec_h264_dec_output_picture (GstH264Decoder * decoder,
GstV4l2CodecH264Dec *self = GST_V4L2_CODEC_H264_DEC (decoder);
GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
GstV4l2Request *request = gst_h264_picture_get_user_data (picture);
+GstCodecPicture *codec_picture = GST_CODEC_PICTURE (picture);
gint ret;
-if (picture->discont_state) {
+if (codec_picture->discont_state) {
if (!gst_video_decoder_negotiate (vdec)) {
GST_ERROR_OBJECT (vdec, "Could not re-negotiate with updated state");
return FALSE;
}
}
-GST_DEBUG_OBJECT (self, "Output picture %u", picture->system_frame_number);
+GST_DEBUG_OBJECT (self, "Output picture %u",
+codec_picture->system_frame_number);
ret = gst_v4l2_request_set_done (request);
if (ret == 0) {
GST_ELEMENT_ERROR (self, STREAM, DECODE,
-("Decoding frame %u took too long", picture->system_frame_number),
+("Decoding frame %u took too long", codec_picture->system_frame_number),
(NULL));
goto error;
} else if (ret < 0) {
@@ -1082,7 +1084,8 @@ gst_v4l2_codec_h264_dec_output_picture (GstH264Decoder * decoder,
if (gst_v4l2_request_failed (request)) {
GST_ELEMENT_ERROR (self, STREAM, DECODE,
-("Failed to decode frame %u", picture->system_frame_number), (NULL));
+("Failed to decode frame %u", codec_picture->system_frame_number),
+(NULL));
goto error;
}
@@ -1175,16 +1178,17 @@ gst_v4l2_codec_h264_dec_submit_bitstream (GstV4l2CodecH264Dec * self,
self->bitstream);
} else {
GstVideoCodecFrame *frame;
+guint32 system_frame_number = GST_CODEC_PICTURE_FRAME_NUMBER (picture);
frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
-picture->system_frame_number);
+system_frame_number);
g_return_val_if_fail (frame, FALSE);
if (!gst_v4l2_codec_h264_dec_ensure_output_buffer (self, frame))
goto done;
request = gst_v4l2_decoder_alloc_request (self->decoder,
-picture->system_frame_number, self->bitstream, frame->output_buffer);
+system_frame_number, self->bitstream, frame->output_buffer);
gst_video_codec_frame_unref (frame);
}


@@ -123,8 +123,8 @@ gst_va_h264_dec_output_picture (GstH264Decoder * decoder,
GST_LOG_OBJECT (self,
"Outputting picture %p (poc %d)", picture, picture->pic_order_cnt);
-ret = gst_va_base_dec_process_output (base, frame, picture->discont_state,
-picture->buffer_flags);
+ret = gst_va_base_dec_process_output (base, frame,
+GST_CODEC_PICTURE (picture)->discont_state, picture->buffer_flags);
gst_h264_picture_unref (picture);
if (ret)