codecs: h264decoder: Pass GstVideoCodecFrame to output_picture()

All subclasses retrieve the frame list to get the target output frame,
which can be done by the baseclass instead. Also pass ownership of the
GstH264Picture to the subclass so that it can clear
implementation-dependent resources before finishing the frame.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/1449>
This commit is contained in:
Seungha Yang 2020-07-20 18:24:09 +09:00
parent 7a024a740f
commit e921a07715
6 changed files with 91 additions and 69 deletions

View file

@ -1217,6 +1217,7 @@ gst_h264_decoder_do_output_picture (GstH264Decoder * self,
{
GstH264DecoderPrivate *priv = self->priv;
GstH264DecoderClass *klass;
GstVideoCodecFrame *frame = NULL;
picture->outputted = TRUE;
@ -1237,10 +1238,23 @@ gst_h264_decoder_do_output_picture (GstH264Decoder * self,
priv->last_output_poc = picture->pic_order_cnt;
frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
picture->system_frame_number);
if (!frame) {
GST_ERROR_OBJECT (self,
"No available codec frame with frame number %d",
picture->system_frame_number);
priv->last_ret = GST_FLOW_ERROR;
return;
}
klass = GST_H264_DECODER_GET_CLASS (self);
g_assert (klass->output_picture);
priv->last_ret = klass->output_picture (self, picture);
priv->last_ret = klass->output_picture (self,
frame, gst_h264_picture_ref (picture));
}
static gboolean

View file

@ -115,7 +115,15 @@ struct _GstH264DecoderClass
gboolean (*end_picture) (GstH264Decoder * decoder,
GstH264Picture * picture);
/**
* GstVideoDecoder:output_picture:
*
* @decoder: a #GstH264Decoder
* @frame: (transfer full): a #GstVideoCodecFrame
* @picture: (transfer full): a #GstH264Picture
*/
GstFlowReturn (*output_picture) (GstH264Decoder * decoder,
GstVideoCodecFrame * frame,
GstH264Picture * picture);
/*< private >*/

View file

@ -153,7 +153,7 @@ static gboolean gst_d3d11_h264_dec_new_sequence (GstH264Decoder * decoder,
static gboolean gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
GstVideoCodecFrame * frame, GstH264Picture * picture);
static GstFlowReturn gst_d3d11_h264_dec_output_picture (GstH264Decoder *
decoder, GstH264Picture * picture);
decoder, GstVideoCodecFrame * frame, GstH264Picture * picture);
static gboolean gst_d3d11_h264_dec_start_picture (GstH264Decoder * decoder,
GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb);
static gboolean gst_d3d11_h264_dec_decode_slice (GstH264Decoder * decoder,
@ -605,12 +605,11 @@ gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
static GstFlowReturn
gst_d3d11_h264_dec_output_picture (GstH264Decoder * decoder,
GstH264Picture * picture)
GstVideoCodecFrame * frame, GstH264Picture * picture)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
GstVideoCodecFrame *frame = NULL;
GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
GstBuffer *output_buffer = NULL;
GstFlowReturn ret;
GstBuffer *view_buffer;
GST_LOG_OBJECT (self,
@ -620,12 +619,9 @@ gst_d3d11_h264_dec_output_picture (GstH264Decoder * decoder,
if (!view_buffer) {
GST_ERROR_OBJECT (self, "Could not get output view");
return GST_FLOW_ERROR;
goto error;
}
frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
picture->system_frame_number);
/* if downstream is d3d11 element and forward playback case,
* expose our decoder view without copy. In case of reverse playback, however,
* we cannot do that since baseclass will store the decoded buffer
@ -639,29 +635,19 @@ gst_d3d11_h264_dec_output_picture (GstH264Decoder * decoder,
mem = gst_buffer_peek_memory (output_buffer, 0);
GST_MINI_OBJECT_FLAG_SET (mem, GST_D3D11_MEMORY_TRANSFER_NEED_DOWNLOAD);
} else {
output_buffer =
gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
output_buffer = gst_video_decoder_allocate_output_buffer (vdec);
}
if (!output_buffer) {
GST_ERROR_OBJECT (self, "Couldn't allocate output buffer");
return GST_FLOW_ERROR;
goto error;
}
if (!frame) {
GST_WARNING_OBJECT (self,
"Failed to find codec frame for picture %p", picture);
GST_BUFFER_PTS (output_buffer) = picture->pts;
GST_BUFFER_DTS (output_buffer) = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (output_buffer) = GST_CLOCK_TIME_NONE;
} else {
frame->output_buffer = output_buffer;
GST_BUFFER_PTS (output_buffer) = GST_BUFFER_PTS (frame->input_buffer);
GST_BUFFER_DTS (output_buffer) = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (output_buffer) =
GST_BUFFER_DURATION (frame->input_buffer);
}
frame->output_buffer = output_buffer;
GST_BUFFER_PTS (output_buffer) = GST_BUFFER_PTS (frame->input_buffer);
GST_BUFFER_DTS (output_buffer) = GST_CLOCK_TIME_NONE;
GST_BUFFER_DURATION (output_buffer) =
GST_BUFFER_DURATION (frame->input_buffer);
if (!gst_d3d11_decoder_process_output (self->d3d11_decoder,
&self->output_state->info,
@ -669,24 +655,21 @@ gst_d3d11_h264_dec_output_picture (GstH264Decoder * decoder,
GST_VIDEO_INFO_HEIGHT (&self->output_state->info),
view_buffer, output_buffer)) {
GST_ERROR_OBJECT (self, "Failed to copy buffer");
if (frame)
gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
else
gst_buffer_unref (output_buffer);
return GST_FLOW_ERROR;
goto error;
}
GST_LOG_OBJECT (self, "Finish frame %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_PTS (output_buffer)));
if (frame) {
ret = gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
} else {
ret = gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), output_buffer);
}
gst_h264_picture_unref (picture);
return ret;
return gst_video_decoder_finish_frame (vdec, frame);
error:
gst_video_decoder_drop_frame (vdec, frame);
gst_h264_picture_unref (picture);
return GST_FLOW_ERROR;
}
static gboolean

View file

@ -148,7 +148,7 @@ static gboolean gst_nv_h264_dec_new_sequence (GstH264Decoder * decoder,
static gboolean gst_nv_h264_dec_new_picture (GstH264Decoder * decoder,
GstVideoCodecFrame * frame, GstH264Picture * picture);
static GstFlowReturn gst_nv_h264_dec_output_picture (GstH264Decoder *
decoder, GstH264Picture * picture);
decoder, GstVideoCodecFrame * frame, GstH264Picture * picture);
static gboolean gst_nv_h264_dec_start_picture (GstH264Decoder * decoder,
GstH264Picture * picture, GstH264Slice * slice, GstH264Dpb * dpb);
static gboolean gst_nv_h264_dec_decode_slice (GstH264Decoder * decoder,
@ -439,11 +439,10 @@ gst_nv_h264_dec_new_picture (GstH264Decoder * decoder,
static GstFlowReturn
gst_nv_h264_dec_output_picture (GstH264Decoder * decoder,
GstH264Picture * picture)
GstVideoCodecFrame * frame, GstH264Picture * picture)
{
GstNvH264Dec *self = GST_NV_H264_DEC (decoder);
GstVideoCodecFrame *frame = NULL;
GstBuffer *output_buffer = NULL;
GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
GstNvDecoderFrame *decoder_frame;
gboolean ret G_GNUC_UNUSED = FALSE;
@ -454,24 +453,19 @@ gst_nv_h264_dec_output_picture (GstH264Decoder * decoder,
(GstNvDecoderFrame *) gst_h264_picture_get_user_data (picture);
if (!decoder_frame) {
GST_ERROR_OBJECT (self, "No decoder frame in picture %p", picture);
return GST_FLOW_ERROR;
goto error;
}
frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
picture->system_frame_number);
if (!frame) {
GST_ERROR_OBJECT (self, "Failed to retrieve codec frame");
return GST_FLOW_ERROR;
frame->output_buffer = gst_video_decoder_allocate_output_buffer (vdec);
if (!frame->output_buffer) {
GST_ERROR_OBJECT (self, "Couldn't allocate output buffer");
goto error;
}
output_buffer =
gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
frame->output_buffer = output_buffer;
if (self->output_type == GST_NV_DECOCER_OUTPUT_TYPE_GL) {
ret = gst_nv_decoder_finish_frame (self->decoder,
GST_NV_DECOCER_OUTPUT_TYPE_GL, self->gl_context,
decoder_frame, output_buffer);
decoder_frame, frame->output_buffer);
/* FIXME: This is the case where OpenGL context of downstream glbufferpool
* belongs to non-nvidia (or different device).
@ -487,14 +481,21 @@ gst_nv_h264_dec_output_picture (GstH264Decoder * decoder,
if (!ret) {
if (!gst_nv_decoder_finish_frame (self->decoder,
GST_NV_DECOCER_OUTPUT_TYPE_SYSTEM, NULL, decoder_frame,
output_buffer)) {
frame->output_buffer)) {
GST_ERROR_OBJECT (self, "Failed to finish frame");
gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
return GST_FLOW_ERROR;
goto error;
}
}
return gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
gst_h264_picture_unref (picture);
return gst_video_decoder_finish_frame (vdec, frame);
error:
gst_video_decoder_drop_frame (vdec, frame);
gst_h264_picture_unref (picture);
return GST_FLOW_ERROR;
}
static GstNvDecoderFrame *

View file

@ -854,12 +854,12 @@ gst_v4l2_codec_h264_dec_wait (GstV4l2CodecH264Dec * self,
static GstFlowReturn
gst_v4l2_codec_h264_dec_output_picture (GstH264Decoder * decoder,
GstH264Picture * picture)
GstVideoCodecFrame * frame, GstH264Picture * picture)
{
GstV4l2CodecH264Dec *self = GST_V4L2_CODEC_H264_DEC (decoder);
GstV4l2Request *request = gst_h264_picture_get_user_data (picture);
guint32 frame_num;
GstVideoCodecFrame *frame, *other_frame;
GstVideoCodecFrame *other_frame;
GstH264Picture *other_pic;
GstV4l2Request *other_request;
@ -875,6 +875,8 @@ gst_v4l2_codec_h264_dec_output_picture (GstH264Decoder * decoder,
if (!gst_v4l2_decoder_dequeue_src (self->decoder, &frame_num)) {
GST_ELEMENT_ERROR (self, STREAM, DECODE,
("Decoder did not produce a frame"), (NULL));
gst_video_decoder_drop_frame (GST_VIDEO_DECODER (decoder), frame);
gst_h264_picture_unref (picture);
return GST_FLOW_ERROR;
}
@ -886,16 +888,15 @@ gst_v4l2_codec_h264_dec_output_picture (GstH264Decoder * decoder,
g_return_val_if_fail (other_frame, GST_FLOW_ERROR);
other_pic = gst_video_codec_frame_get_user_data (other_frame);
other_request = gst_h264_picture_get_user_data (other_pic);
gst_v4l2_request_set_done (other_request);
if (other_pic) {
other_request = gst_h264_picture_get_user_data (other_pic);
gst_v4l2_request_set_done (other_request);
}
gst_video_codec_frame_unref (other_frame);
}
finish_frame:
gst_v4l2_request_set_done (request);
frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
picture->system_frame_number);
g_return_val_if_fail (frame, GST_FLOW_ERROR);
g_return_val_if_fail (frame->output_buffer, GST_FLOW_ERROR);
/* Hold on reference buffers for the rest of the picture lifetime */
@ -905,6 +906,22 @@ finish_frame:
if (self->copy_frames)
gst_v4l2_codec_h264_dec_copy_output_buffer (self, frame);
/* At this point, GstVideoCodecFrame holds
* - GstBuffer (GstVideoCodecFrame::output_buffer)
* - GstH264Picture and GstH264Picture holds GstBuffer as well.
* So the refcount of the output buffer would be at least 2 here
* if the given GstH264Picture is the last reference.
*
* To make a chance that only this GstVideoCodecFrame holds the reference
* of the GstBuffer, clear user data of GstVideoCodecFrame
* (i.e., drop the reference of GstH264Picture).
* Otherwise, if the reference count of the GstBuffer is not one,
* the buffer will be copied always
* by gst_buffer_make_writable() in gst_video_decoder_finish_frame()
*/
gst_video_codec_frame_set_user_data (frame, NULL, NULL);
gst_h264_picture_unref (picture);
return gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
}

View file

@ -159,18 +159,15 @@ fail:
static GstFlowReturn
gst_va_h264_dec_output_picture (GstH264Decoder * decoder,
GstH264Picture * picture)
GstVideoCodecFrame * frame, GstH264Picture * picture)
{
GstVaH264Dec *self = GST_VA_H264_DEC (decoder);
GstVideoCodecFrame *frame = NULL;
GST_LOG_OBJECT (self,
"Outputting picture %p (poc %d)", picture, picture->pic_order_cnt);
frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
picture->system_frame_number);
if (self->last_ret != GST_FLOW_OK) {
gst_h264_picture_unref (picture);
gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
return self->last_ret;
}
@ -186,6 +183,8 @@ gst_va_h264_dec_output_picture (GstH264Decoder * decoder,
GST_LOG_OBJECT (self, "Finish frame %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_PTS (frame->output_buffer)));
gst_h264_picture_unref (picture);
return gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
}