d3d11decoder: Move handle_frame implementation to baseclass

... and remove the unused start and stop methods from the subclasses.

The current implementation does not require subclass-specific behavior
for the handle_frame() method.
This commit is contained in:
Seungha Yang 2020-01-30 20:04:58 +09:00 committed by GStreamer Merge Bot
parent 6da90b59f4
commit 3e78afbe0a
9 changed files with 131 additions and 188 deletions

View file

@ -125,10 +125,6 @@ static void gst_d3d11_h264_dec_set_context (GstElement * element,
static gboolean gst_d3d11_h264_dec_open (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h264_dec_close (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h264_dec_start (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h264_dec_stop (GstVideoDecoder * decoder);
static GstFlowReturn gst_d3d11_h264_dec_handle_frame (GstVideoDecoder *
decoder, GstVideoCodecFrame * frame);
static gboolean gst_d3d11_h264_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h264_dec_decide_allocation (GstVideoDecoder *
decoder, GstQuery * query);
@ -182,10 +178,6 @@ gst_d3d11_h264_dec_class_init (GstD3D11H264DecClass * klass)
decoder_class->open = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_open);
decoder_class->close = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_close);
decoder_class->start = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_start);
decoder_class->stop = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_stop);
decoder_class->handle_frame =
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_handle_frame);
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_negotiate);
decoder_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_decide_allocation);
@ -295,57 +287,14 @@ gst_d3d11_h264_dec_close (GstVideoDecoder * decoder)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
gst_clear_object (&self->d3d11_decoder);
gst_clear_object (&self->device);
return TRUE;
}
static gboolean
gst_d3d11_h264_dec_start (GstVideoDecoder * decoder)
{
return GST_VIDEO_DECODER_CLASS (parent_class)->start (decoder);
}
static gboolean
gst_d3d11_h264_dec_stop (GstVideoDecoder * decoder)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
gst_h264_picture_replace (&self->current_picture, NULL);
if (self->output_state)
gst_video_codec_state_unref (self->output_state);
self->output_state = NULL;
return GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder);
}
gst_clear_object (&self->d3d11_decoder);
gst_clear_object (&self->device);
static GstFlowReturn
gst_d3d11_h264_dec_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame)
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
GstBuffer *in_buf = frame->input_buffer;
GST_LOG_OBJECT (self,
"handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
if (!self->current_picture) {
GST_ERROR_OBJECT (self, "No current picture");
gst_video_decoder_drop_frame (decoder, frame);
return GST_FLOW_ERROR;
}
gst_video_codec_frame_set_user_data (frame,
self->current_picture, (GDestroyNotify) gst_h264_picture_unref);
self->current_picture = NULL;
gst_video_codec_frame_unref (frame);
return GST_FLOW_OK;
return TRUE;
}
static gboolean
@ -711,8 +660,6 @@ gst_d3d11_h264_dec_new_picture (GstH264Decoder * decoder,
GST_LOG_OBJECT (self, "New h264picture %p", picture);
gst_h264_picture_replace (&self->current_picture, picture);
return TRUE;
}

View file

@ -61,8 +61,6 @@ struct _GstD3D11H264Dec
GstD3D11Decoder *d3d11_decoder;
GstH264Picture *current_picture;
/* Pointing current bitstream buffer */
guint current_offset;
guint bitstream_buffer_size;

View file

@ -92,10 +92,6 @@ static void gst_d3d11_h265_dec_set_context (GstElement * element,
static gboolean gst_d3d11_h265_dec_open (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h265_dec_close (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h265_dec_start (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h265_dec_stop (GstVideoDecoder * decoder);
static GstFlowReturn gst_d3d11_h265_dec_handle_frame (GstVideoDecoder *
decoder, GstVideoCodecFrame * frame);
static gboolean gst_d3d11_h265_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h265_dec_decide_allocation (GstVideoDecoder *
decoder, GstQuery * query);
@ -149,10 +145,6 @@ gst_d3d11_h265_dec_class_init (GstD3D11H265DecClass * klass)
decoder_class->open = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_open);
decoder_class->close = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_close);
decoder_class->start = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_start);
decoder_class->stop = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_stop);
decoder_class->handle_frame =
GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_handle_frame);
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_negotiate);
decoder_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_decide_allocation);
@ -262,56 +254,16 @@ gst_d3d11_h265_dec_close (GstVideoDecoder * decoder)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
if (self->output_state)
gst_video_codec_state_unref (self->output_state);
self->output_state = NULL;
gst_clear_object (&self->d3d11_decoder);
gst_clear_object (&self->device);
return TRUE;
}
static gboolean
gst_d3d11_h265_dec_start (GstVideoDecoder * decoder)
{
return GST_VIDEO_DECODER_CLASS (parent_class)->start (decoder);
}
static gboolean
gst_d3d11_h265_dec_stop (GstVideoDecoder * decoder)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
gst_h265_picture_replace (&self->current_picture, NULL);
return GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder);
}
static GstFlowReturn
gst_d3d11_h265_dec_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
GstBuffer *in_buf = frame->input_buffer;
GST_LOG_OBJECT (self,
"handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
if (!self->current_picture) {
GST_ERROR_OBJECT (self, "No current picture");
gst_video_decoder_drop_frame (decoder, frame);
return GST_FLOW_ERROR;
}
gst_video_codec_frame_set_user_data (frame,
self->current_picture, (GDestroyNotify) gst_h265_picture_unref);
self->current_picture = NULL;
gst_video_codec_frame_unref (frame);
return GST_FLOW_OK;
}
static gboolean
gst_d3d11_h265_dec_negotiate (GstVideoDecoder * decoder)
{
@ -750,8 +702,6 @@ gst_d3d11_h265_dec_new_picture (GstH265Decoder * decoder,
GST_LOG_OBJECT (self, "New h265picture %p", picture);
gst_h265_picture_replace (&self->current_picture, picture);
return TRUE;
}

View file

@ -62,8 +62,6 @@ struct _GstD3D11H265Dec
GstD3D11Decoder *d3d11_decoder;
GstH265Picture *current_picture;
/* Pointing current bitstream buffer */
guint current_offset;
guint bitstream_buffer_size;

View file

@ -109,10 +109,6 @@ static void gst_d3d11_vp9_dec_set_context (GstElement * element,
static gboolean gst_d3d11_vp9_dec_open (GstVideoDecoder * decoder);
static gboolean gst_d3d11_vp9_dec_close (GstVideoDecoder * decoder);
static gboolean gst_d3d11_vp9_dec_start (GstVideoDecoder * decoder);
static gboolean gst_d3d11_vp9_dec_stop (GstVideoDecoder * decoder);
static GstFlowReturn gst_d3d11_vp9_dec_handle_frame (GstVideoDecoder *
decoder, GstVideoCodecFrame * frame);
static gboolean gst_d3d11_vp9_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_d3d11_vp9_dec_decide_allocation (GstVideoDecoder *
decoder, GstQuery * query);
@ -167,10 +163,6 @@ gst_d3d11_vp9_dec_class_init (GstD3D11Vp9DecClass * klass)
decoder_class->open = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_open);
decoder_class->close = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_close);
decoder_class->start = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_start);
decoder_class->stop = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_stop);
decoder_class->handle_frame =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_handle_frame);
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_negotiate);
decoder_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_decide_allocation);
@ -273,50 +265,6 @@ gst_d3d11_vp9_dec_close (GstVideoDecoder * decoder)
return TRUE;
}
static gboolean
gst_d3d11_vp9_dec_start (GstVideoDecoder * decoder)
{
return GST_VIDEO_DECODER_CLASS (parent_class)->start (decoder);
}
static gboolean
gst_d3d11_vp9_dec_stop (GstVideoDecoder * decoder)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
gst_vp9_picture_replace (&self->current_picture, NULL);
return GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder);
}
static GstFlowReturn
gst_d3d11_vp9_dec_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GstBuffer *in_buf = frame->input_buffer;
GST_LOG_OBJECT (self,
"handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
if (!self->current_picture) {
GST_ERROR_OBJECT (self, "No current picture");
gst_video_decoder_drop_frame (decoder, frame);
return GST_FLOW_ERROR;
}
gst_video_codec_frame_set_user_data (frame,
self->current_picture, (GDestroyNotify) gst_vp9_picture_unref);
self->current_picture = NULL;
gst_video_codec_frame_unref (frame);
return GST_FLOW_OK;
}
static gboolean
gst_d3d11_vp9_dec_negotiate (GstVideoDecoder * decoder)
{
@ -550,8 +498,6 @@ gst_d3d11_vp9_dec_new_picture (GstVp9Decoder * decoder, GstVp9Picture * picture)
GST_LOG_OBJECT (self, "New VP9 picture %p", picture);
gst_vp9_picture_replace (&self->current_picture, picture);
return TRUE;
}
@ -582,8 +528,6 @@ gst_d3d11_vp9_dec_duplicate_picture (GstVp9Decoder * decoder,
gst_vp9_picture_set_user_data (new_picture,
gst_buffer_ref (view_buffer), (GDestroyNotify) gst_buffer_unref);
gst_vp9_picture_replace (&self->current_picture, new_picture);
return new_picture;
}

View file

@ -50,8 +50,6 @@ struct _GstD3D11Vp9Dec
GstD3D11Decoder *d3d11_decoder;
GstVp9Picture *current_picture;
guint width, height;
GstVP9Profile profile;

View file

@ -134,6 +134,8 @@ static gboolean gst_h264_decoder_set_format (GstVideoDecoder * decoder,
static GstFlowReturn gst_h264_decoder_finish (GstVideoDecoder * decoder);
static gboolean gst_h264_decoder_flush (GstVideoDecoder * decoder);
static GstFlowReturn gst_h264_decoder_drain (GstVideoDecoder * decoder);
static GstFlowReturn gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame);
/* codec spcific functions */
static gboolean gst_h264_decoder_process_sps (GstH264Decoder * self,
@ -163,6 +165,8 @@ gst_h264_decoder_class_init (GstH264DecoderClass * klass)
decoder_class->finish = GST_DEBUG_FUNCPTR (gst_h264_decoder_finish);
decoder_class->flush = GST_DEBUG_FUNCPTR (gst_h264_decoder_flush);
decoder_class->drain = GST_DEBUG_FUNCPTR (gst_h264_decoder_drain);
decoder_class->handle_frame =
GST_DEBUG_FUNCPTR (gst_h264_decoder_handle_frame);
}
static void
@ -253,6 +257,35 @@ gst_h264_decoder_finish (GstVideoDecoder * decoder)
return gst_h264_decoder_drain (decoder);
}
static GstFlowReturn
gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame)
{
GstH264Decoder *self = GST_H264_DECODER (decoder);
GstH264DecoderPrivate *priv = self->priv;
GstBuffer *in_buf = frame->input_buffer;
GST_LOG_OBJECT (self,
"handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
if (!priv->current_picture) {
GST_ERROR_OBJECT (self, "No current picture");
gst_video_decoder_drop_frame (decoder, frame);
return GST_FLOW_ERROR;
}
gst_video_codec_frame_set_user_data (frame,
gst_h264_picture_ref (priv->current_picture),
(GDestroyNotify) gst_h264_picture_unref);
gst_video_codec_frame_unref (frame);
return GST_FLOW_OK;
}
static gboolean
gst_h264_decoder_parse_sps (GstH264Decoder * self, GstH264NalUnit * nalu)
{
@ -1197,29 +1230,28 @@ gst_h264_decoder_finish_current_picture (GstH264Decoder * self)
{
GstH264DecoderPrivate *priv = self->priv;
GstH264DecoderClass *klass;
GstH264Picture *picture;
gboolean ret = TRUE;
if (!priv->current_picture)
return TRUE;
picture = priv->current_picture;
priv->current_picture = NULL;
klass = GST_H264_DECODER_GET_CLASS (self);
if (klass->end_picture)
ret = klass->end_picture (self, picture);
ret = klass->end_picture (self, priv->current_picture);
gst_video_decoder_have_frame (GST_VIDEO_DECODER (self));
/* finish picture takes ownership of the picture */
if (!gst_h264_decoder_finish_picture (self, picture)) {
ret = gst_h264_decoder_finish_picture (self, priv->current_picture);
priv->current_picture = NULL;
if (!ret) {
GST_ERROR_OBJECT (self, "Failed to finish picture");
return FALSE;
}
return ret;
return TRUE;
}
static gint

View file

@ -108,6 +108,8 @@ static gboolean gst_h265_decoder_set_format (GstVideoDecoder * decoder,
static GstFlowReturn gst_h265_decoder_finish (GstVideoDecoder * decoder);
static gboolean gst_h265_decoder_flush (GstVideoDecoder * decoder);
static GstFlowReturn gst_h265_decoder_drain (GstVideoDecoder * decoder);
static GstFlowReturn gst_h265_decoder_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame);
static gboolean gst_h265_decoder_finish_current_picture (GstH265Decoder * self);
static void gst_h265_decoder_clear_dpb (GstH265Decoder * self);
@ -127,6 +129,8 @@ gst_h265_decoder_class_init (GstH265DecoderClass * klass)
decoder_class->finish = GST_DEBUG_FUNCPTR (gst_h265_decoder_finish);
decoder_class->flush = GST_DEBUG_FUNCPTR (gst_h265_decoder_flush);
decoder_class->drain = GST_DEBUG_FUNCPTR (gst_h265_decoder_drain);
decoder_class->handle_frame =
GST_DEBUG_FUNCPTR (gst_h265_decoder_handle_frame);
}
static void
@ -1442,31 +1446,60 @@ gst_h265_decoder_finish_current_picture (GstH265Decoder * self)
{
GstH265DecoderPrivate *priv = self->priv;
GstH265DecoderClass *klass;
GstH265Picture *picture;
gboolean ret = TRUE;
if (!priv->current_picture)
return TRUE;
picture = priv->current_picture;
priv->current_picture = NULL;
klass = GST_H265_DECODER_GET_CLASS (self);
if (klass->end_picture)
ret = klass->end_picture (self, picture);
ret = klass->end_picture (self, priv->current_picture);
if (picture->output_flag) {
if (priv->current_picture->output_flag) {
gst_video_decoder_have_frame (GST_VIDEO_DECODER (self));
} else {
GST_DEBUG_OBJECT (self, "Skip have_frame for picture %p", picture);
GST_DEBUG_OBJECT (self, "Skip have_frame for picture %p",
priv->current_picture);
}
/* finish picture takes ownership of the picture */
if (!gst_h265_decoder_finish_picture (self, picture)) {
ret = gst_h265_decoder_finish_picture (self, priv->current_picture);
priv->current_picture = NULL;
if (!ret) {
GST_ERROR_OBJECT (self, "Failed to finish picture");
return FALSE;
}
return ret;
return TRUE;
}
static GstFlowReturn
gst_h265_decoder_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame)
{
GstH265Decoder *self = GST_H265_DECODER (decoder);
GstH265DecoderPrivate *priv = self->priv;
GstBuffer *in_buf = frame->input_buffer;
GST_LOG_OBJECT (self,
"handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
if (!priv->current_picture) {
GST_ERROR_OBJECT (self, "No current picture");
gst_video_decoder_drop_frame (decoder, frame);
return GST_FLOW_ERROR;
}
gst_video_codec_frame_set_user_data (frame,
gst_h265_picture_ref (priv->current_picture),
(GDestroyNotify) gst_h265_picture_unref);
gst_video_codec_frame_unref (frame);
return GST_FLOW_OK;
}

View file

@ -67,6 +67,8 @@ struct _GstVp9DecoderPrivate
GstVp9Parser *parser;
GstVp9Dpb *dpb;
GstVp9Picture *current_picture;
guint num_frames; /* number of frames in a super frame */
gsize frame_sizes[8]; /* size of frames in a super frame */
guint frame_cnt; /* frame count variable for super frame */
@ -85,6 +87,8 @@ static GstFlowReturn gst_vp9_decoder_parse (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
static gboolean gst_vp9_decoder_set_format (GstVideoDecoder * decoder,
GstVideoCodecState * state);
static GstFlowReturn gst_vp9_decoder_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame);
static GstVp9Picture *gst_vp9_decoder_duplicate_picture_default (GstVp9Decoder *
decoder, GstVp9Picture * picture);
@ -98,6 +102,8 @@ gst_vp9_decoder_class_init (GstVp9DecoderClass * klass)
decoder_class->stop = GST_DEBUG_FUNCPTR (gst_vp9_decoder_stop);
decoder_class->parse = GST_DEBUG_FUNCPTR (gst_vp9_decoder_parse);
decoder_class->set_format = GST_DEBUG_FUNCPTR (gst_vp9_decoder_set_format);
decoder_class->handle_frame =
GST_DEBUG_FUNCPTR (gst_vp9_decoder_handle_frame);
klass->duplicate_picture =
GST_DEBUG_FUNCPTR (gst_vp9_decoder_duplicate_picture_default);
@ -309,6 +315,9 @@ gst_vp9_decoder_parse (GstVideoDecoder * decoder,
picture->size = buf_size;
gst_video_decoder_add_to_frame (GST_VIDEO_DECODER (self), picture->size);
/* hold pointer to picture. default handle_frame implementation uses it */
priv->current_picture = picture;
flow_ret = gst_video_decoder_have_frame (GST_VIDEO_DECODER (self));
if (flow_ret == GST_FLOW_OK) {
@ -317,6 +326,7 @@ gst_vp9_decoder_parse (GstVideoDecoder * decoder,
}
gst_vp9_picture_unref (picture);
priv->current_picture = NULL;
return flow_ret;
}
@ -368,6 +378,9 @@ gst_vp9_decoder_parse (GstVideoDecoder * decoder,
gst_adapter_unmap (adapter);
gst_video_decoder_add_to_frame (GST_VIDEO_DECODER (self), picture->size);
/* hold pointer to picture. default handle_frame implementation uses it */
priv->current_picture = picture;
flow_ret = gst_video_decoder_have_frame (GST_VIDEO_DECODER (self));
if (flow_ret == GST_FLOW_OK && klass->output_picture) {
@ -377,6 +390,7 @@ gst_vp9_decoder_parse (GstVideoDecoder * decoder,
picture->data = NULL;
gst_vp9_dpb_add (priv->dpb, picture);
priv->current_picture = NULL;
return flow_ret;
@ -432,3 +446,32 @@ gst_vp9_decoder_duplicate_picture_default (GstVp9Decoder * decoder,
return new_picture;
}
static GstFlowReturn
gst_vp9_decoder_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame)
{
GstVp9Decoder *self = GST_VP9_DECODER (decoder);
GstVp9DecoderPrivate *priv = self->priv;
GstBuffer *in_buf = frame->input_buffer;
GST_LOG_OBJECT (self,
"handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
if (!priv->current_picture) {
GST_ERROR_OBJECT (self, "No current picture");
gst_video_decoder_drop_frame (decoder, frame);
return GST_FLOW_ERROR;
}
gst_video_codec_frame_set_user_data (frame,
gst_vp9_picture_ref (priv->current_picture),
(GDestroyNotify) gst_vp9_picture_unref);
gst_video_codec_frame_unref (frame);
return GST_FLOW_OK;
}