codecs: av1decoder: Use GstFlowReturn everywhere

The same modification as that of the VP8 decoder

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/2528>
Authored by Seungha Yang on 2021-09-18 00:33:12 +09:00; committed by Nicolas Dufresne
parent e009e80178
commit 405d311106
4 changed files with 134 additions and 102 deletions
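As context for the diffs below: the GstAV1DecoderClass vfuncs (new_sequence, new_picture, start_picture, decode_tile, end_picture) switch from gboolean to GstFlowReturn, so a subclass can report a specific flow status (e.g. GST_FLOW_NOT_NEGOTIATED, GST_FLOW_FLUSHING) instead of having every failure collapsed into GST_FLOW_ERROR. A minimal sketch of a subclass handler under the new contract; the element name is hypothetical and the check is loosely modeled on the d3d11 decoder change further down:

#include <gst/codecs/gstav1decoder.h>

/* Hypothetical subclass vfunc, for illustration only: the handler now
 * returns a GstFlowReturn instead of TRUE/FALSE, so a capability problem
 * can be reported as GST_FLOW_NOT_NEGOTIATED rather than a generic error. */
static GstFlowReturn
gst_my_av1_dec_new_sequence (GstAV1Decoder * decoder,
    const GstAV1SequenceHeaderOBU * seq_hdr)
{
  /* An unsupported profile is a negotiation failure, not a decode error */
  if (seq_hdr->seq_profile != GST_AV1_PROFILE_0)
    return GST_FLOW_NOT_NEGOTIATED;

  /* Accept the sequence; the base class continues decoding */
  return GST_FLOW_OK;
}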

View file

@@ -256,7 +256,7 @@ gst_av1_decoder_profile_to_string (GstAV1Profile profile)
return NULL;
}
static gboolean
static GstFlowReturn
gst_av1_decoder_process_sequence (GstAV1Decoder * self, GstAV1OBU * obu)
{
GstAV1ParserResult res;
@@ -264,6 +264,7 @@ gst_av1_decoder_process_sequence (GstAV1Decoder * self, GstAV1OBU * obu)
GstAV1SequenceHeaderOBU seq_header;
GstAV1SequenceHeaderOBU old_seq_header = { 0, };
GstAV1DecoderClass *klass = GST_AV1_DECODER_GET_CLASS (self);
GstFlowReturn ret = GST_FLOW_OK;
if (priv->parser->seq_header)
old_seq_header = *priv->parser->seq_header;
@@ -272,12 +273,12 @@ gst_av1_decoder_process_sequence (GstAV1Decoder * self, GstAV1OBU * obu)
obu, &seq_header);
if (res != GST_AV1_PARSER_OK) {
GST_WARNING_OBJECT (self, "Parsing sequence failed.");
return FALSE;
return GST_FLOW_ERROR;
}
if (!memcmp (&old_seq_header, &seq_header, sizeof (GstAV1SequenceHeaderOBU))) {
GST_DEBUG_OBJECT (self, "Get same sequence header.");
return TRUE;
return GST_FLOW_OK;
}
g_assert (klass->new_sequence);
@@ -289,9 +290,10 @@ gst_av1_decoder_process_sequence (GstAV1Decoder * self, GstAV1OBU * obu)
priv->max_width, priv->max_height, seq_header.max_frame_width_minus_1 + 1,
seq_header.max_frame_height_minus_1 + 1);
if (!klass->new_sequence (self, &seq_header)) {
ret = klass->new_sequence (self, &seq_header);
if (ret != GST_FLOW_OK) {
GST_ERROR_OBJECT (self, "subclass does not want accept new sequence");
return FALSE;
return ret;
}
priv->profile = seq_header.seq_profile;
@@ -299,10 +301,10 @@ gst_av1_decoder_process_sequence (GstAV1Decoder * self, GstAV1OBU * obu)
priv->max_height = seq_header.max_frame_height_minus_1 + 1;
gst_av1_dpb_clear (priv->dpb);
return TRUE;
return GST_FLOW_OK;
}
static gboolean
static GstFlowReturn
gst_av1_decoder_decode_tile_group (GstAV1Decoder * self,
GstAV1TileGroupOBU * tile_group, GstAV1OBU * obu)
{
@@ -310,42 +312,45 @@ gst_av1_decoder_decode_tile_group (GstAV1Decoder * self,
GstAV1DecoderClass *klass = GST_AV1_DECODER_GET_CLASS (self);
GstAV1Picture *picture = priv->current_picture;
GstAV1Tile tile;
GstFlowReturn ret = GST_FLOW_OK;
if (!picture) {
GST_ERROR_OBJECT (self, "No picture has created for current frame");
return FALSE;
return GST_FLOW_ERROR;
}
if (picture->frame_hdr.show_existing_frame) {
GST_ERROR_OBJECT (self, "Current picture is showing the existing frame.");
return FALSE;
return GST_FLOW_ERROR;
}
tile.obu = *obu;
tile.tile_group = *tile_group;
g_assert (klass->decode_tile);
if (!klass->decode_tile (self, picture, &tile)) {
GST_ERROR_OBJECT (self, "Decode tile error");
return FALSE;
ret = klass->decode_tile (self, picture, &tile);
if (ret != GST_FLOW_OK) {
GST_WARNING_OBJECT (self, "Decode tile error");
return ret;
}
return TRUE;
return GST_FLOW_OK;
}
static gboolean
static GstFlowReturn
gst_av1_decoder_decode_frame_header (GstAV1Decoder * self,
GstAV1FrameHeaderOBU * frame_header)
{
GstAV1DecoderPrivate *priv = self->priv;
GstAV1DecoderClass *klass = GST_AV1_DECODER_GET_CLASS (self);
GstAV1Picture *picture = NULL;
GstFlowReturn ret = GST_FLOW_OK;
g_assert (priv->current_frame);
if (priv->current_picture != NULL) {
GST_ERROR_OBJECT (self, "Already have picture for current frame");
return FALSE;
return GST_FLOW_ERROR;
}
if (frame_header->show_existing_frame) {
@@ -355,20 +360,22 @@ gst_av1_decoder_decode_frame_header (GstAV1Decoder * self,
if (!ref_picture) {
GST_WARNING_OBJECT (self, "Failed to find the frame index %d to show.",
frame_header->frame_to_show_map_idx);
return FALSE;
return GST_FLOW_ERROR;
}
if (gst_av1_parser_reference_frame_loading (priv->parser,
&ref_picture->frame_hdr) != GST_AV1_PARSER_OK) {
GST_WARNING_OBJECT (self, "load the reference frame failed");
return FALSE;
return GST_FLOW_ERROR;
}
/* FIXME: duplicate picture might be optional feature like that of VP9
* decoder baseclass */
g_assert (klass->duplicate_picture);
picture = klass->duplicate_picture (self, ref_picture);
if (!picture) {
GST_ERROR_OBJECT (self, "subclass didn't provide duplicated picture");
return FALSE;
return GST_FLOW_ERROR;
}
picture->system_frame_number = priv->current_frame->system_frame_number;
@@ -390,27 +397,29 @@ gst_av1_decoder_decode_frame_header (GstAV1Decoder * self,
GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY);
if (klass->new_picture) {
if (!klass->new_picture (self, priv->current_frame, picture)) {
GST_ERROR_OBJECT (self, "new picture error");
return FALSE;
ret = klass->new_picture (self, priv->current_frame, picture);
if (ret != GST_FLOW_OK) {
GST_WARNING_OBJECT (self, "new picture error");
return ret;
}
}
priv->current_picture = picture;
if (klass->start_picture) {
if (!klass->start_picture (self, picture, priv->dpb)) {
GST_ERROR_OBJECT (self, "start picture error");
return FALSE;
ret = klass->start_picture (self, picture, priv->dpb);
if (ret != GST_FLOW_OK) {
GST_WARNING_OBJECT (self, "start picture error");
return ret;
}
}
}
g_assert (priv->current_picture != NULL);
return TRUE;
return GST_FLOW_OK;
}
static gboolean
static GstFlowReturn
gst_av1_decoder_process_frame_header (GstAV1Decoder * self, GstAV1OBU * obu)
{
GstAV1ParserResult res;
@@ -421,13 +430,13 @@ gst_av1_decoder_process_frame_header (GstAV1Decoder * self, GstAV1OBU * obu)
&frame_header);
if (res != GST_AV1_PARSER_OK) {
GST_WARNING_OBJECT (self, "Parsing frame header failed.");
return FALSE;
return GST_FLOW_ERROR;
}
return gst_av1_decoder_decode_frame_header (self, &frame_header);
}
static gboolean
static GstFlowReturn
gst_av1_decoder_process_tile_group (GstAV1Decoder * self, GstAV1OBU * obu)
{
GstAV1ParserResult res;
@@ -437,42 +446,50 @@ gst_av1_decoder_process_tile_group (GstAV1Decoder * self, GstAV1OBU * obu)
res = gst_av1_parser_parse_tile_group_obu (priv->parser, obu, &tile_group);
if (res != GST_AV1_PARSER_OK) {
GST_WARNING_OBJECT (self, "Parsing tile group failed.");
return FALSE;
return GST_FLOW_ERROR;
}
return gst_av1_decoder_decode_tile_group (self, &tile_group, obu);
}
static gboolean
static GstFlowReturn
gst_av1_decoder_process_frame (GstAV1Decoder * self, GstAV1OBU * obu)
{
GstAV1ParserResult res;
GstAV1DecoderPrivate *priv = self->priv;
GstAV1FrameOBU frame;
GstFlowReturn ret = GST_FLOW_OK;
res = gst_av1_parser_parse_frame_obu (priv->parser, obu, &frame);
if (res != GST_AV1_PARSER_OK) {
GST_WARNING_OBJECT (self, "Parsing frame failed.");
return FALSE;
return GST_FLOW_ERROR;
}
return gst_av1_decoder_decode_frame_header (self, &frame.frame_header) &&
gst_av1_decoder_decode_tile_group (self, &frame.tile_group, obu);
ret = gst_av1_decoder_decode_frame_header (self, &frame.frame_header);
if (ret != GST_FLOW_OK)
return ret;
return gst_av1_decoder_decode_tile_group (self, &frame.tile_group, obu);
}
static gboolean
static GstFlowReturn
gst_av1_decoder_temporal_delimiter (GstAV1Decoder * self, GstAV1OBU * obu)
{
GstAV1DecoderPrivate *priv = self->priv;
return gst_av1_parser_parse_temporal_delimiter_obu (priv->parser,
obu) == GST_AV1_PARSER_OK;
if (gst_av1_parser_parse_temporal_delimiter_obu (priv->parser, obu) ==
GST_AV1_PARSER_OK) {
return GST_FLOW_OK;
}
return GST_FLOW_ERROR;
}
static gboolean
static GstFlowReturn
gst_av1_decoder_decode_one_obu (GstAV1Decoder * self, GstAV1OBU * obu)
{
gboolean ret = TRUE;
GstFlowReturn ret = GST_FLOW_OK;
GST_LOG_OBJECT (self, "Decode obu %s", get_obu_name (obu->obu_type));
switch (obu->obu_type) {
@@ -496,15 +513,13 @@ gst_av1_decoder_decode_one_obu (GstAV1Decoder * self, GstAV1OBU * obu)
case GST_AV1_OBU_REDUNDANT_FRAME_HEADER:
case GST_AV1_OBU_TILE_LIST:
case GST_AV1_OBU_PADDING:
ret = TRUE;
break;
default:
GST_WARNING_OBJECT (self, "an unrecognized obu type %d", obu->obu_type);
ret = FALSE;
break;
}
if (!ret)
if (ret != GST_FLOW_OK)
GST_WARNING_OBJECT (self, "Failed to handle %s OBU",
get_obu_name (obu->obu_type));
@@ -558,8 +573,8 @@ gst_av1_decoder_handle_frame (GstVideoDecoder * decoder,
if (!gst_buffer_map (in_buf, &map, GST_MAP_READ)) {
priv->current_frame = NULL;
GST_ERROR_OBJECT (self, "can not map input buffer");
ret = GST_FLOW_ERROR;
return ret;
return GST_FLOW_ERROR;
}
total_consumed = 0;
@@ -571,8 +586,8 @@ gst_av1_decoder_handle_frame (GstVideoDecoder * decoder,
goto out;
}
if (!gst_av1_decoder_decode_one_obu (self, &obu)) {
ret = GST_FLOW_ERROR;
ret = gst_av1_decoder_decode_one_obu (self, &obu);
if (ret != GST_FLOW_OK) {
goto out;
}
@@ -587,9 +602,9 @@ gst_av1_decoder_handle_frame (GstVideoDecoder * decoder,
if (!priv->current_picture->frame_hdr.show_existing_frame) {
if (klass->end_picture) {
if (!klass->end_picture (self, priv->current_picture)) {
ret = GST_FLOW_ERROR;
GST_ERROR_OBJECT (self, "end picture error");
ret = klass->end_picture (self, priv->current_picture);
if (ret != GST_FLOW_OK) {
GST_WARNING_OBJECT (self, "end picture error");
goto out;
}
}
@@ -626,13 +641,19 @@ out:
if (priv->current_picture)
gst_av1_picture_unref (priv->current_picture);
GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE,
("Failed to handle the frame %d", frame->system_frame_number),
NULL, ret);
gst_video_decoder_drop_frame (decoder, frame);
}
priv->current_picture = NULL;
priv->current_frame = NULL;
return ret;
if (ret != GST_FLOW_OK) {
GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE,
("Failed to handle the frame %d", frame->system_frame_number),
NULL, ret);
return ret;
}
return GST_FLOW_OK;
}

View file

@@ -75,7 +75,7 @@ struct _GstAV1DecoderClass
*
* Since: 1.20
*/
gboolean (*new_sequence) (GstAV1Decoder * decoder,
GstFlowReturn (*new_sequence) (GstAV1Decoder * decoder,
const GstAV1SequenceHeaderOBU * seq_hdr);
/**
* GstAV1DecoderClass::new_picture:
@@ -89,7 +89,7 @@ struct _GstAV1DecoderClass
*
* Since: 1.20
*/
gboolean (*new_picture) (GstAV1Decoder * decoder,
GstFlowReturn (*new_picture) (GstAV1Decoder * decoder,
GstVideoCodecFrame * frame,
GstAV1Picture * picture);
/**
@@ -115,7 +115,7 @@ struct _GstAV1DecoderClass
*
* Since: 1.20
*/
gboolean (*start_picture) (GstAV1Decoder * decoder,
GstFlowReturn (*start_picture) (GstAV1Decoder * decoder,
GstAV1Picture * picture,
GstAV1Dpb * dpb);
/**
@@ -129,7 +129,7 @@ struct _GstAV1DecoderClass
*
* Since: 1.20
*/
gboolean (*decode_tile) (GstAV1Decoder * decoder,
GstFlowReturn (*decode_tile) (GstAV1Decoder * decoder,
GstAV1Picture * picture,
GstAV1Tile * tile);
/**
@@ -142,7 +142,7 @@ struct _GstAV1DecoderClass
*
* Since: 1.20
*/
gboolean (*end_picture) (GstAV1Decoder * decoder,
GstFlowReturn (*end_picture) (GstAV1Decoder * decoder,
GstAV1Picture * picture);
/**
* GstAV1DecoderClass::output_picture:

View file

@@ -408,17 +408,17 @@ static gboolean gst_d3d11_av1_dec_sink_event (GstVideoDecoder * decoder,
GstEvent * event);
/* GstAV1Decoder */
static gboolean gst_d3d11_av1_dec_new_sequence (GstAV1Decoder * decoder,
static GstFlowReturn gst_d3d11_av1_dec_new_sequence (GstAV1Decoder * decoder,
const GstAV1SequenceHeaderOBU * seq_hdr);
static gboolean gst_d3d11_av1_dec_new_picture (GstAV1Decoder * decoder,
static GstFlowReturn gst_d3d11_av1_dec_new_picture (GstAV1Decoder * decoder,
GstVideoCodecFrame * frame, GstAV1Picture * picture);
static GstAV1Picture *gst_d3d11_av1_dec_duplicate_picture (GstAV1Decoder *
decoder, GstAV1Picture * picture);
static gboolean gst_d3d11_av1_dec_start_picture (GstAV1Decoder * decoder,
static GstFlowReturn gst_d3d11_av1_dec_start_picture (GstAV1Decoder * decoder,
GstAV1Picture * picture, GstAV1Dpb * dpb);
static gboolean gst_d3d11_av1_dec_decode_tile (GstAV1Decoder * decoder,
static GstFlowReturn gst_d3d11_av1_dec_decode_tile (GstAV1Decoder * decoder,
GstAV1Picture * picture, GstAV1Tile * tile);
static gboolean gst_d3d11_av1_dec_end_picture (GstAV1Decoder * decoder,
static GstFlowReturn gst_d3d11_av1_dec_end_picture (GstAV1Decoder * decoder,
GstAV1Picture * picture);
static GstFlowReturn gst_d3d11_av1_dec_output_picture (GstAV1Decoder *
decoder, GstVideoCodecFrame * frame, GstAV1Picture * picture);
@@ -615,7 +615,7 @@ gst_d3d11_av1_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
}
static gboolean
static GstFlowReturn
gst_d3d11_av1_dec_new_sequence (GstAV1Decoder * decoder,
const GstAV1SequenceHeaderOBU * seq_hdr)
{
@@ -628,12 +628,12 @@ gst_d3d11_av1_dec_new_sequence (GstAV1Decoder * decoder,
if (seq_hdr->seq_profile != GST_AV1_PROFILE_0) {
GST_WARNING_OBJECT (self, "Unsupported profile %d", seq_hdr->seq_profile);
return FALSE;
return GST_FLOW_NOT_NEGOTIATED;
}
if (seq_hdr->num_planes != 3) {
GST_WARNING_OBJECT (self, "Monochrome is not supported");
return FALSE;
return GST_FLOW_NOT_NEGOTIATED;
}
inner->seq_hdr = *seq_hdr;
@@ -666,7 +666,7 @@ gst_d3d11_av1_dec_new_sequence (GstAV1Decoder * decoder,
out_format = GST_VIDEO_FORMAT_P010_10LE;
} else {
GST_WARNING_OBJECT (self, "Invalid bit-depth %d", seq_hdr->bit_depth);
return FALSE;
return GST_FLOW_NOT_NEGOTIATED;
}
gst_video_info_set_format (&info,
@@ -676,19 +676,19 @@ gst_d3d11_av1_dec_new_sequence (GstAV1Decoder * decoder,
decoder->input_state, &info, (gint) inner->max_width,
(gint) inner->max_height, NUM_OUTPUT_VIEW)) {
GST_ERROR_OBJECT (self, "Failed to create decoder");
return FALSE;
return GST_FLOW_NOT_NEGOTIATED;
}
if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
GST_ERROR_OBJECT (self, "Failed to negotiate with downstream");
return FALSE;
return GST_FLOW_NOT_NEGOTIATED;
}
}
return TRUE;
return GST_FLOW_OK;
}
static gboolean
static GstFlowReturn
gst_d3d11_av1_dec_new_picture (GstAV1Decoder * decoder,
GstVideoCodecFrame * frame, GstAV1Picture * picture)
{
@@ -700,7 +700,7 @@ gst_d3d11_av1_dec_new_picture (GstAV1Decoder * decoder,
GST_VIDEO_DECODER (decoder));
if (!view_buffer) {
GST_DEBUG_OBJECT (self, "No available output view buffer");
return FALSE;
return GST_FLOW_FLUSHING;
}
GST_LOG_OBJECT (self, "New output view buffer %" GST_PTR_FORMAT, view_buffer);
@@ -710,7 +710,7 @@ gst_d3d11_av1_dec_new_picture (GstAV1Decoder * decoder,
GST_LOG_OBJECT (self, "New AV1 picture %p", picture);
return TRUE;
return GST_FLOW_OK;
}
static GstAV1Picture *
@@ -764,7 +764,7 @@ gst_d3d11_av1_dec_get_output_view_from_picture (GstD3D11AV1Dec * self,
return view;
}
static gboolean
static GstFlowReturn
gst_d3d11_av1_dec_start_picture (GstAV1Decoder * decoder,
GstAV1Picture * picture, GstAV1Dpb * dpb)
{
@@ -781,7 +781,7 @@ gst_d3d11_av1_dec_start_picture (GstAV1Decoder * decoder,
&view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
return GST_FLOW_ERROR;
}
memset (pic_params, 0, sizeof (GST_DXVA_PicParams_AV1));
@@ -894,7 +894,7 @@ gst_d3d11_av1_dec_start_picture (GstAV1Decoder * decoder,
if (!other_view) {
GST_ERROR_OBJECT (self,
"current picture does not have output view handle");
return FALSE;
return GST_FLOW_ERROR;
}
pic_params->RefFrameMapTextureIndex[i] = other_view_id;
@@ -1090,10 +1090,10 @@ gst_d3d11_av1_dec_start_picture (GstAV1Decoder * decoder,
inner->bitstream_buffer.resize (0);
inner->tile_list.resize (0);
return TRUE;
return GST_FLOW_OK;
}
static gboolean
static GstFlowReturn
gst_d3d11_av1_dec_decode_tile (GstAV1Decoder * decoder,
GstAV1Picture * picture, GstAV1Tile * tile)
{
@@ -1134,10 +1134,10 @@ gst_d3d11_av1_dec_decode_tile (GstAV1Decoder * decoder,
memcpy (&inner->bitstream_buffer[0] + pos,
tile->obu.data, tile->obu.obu_size);
return TRUE;
return GST_FLOW_OK;
}
static gboolean
static GstFlowReturn
gst_d3d11_av1_dec_end_picture (GstAV1Decoder * decoder, GstAV1Picture * picture)
{
GstD3D11AV1Dec *self = GST_D3D11_AV1_DEC (decoder);
@@ -1150,14 +1150,14 @@ gst_d3d11_av1_dec_end_picture (GstAV1Decoder * decoder, GstAV1Picture * picture)
if (inner->bitstream_buffer.empty () || inner->tile_list.empty ()) {
GST_ERROR_OBJECT (self, "No bitstream buffer to submit");
return FALSE;
return GST_FLOW_ERROR;
}
view = gst_d3d11_av1_dec_get_output_view_from_picture (self, picture,
&view_id);
if (!view) {
GST_ERROR_OBJECT (self, "current picture does not have output view handle");
return FALSE;
return GST_FLOW_ERROR;
}
memset (&input_args, 0, sizeof (GstD3D11DecodeInputStreamArgs));
@@ -1185,8 +1185,10 @@ gst_d3d11_av1_dec_end_picture (GstAV1Decoder * decoder, GstAV1Picture * picture)
input_args.bitstream = &inner->bitstream_buffer[0];
input_args.bitstream_size = inner->bitstream_buffer.size ();
return gst_d3d11_decoder_decode_frame (inner->d3d11_decoder,
view, &input_args);
if (!gst_d3d11_decoder_decode_frame (inner->d3d11_decoder, view, &input_args))
return GST_FLOW_ERROR;
return GST_FLOW_OK;
}
static GstFlowReturn

View file

@@ -246,7 +246,7 @@ gst_va_av1_dec_getcaps (GstVideoDecoder * decoder, GstCaps * filter)
return caps;
}
static gboolean
static GstFlowReturn
gst_va_av1_dec_new_sequence (GstAV1Decoder * decoder,
const GstAV1SequenceHeaderOBU * seq_hdr)
{
@@ -259,11 +259,11 @@ gst_va_av1_dec_new_sequence (GstAV1Decoder * decoder,
profile = _get_profile (self, seq_hdr);
if (profile == VAProfileNone)
return FALSE;
return GST_FLOW_NOT_NEGOTIATED;
rt_format = _get_rtformat (self, profile, seq_hdr);
if (!rt_format)
return FALSE;
return GST_FLOW_NOT_NEGOTIATED;
self->seq = *seq_hdr;
@@ -284,10 +284,10 @@ gst_va_av1_dec_new_sequence (GstAV1Decoder * decoder,
base->need_valign = FALSE;
}
return TRUE;
return GST_FLOW_OK;
}
static gboolean
static GstFlowReturn
gst_va_av1_dec_new_picture (GstAV1Decoder * decoder,
GstVideoCodecFrame * frame, GstAV1Picture * picture)
{
@@ -318,7 +318,7 @@ gst_va_av1_dec_new_picture (GstAV1Decoder * decoder,
if (self->need_negotiation) {
if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
GST_ERROR_OBJECT (self, "Failed to negotiate with downstream");
return FALSE;
return GST_FLOW_NOT_NEGOTIATED;
}
}
@@ -327,7 +327,7 @@ gst_va_av1_dec_new_picture (GstAV1Decoder * decoder,
GST_WARNING_OBJECT (self,
"Failed to allocated output buffer, return %s",
gst_flow_get_name (self->last_ret));
return FALSE;
return self->last_ret;
}
if (picture->apply_grain) {
@@ -335,7 +335,7 @@ gst_va_av1_dec_new_picture (GstAV1Decoder * decoder,
GST_WARNING_OBJECT (self,
"Failed to allocated aux surface for buffer %p",
frame->output_buffer);
return FALSE;
return GST_FLOW_ERROR;
}
}
@@ -353,7 +353,7 @@ gst_va_av1_dec_new_picture (GstAV1Decoder * decoder,
gst_va_decode_picture_get_surface (pic));
}
return TRUE;
return GST_FLOW_OK;
}
static GstAV1Picture *
@@ -573,7 +573,7 @@ _setup_global_motion_info (VADecPictureParameterBufferAV1 * pic_param,
}
}
static gboolean
static GstFlowReturn
gst_va_av1_dec_start_picture (GstAV1Decoder * decoder, GstAV1Picture * picture,
GstAV1Dpb * dpb)
{
@@ -771,12 +771,12 @@ gst_va_av1_dec_start_picture (GstAV1Decoder * decoder, GstAV1Picture * picture,
if (!gst_va_decoder_add_param_buffer (base->decoder, va_pic,
VAPictureParameterBufferType, &pic_param, sizeof (pic_param)))
return FALSE;
return GST_FLOW_ERROR;
return TRUE;
return GST_FLOW_OK;
}
static gboolean
static GstFlowReturn
gst_va_av1_dec_decode_tile (GstAV1Decoder * decoder, GstAV1Picture * picture,
GstAV1Tile * tile)
{
@@ -804,12 +804,17 @@ gst_va_av1_dec_decode_tile (GstAV1Decoder * decoder, GstAV1Picture * picture,
}
va_pic = gst_av1_picture_get_user_data (picture);
return gst_va_decoder_add_slice_buffer_with_n_params (base->decoder, va_pic,
slice_param, sizeof (VASliceParameterBufferAV1), i, tile->obu.data,
tile->obu.obu_size);
if (!gst_va_decoder_add_slice_buffer_with_n_params (base->decoder, va_pic,
slice_param, sizeof (VASliceParameterBufferAV1), i, tile->obu.data,
tile->obu.obu_size)) {
return GST_FLOW_ERROR;
}
return GST_FLOW_OK;
}
static gboolean
static GstFlowReturn
gst_va_av1_dec_end_picture (GstAV1Decoder * decoder, GstAV1Picture * picture)
{
GstVaAV1Dec *self = GST_VA_AV1_DEC (decoder);
@@ -821,8 +826,12 @@ gst_va_av1_dec_end_picture (GstAV1Decoder * decoder, GstAV1Picture * picture)
va_pic = gst_av1_picture_get_user_data (picture);
return gst_va_decoder_decode_with_aux_surface (base->decoder, va_pic,
picture->apply_grain);
if (!gst_va_decoder_decode_with_aux_surface (base->decoder, va_pic,
picture->apply_grain)) {
return GST_FLOW_ERROR;
}
return GST_FLOW_OK;
}
static GstFlowReturn