diff --git a/gst/vaapi/gstvaapidecode.c b/gst/vaapi/gstvaapidecode.c
index 29aa4eb1ba..a016b71495 100644
--- a/gst/vaapi/gstvaapidecode.c
+++ b/gst/vaapi/gstvaapidecode.c
@@ -434,6 +434,11 @@ gst_vaapidecode_finish(GstVideoDecoder *vdec)
     GstVaapiDecode * const decode = GST_VAAPIDECODE(vdec);
     GstVaapiDecoderStatus status;
 
+    /* If there is something in GstVideoDecoder's output adapter, then
+       submit the frame for decoding */
+    if (decode->current_frame_size)
+        gst_video_decoder_have_frame(vdec);
+
     status = gst_vaapi_decoder_flush(decode->decoder);
     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
         goto error_flush;
@@ -617,6 +622,9 @@ gst_vaapidecode_reset_full(GstVaapiDecode *decode, GstCaps *caps, gboolean hard)
         return TRUE;
     }
 
+    /* Reset tracked frame size */
+    decode->current_frame_size = 0;
+
     gst_vaapidecode_destroy(decode);
     return gst_vaapidecode_create(decode, caps);
 }
@@ -705,10 +713,14 @@ gst_vaapidecode_parse(GstVideoDecoder *vdec,
 
     switch (status) {
     case GST_VAAPI_DECODER_STATUS_SUCCESS:
-        if (got_unit_size > 0)
+        if (got_unit_size > 0) {
             gst_video_decoder_add_to_frame(vdec, got_unit_size);
-        if (got_frame)
+            decode->current_frame_size += got_unit_size;
+        }
+        if (got_frame) {
             ret = gst_video_decoder_have_frame(vdec);
+            decode->current_frame_size = 0;
+        }
         else
             ret = GST_FLOW_OK;
         break;
@@ -720,10 +732,12 @@ gst_vaapidecode_parse(GstVideoDecoder *vdec,
     case GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT:
         GST_WARNING("parse error %d", status);
         ret = GST_FLOW_NOT_SUPPORTED;
+        decode->current_frame_size = 0;
         break;
     default:
         GST_ERROR("parse error %d", status);
         ret = GST_FLOW_EOS;
+        decode->current_frame_size = 0;
         break;
     }
     return ret;
diff --git a/gst/vaapi/gstvaapidecode.h b/gst/vaapi/gstvaapidecode.h
index 7b0dc610a4..23991cfea9 100644
--- a/gst/vaapi/gstvaapidecode.h
+++ b/gst/vaapi/gstvaapidecode.h
@@ -78,6 +78,7 @@ struct _GstVaapiDecode {
    GstCaps *allowed_caps;
    gint64 render_time_base;
    GstClockTime last_buffer_time;
+    guint current_frame_size;
 };
 
 struct _GstVaapiDecodeClass {
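
Note (not part of the patch): a minimal sketch of the bookkeeping pattern the diff above introduces, for readers unfamiliar with GstVideoDecoder's parse/drain flow. The ::parse path counts the bytes it hands to gst_video_decoder_add_to_frame() and clears the counter whenever a frame is submitted; the ::finish path submits any leftover bytes before flushing so the last partial frame is not dropped. Only gst_video_decoder_add_to_frame() and gst_video_decoder_have_frame() are real GstVideoDecoder API; the helper names and the counter-by-pointer parameter are hypothetical and exist only for this illustration.

/* Hypothetical helpers illustrating the pattern of the patch above. */
#include <gst/gst.h>
#include <gst/video/gstvideodecoder.h>

/* ::parse path: move a parsed unit into the current frame and remember how
 * many bytes the pending frame has accumulated so far. */
static GstFlowReturn
push_parsed_unit (GstVideoDecoder * vdec, guint * current_frame_size,
    guint unit_size, gboolean frame_done)
{
  if (unit_size > 0) {
    /* Transfer unit_size bytes from the input adapter into the frame. */
    gst_video_decoder_add_to_frame (vdec, unit_size);
    *current_frame_size += unit_size;
  }

  if (frame_done) {
    /* A complete frame was collected: submit it and reset the counter. */
    *current_frame_size = 0;
    return gst_video_decoder_have_frame (vdec);
  }
  return GST_FLOW_OK;
}

/* ::finish/drain path: if ::parse added bytes to a frame that was never
 * completed, submit it before flushing so it is not silently lost. */
static GstFlowReturn
drain_pending_frame (GstVideoDecoder * vdec, guint * current_frame_size)
{
  if (*current_frame_size > 0) {
    *current_frame_size = 0;
    return gst_video_decoder_have_frame (vdec);
  }
  return GST_FLOW_OK;
}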