videoencoder: add API to push subframes

Introduce a new API so encoders can split the encoding into subframes.
This can be useful to reduce the overall latency, as we no longer need to
wait for the full frame to be encoded before starting to send or decode it.
This commit is contained in:
Guillaume Desmottes 2018-08-31 12:09:57 +02:00 committed by GStreamer Merge Bot
parent 789803fd6d
commit 3e32896912
3 changed files with 132 additions and 4 deletions

View file

@ -2332,13 +2332,19 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
GstFlowReturn ret = GST_FLOW_OK; GstFlowReturn ret = GST_FLOW_OK;
GstVideoEncoderClass *encoder_class; GstVideoEncoderClass *encoder_class;
gboolean send_headers = FALSE; gboolean send_headers = FALSE;
gboolean discont = (frame->presentation_frame_number == 0); gboolean discont = FALSE;
GstBuffer *buffer; GstBuffer *buffer;
g_return_val_if_fail (frame, GST_FLOW_ERROR);
discont = (frame->presentation_frame_number == 0
&& frame->abidata.ABI.num_subframes == 0);
encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder); encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
GST_LOG_OBJECT (encoder, GST_LOG_OBJECT (encoder,
"finish frame fpn %d", frame->presentation_frame_number); "finish frame fpn %d sync point: %d", frame->presentation_frame_number,
GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame));
GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT
", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts), ", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts),
@ -2350,7 +2356,8 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
if (ret != GST_FLOW_OK) if (ret != GST_FLOW_OK)
goto done; goto done;
gst_video_encoder_push_pending_unlocked (encoder, frame); if (frame->abidata.ABI.num_subframes == 0)
gst_video_encoder_push_pending_unlocked (encoder, frame);
/* no buffer data means this frame is skipped/dropped */ /* no buffer data means this frame is skipped/dropped */
if (!frame->output_buffer) { if (!frame->output_buffer) {
@ -2364,7 +2371,8 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers); gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);
if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) { if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
&& frame->abidata.ABI.num_subframes == 0) {
priv->distance_from_sync = 0; priv->distance_from_sync = 0;
GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT); GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
/* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */ /* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
@ -2436,6 +2444,121 @@ done:
return ret; return ret;
} }
/**
 * gst_video_encoder_finish_subframe:
 * @encoder: a #GstVideoEncoder
 * @frame: (transfer none): a #GstVideoCodecFrame being encoded
 *
 * If multiple subframes are produced for one input frame then use this method
 * for each subframe, except for the last one. Before calling this function,
 * you need to fill frame->output_buffer with the encoded buffer to push.
 * You must call #gst_video_encoder_finish_frame() for the last sub-frame
 * to tell the encoder that the frame has been fully encoded.
 *
 * This function will change the metadata of @frame and frame->output_buffer
 * will be pushed downstream.
 *
 * Returns: a #GstFlowReturn resulting from pushing the buffer downstream.
 *
 * Since: 1.18
 */
GstFlowReturn
gst_video_encoder_finish_subframe (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  GstVideoEncoderClass *encoder_class;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *subframe_buffer = NULL;
  gboolean discont = FALSE;
  gboolean send_headers = FALSE;

  g_return_val_if_fail (frame, GST_FLOW_ERROR);
  g_return_val_if_fail (frame->output_buffer, GST_FLOW_ERROR);

  /* Take ownership of the frame's ref on the output buffer: it is either
   * pushed downstream below or unreffed in the error path, and
   * frame->output_buffer is reset to NULL in both cases. */
  subframe_buffer = frame->output_buffer;

  /* Only the very first subframe (num_subframes == 0) of the very first
   * frame of the stream can carry the DISCONT flag. */
  discont = (frame->presentation_frame_number == 0
      && frame->abidata.ABI.num_subframes == 0);

  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  GST_LOG_OBJECT (encoder,
      "finish subframe %u of frame fpn %u PTS %" GST_TIME_FORMAT ", DTS %"
      GST_TIME_FORMAT " sync point: %d", frame->abidata.ABI.num_subframes,
      frame->presentation_frame_number, GST_TIME_ARGS (frame->pts),
      GST_TIME_ARGS (frame->dts), GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame));

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  /* Bail out early (e.g. flushing, no output state); note that even then the
   * subframe buffer is consumed and num_subframes is bumped in 'done:'. */
  ret = gst_video_encoder_can_push_unlocked (encoder);
  if (ret != GST_FLOW_OK)
    goto done;

  /* Pending forced key unit: request headers to be resent with this
   * sync-point subframe. */
  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit)
    gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);

  /* Push pending events only for the first subframe ie segment event.
   * Push new incoming events on finish_frame otherwise.
   */
  if (frame->abidata.ABI.num_subframes == 0)
    gst_video_encoder_push_pending_unlocked (encoder, frame);

  /* Only the first subframe of a keyframe is a non-delta unit; every later
   * subframe is marked as a delta unit. */
  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
      && frame->abidata.ABI.num_subframes == 0) {
    GST_BUFFER_FLAG_UNSET (subframe_buffer, GST_BUFFER_FLAG_DELTA_UNIT);

    /* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
    if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) {
      frame->dts = frame->pts;
    }
  } else {
    GST_BUFFER_FLAG_SET (subframe_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  /* Derive a DTS if the subclass did not provide one, then stamp the
   * subframe buffer with the frame's timing. All subframes of a frame
   * share the frame's PTS/DTS/duration. */
  gst_video_encoder_infer_dts_unlocked (encoder, frame);

  GST_BUFFER_PTS (subframe_buffer) = frame->pts;
  GST_BUFFER_DTS (subframe_buffer) = frame->dts;
  GST_BUFFER_DURATION (subframe_buffer) = frame->duration;

  GST_OBJECT_LOCK (encoder);
  /* update rate estimate */
  priv->bytes += gst_buffer_get_size (subframe_buffer);
  GST_OBJECT_UNLOCK (encoder);

  /* Headers requested by the forced key unit above are (re)sent before the
   * subframe buffer. */
  if (G_UNLIKELY (send_headers))
    priv->new_headers = TRUE;

  gst_video_encoder_send_header_unlocked (encoder, &discont);

  if (G_UNLIKELY (discont)) {
    GST_LOG_OBJECT (encoder, "marking discont buffer: %" GST_PTR_FORMAT,
        subframe_buffer);
    GST_BUFFER_FLAG_SET (subframe_buffer, GST_BUFFER_FLAG_DISCONT);
  }

  /* Give the subclass a last chance to update the frame before pushing. */
  if (encoder_class->pre_push) {
    ret = encoder_class->pre_push (encoder, frame);
  }

  gst_video_encoder_transform_meta_unlocked (encoder, frame);

  /* gst_pad_push() takes ownership of the buffer ref; clear the local
   * pointer so the cleanup below does not unref it a second time. */
  if (ret == GST_FLOW_OK) {
    ret = gst_pad_push (encoder->srcpad, subframe_buffer);
    subframe_buffer = NULL;
  }

done:
  /* The subframe is consumed either way: count it and drop the buffer ref
   * if it was not pushed. frame->output_buffer is cleared so the caller can
   * fill in the next subframe (or so the frame does not unref it again). */
  frame->abidata.ABI.num_subframes++;
  if (subframe_buffer)
    gst_buffer_unref (subframe_buffer);
  frame->output_buffer = NULL;

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
  return ret;
}
/** /**
* gst_video_encoder_get_output_state: * gst_video_encoder_get_output_state:
* @encoder: a #GstVideoEncoder * @encoder: a #GstVideoEncoder

View file

@ -336,6 +336,10 @@ GST_VIDEO_API
GstFlowReturn gst_video_encoder_finish_frame (GstVideoEncoder *encoder, GstFlowReturn gst_video_encoder_finish_frame (GstVideoEncoder *encoder,
GstVideoCodecFrame *frame); GstVideoCodecFrame *frame);
GST_VIDEO_API
GstFlowReturn gst_video_encoder_finish_subframe (GstVideoEncoder * encoder,
GstVideoCodecFrame * frame);
GST_VIDEO_API GST_VIDEO_API
GstCaps * gst_video_encoder_proxy_getcaps (GstVideoEncoder * enc, GstCaps * gst_video_encoder_proxy_getcaps (GstVideoEncoder * enc,
GstCaps * caps, GstCaps * caps,

View file

@ -261,6 +261,7 @@ struct _GstVideoCodecFrame
struct { struct {
GstClockTime ts; GstClockTime ts;
GstClockTime ts2; GstClockTime ts2;
guint num_subframes;
} ABI; } ABI;
gpointer padding[GST_PADDING_LARGE]; gpointer padding[GST_PADDING_LARGE];
} abidata; } abidata;