videoencoder: factor out logic from gst_video_encoder_finish_frame()

No semantic change; I'm going to reuse all those functions in a new
finish_slice() method.
Guillaume Desmottes 2018-08-31 10:55:30 +02:00 committed by GStreamer Merge Bot
parent af909c6d82
commit 789803fd6d
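
After this refactor, gst_video_encoder_finish_frame() is reduced to a sequence of small *_unlocked helpers: check that we can push, flush pending events and tags, handle forced key units, infer the DTS, send headers and transform metas. Purely as an illustration of the intended reuse, a finish_slice()-style method could compose the same helpers along the lines of the sketch below; the name, signature and body are assumptions and are not part of this commit.

/* Hypothetical sketch only -- not part of this commit.  It shows how a
 * finish_slice()-style method could compose the helpers factored out in the
 * diff below, pushing one encoded slice downstream without finishing the
 * whole frame. */
static GstFlowReturn
gst_video_encoder_finish_slice_sketch (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame, GstBuffer * slice)
{
  GstFlowReturn ret;
  gboolean send_headers = FALSE;
  gboolean discont = (frame->presentation_frame_number == 0);

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  /* same negotiation/flushing checks as finish_frame() */
  ret = gst_video_encoder_can_push_unlocked (encoder);
  if (ret != GST_FLOW_OK)
    goto done;

  /* serialized events and tags that arrived before this frame */
  gst_video_encoder_push_pending_unlocked (encoder, frame);

  /* forced key units, DTS guessing and header pushing are shared too */
  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
      && encoder->priv->force_key_unit)
    gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);

  gst_video_encoder_infer_dts_unlocked (encoder, frame);

  if (G_UNLIKELY (send_headers))
    gst_video_encoder_send_header_unlocked (encoder, &discont);

  if (G_UNLIKELY (discont))
    GST_BUFFER_FLAG_SET (slice, GST_BUFFER_FLAG_DISCONT);

  /* push only this slice; the frame itself is finished later */
  ret = gst_pad_push (encoder->srcpad, slice);

done:
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
  return ret;
}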


@@ -2087,46 +2087,11 @@ gst_video_encoder_drop_frame (GstVideoEncoder * enc, GstVideoCodecFrame * frame)
     gst_element_post_message (GST_ELEMENT_CAST (enc), qos_msg);
 }
 
-/**
- * gst_video_encoder_finish_frame:
- * @encoder: a #GstVideoEncoder
- * @frame: (transfer full): an encoded #GstVideoCodecFrame
- *
- * @frame must have a valid encoded data buffer, whose metadata fields
- * are then appropriately set according to frame data or no buffer at
- * all if the frame should be dropped.
- * It is subsequently pushed downstream or provided to @pre_push.
- * In any case, the frame is considered finished and released.
- *
- * After calling this function the output buffer of the frame is to be
- * considered read-only. This function will also change the metadata
- * of the buffer.
- *
- * Returns: a #GstFlowReturn resulting from sending data downstream
- */
-GstFlowReturn
-gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
-    GstVideoCodecFrame * frame)
+static GstFlowReturn
+gst_video_encoder_can_push_unlocked (GstVideoEncoder * encoder)
 {
   GstVideoEncoderPrivate *priv = encoder->priv;
-  GstFlowReturn ret = GST_FLOW_OK;
-  GstVideoEncoderClass *encoder_class;
-  GList *l;
-  gboolean send_headers = FALSE;
-  gboolean discont = (frame->presentation_frame_number == 0);
-  GstBuffer *buffer;
-  gboolean needs_reconfigure = FALSE;
-
-  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
-
-  GST_LOG_OBJECT (encoder,
-      "finish frame fpn %d", frame->presentation_frame_number);
-
-  GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT
-      ", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts),
-      GST_TIME_ARGS (frame->dts));
-
-  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+  gboolean needs_reconfigure;
 
   needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
   if (G_UNLIKELY (priv->output_state_changed || (priv->output_state
@@ -2134,15 +2099,28 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
     if (!gst_video_encoder_negotiate_unlocked (encoder)) {
       gst_pad_mark_reconfigure (encoder->srcpad);
       if (GST_PAD_IS_FLUSHING (encoder->srcpad))
-        ret = GST_FLOW_FLUSHING;
+        return GST_FLOW_FLUSHING;
       else
-        ret = GST_FLOW_NOT_NEGOTIATED;
-      goto done;
+        return GST_FLOW_NOT_NEGOTIATED;
     }
   }
 
-  if (G_UNLIKELY (priv->output_state == NULL))
-    goto no_output_state;
+  if (G_UNLIKELY (priv->output_state == NULL)) {
+    GST_ERROR_OBJECT (encoder, "Output state was not configured");
+    GST_ELEMENT_ERROR (encoder, LIBRARY, FAILED,
+        ("Output state was not configured"), (NULL));
+    return GST_FLOW_ERROR;
+  }
+
+  return GST_FLOW_OK;
+}
+
+static void
+gst_video_encoder_push_pending_unlocked (GstVideoEncoder * encoder,
+    GstVideoCodecFrame * frame)
+{
+  GstVideoEncoderPrivate *priv = encoder->priv;
+  GList *l;
 
   /* Push all pending events that arrived before this frame */
   for (l = priv->frames; l; l = l->next) {
@@ -2162,16 +2140,111 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
   }
 
   gst_video_encoder_check_and_push_tags (encoder);
+}
 
-  /* no buffer data means this frame is skipped/dropped */
-  if (!frame->output_buffer) {
-    gst_video_encoder_drop_frame (encoder, frame);
-    goto done;
-  }
-
-  priv->processed++;
-
-  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit) {
+static void
+gst_video_encoder_infer_dts_unlocked (GstVideoEncoder * encoder,
+    GstVideoCodecFrame * frame)
+{
+  /* DTS is expected to be monotonously increasing,
+   * so a good guess is the lowest unsent PTS (all being OK) */
+  GstVideoEncoderPrivate *priv = encoder->priv;
+  GList *l;
+  GstClockTime min_ts = GST_CLOCK_TIME_NONE;
+  GstVideoCodecFrame *oframe = NULL;
+  gboolean seen_none = FALSE;
+
+  /* some maintenance regardless */
+  for (l = priv->frames; l; l = l->next) {
+    GstVideoCodecFrame *tmp = l->data;
+
+    if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts)) {
+      seen_none = TRUE;
+      continue;
+    }
+
+    if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts < min_ts) {
+      min_ts = tmp->abidata.ABI.ts;
+      oframe = tmp;
+    }
+  }
+
+  /* save a ts if needed */
+  if (oframe && oframe != frame) {
+    oframe->abidata.ABI.ts = frame->abidata.ABI.ts;
+  }
+
+  /* and set if needed */
+  if (!GST_CLOCK_TIME_IS_VALID (frame->dts) && !seen_none) {
+    frame->dts = min_ts;
+    GST_DEBUG_OBJECT (encoder,
+        "no valid DTS, using oldest PTS %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (frame->pts));
+  }
+}
+
+static void
+gst_video_encoder_send_header_unlocked (GstVideoEncoder * encoder,
+    gboolean * discont)
+{
+  GstVideoEncoderPrivate *priv = encoder->priv;
+
+  if (G_UNLIKELY (priv->new_headers)) {
+    GList *tmp, *copy = NULL;
+
+    GST_DEBUG_OBJECT (encoder, "Sending headers");
+
+    /* First make all buffers metadata-writable */
+    for (tmp = priv->headers; tmp; tmp = tmp->next) {
+      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
+
+      copy = g_list_append (copy, gst_buffer_make_writable (tmpbuf));
+    }
+    g_list_free (priv->headers);
+    priv->headers = copy;
+
+    for (tmp = priv->headers; tmp; tmp = tmp->next) {
+      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
+
+      GST_OBJECT_LOCK (encoder);
+      priv->bytes += gst_buffer_get_size (tmpbuf);
+      GST_OBJECT_UNLOCK (encoder);
+
+      if (G_UNLIKELY (*discont)) {
+        GST_LOG_OBJECT (encoder, "marking discont");
+        GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
+        *discont = FALSE;
+      }
+
+      gst_pad_push (encoder->srcpad, gst_buffer_ref (tmpbuf));
+    }
+    priv->new_headers = FALSE;
+  }
+}
+
+static void
+gst_video_encoder_transform_meta_unlocked (GstVideoEncoder * encoder,
+    GstVideoCodecFrame * frame)
+{
+  GstVideoEncoderClass *encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+
+  if (encoder_class->transform_meta) {
+    if (G_LIKELY (frame->input_buffer)) {
+      CopyMetaData data;
+
+      data.encoder = encoder;
+      data.frame = frame;
+      gst_buffer_foreach_meta (frame->input_buffer, foreach_metadata, &data);
+    } else {
+      GST_FIXME_OBJECT (encoder,
+          "Can't copy metadata because input frame disappeared");
+    }
+  }
+}
+
+static void
+gst_video_encoder_send_key_unit_unlocked (GstVideoEncoder * encoder,
+    GstVideoCodecFrame * frame, gboolean * send_headers)
+{
+  GstVideoEncoderPrivate *priv = encoder->priv;
   GstClockTime stream_time, running_time;
   GstEvent *ev;
   ForcedKeyUnitEvent *fevt = NULL;
@@ -2219,13 +2292,12 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
         frame->pts);
 
     ev = gst_video_event_new_downstream_force_key_unit
-        (frame->pts, stream_time, running_time,
-        fevt->all_headers, fevt->count);
+        (frame->pts, stream_time, running_time, fevt->all_headers, fevt->count);
 
     gst_video_encoder_push_event (encoder, ev);
 
     if (fevt->all_headers)
-      send_headers = TRUE;
+      *send_headers = TRUE;
 
     GST_DEBUG_OBJECT (encoder,
         "Forced key unit: running-time %" GST_TIME_FORMAT
@@ -2233,8 +2305,65 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
         GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
     forced_key_unit_event_free (fevt);
   }
-  }
+}
+
+/**
+ * gst_video_encoder_finish_frame:
+ * @encoder: a #GstVideoEncoder
+ * @frame: (transfer full): an encoded #GstVideoCodecFrame
+ *
+ * @frame must have a valid encoded data buffer, whose metadata fields
+ * are then appropriately set according to frame data or no buffer at
+ * all if the frame should be dropped.
+ * It is subsequently pushed downstream or provided to @pre_push.
+ * In any case, the frame is considered finished and released.
+ *
+ * After calling this function the output buffer of the frame is to be
+ * considered read-only. This function will also change the metadata
+ * of the buffer.
+ *
+ * Returns: a #GstFlowReturn resulting from sending data downstream
+ */
+GstFlowReturn
+gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
+    GstVideoCodecFrame * frame)
+{
+  GstVideoEncoderPrivate *priv = encoder->priv;
+  GstFlowReturn ret = GST_FLOW_OK;
+  GstVideoEncoderClass *encoder_class;
+  gboolean send_headers = FALSE;
+  gboolean discont = (frame->presentation_frame_number == 0);
+  GstBuffer *buffer;
+
+  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+
+  GST_LOG_OBJECT (encoder,
+      "finish frame fpn %d", frame->presentation_frame_number);
+
+  GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT
+      ", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts),
+      GST_TIME_ARGS (frame->dts));
+
+  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+
+  ret = gst_video_encoder_can_push_unlocked (encoder);
+  if (ret != GST_FLOW_OK)
+    goto done;
+
+  gst_video_encoder_push_pending_unlocked (encoder, frame);
+
+  /* no buffer data means this frame is skipped/dropped */
+  if (!frame->output_buffer) {
+    gst_video_encoder_drop_frame (encoder, frame);
+    goto done;
+  }
+
+  priv->processed++;
+
+  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit)
+    gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);
 
   if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
     priv->distance_from_sync = 0;
     GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
@@ -2246,40 +2375,7 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
     GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
   }
 
-  /* DTS is expected monotone ascending,
-   * so a good guess is the lowest unsent PTS (all being OK) */
-  {
-    GstClockTime min_ts = GST_CLOCK_TIME_NONE;
-    GstVideoCodecFrame *oframe = NULL;
-    gboolean seen_none = FALSE;
-
-    /* some maintenance regardless */
-    for (l = priv->frames; l; l = l->next) {
-      GstVideoCodecFrame *tmp = l->data;
-
-      if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts)) {
-        seen_none = TRUE;
-        continue;
-      }
-
-      if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts < min_ts) {
-        min_ts = tmp->abidata.ABI.ts;
-        oframe = tmp;
-      }
-    }
-
-    /* save a ts if needed */
-    if (oframe && oframe != frame) {
-      oframe->abidata.ABI.ts = frame->abidata.ABI.ts;
-    }
-
-    /* and set if needed */
-    if (!GST_CLOCK_TIME_IS_VALID (frame->dts) && !seen_none) {
-      frame->dts = min_ts;
-      GST_DEBUG_OBJECT (encoder,
-          "no valid DTS, using oldest PTS %" GST_TIME_FORMAT,
-          GST_TIME_ARGS (frame->pts));
-    }
-  }
+  gst_video_encoder_infer_dts_unlocked (encoder, frame);
 
   frame->distance_from_sync = priv->distance_from_sync;
   priv->distance_from_sync++;
@@ -2299,38 +2395,8 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
   }
   GST_OBJECT_UNLOCK (encoder);
 
-  if (G_UNLIKELY (send_headers || priv->new_headers)) {
-    GList *tmp, *copy = NULL;
-
-    GST_DEBUG_OBJECT (encoder, "Sending headers");
-
-    /* First make all buffers metadata-writable */
-    for (tmp = priv->headers; tmp; tmp = tmp->next) {
-      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
-
-      copy = g_list_append (copy, gst_buffer_make_writable (tmpbuf));
-    }
-    g_list_free (priv->headers);
-    priv->headers = copy;
-
-    for (tmp = priv->headers; tmp; tmp = tmp->next) {
-      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
-
-      GST_OBJECT_LOCK (encoder);
-      priv->bytes += gst_buffer_get_size (tmpbuf);
-      GST_OBJECT_UNLOCK (encoder);
-
-      if (G_UNLIKELY (discont)) {
-        GST_LOG_OBJECT (encoder, "marking discont");
-        GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
-        discont = FALSE;
-      }
-
-      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
-      gst_pad_push (encoder->srcpad, gst_buffer_ref (tmpbuf));
-      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
-    }
-    priv->new_headers = FALSE;
-  }
+  if (G_UNLIKELY (send_headers))
+    gst_video_encoder_send_header_unlocked (encoder, &discont);
 
   if (G_UNLIKELY (discont)) {
     GST_LOG_OBJECT (encoder, "marking discont");
@@ -2340,18 +2406,7 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
   if (encoder_class->pre_push)
     ret = encoder_class->pre_push (encoder, frame);
 
-  if (encoder_class->transform_meta) {
-    if (G_LIKELY (frame->input_buffer)) {
-      CopyMetaData data;
-
-      data.encoder = encoder;
-      data.frame = frame;
-      gst_buffer_foreach_meta (frame->input_buffer, foreach_metadata, &data);
-    } else {
-      GST_WARNING_OBJECT (encoder,
-          "Can't copy metadata because input frame disappeared");
-    }
-  }
+  gst_video_encoder_transform_meta_unlocked (encoder, frame);
 
   /* Get an additional ref to the buffer, which is going to be pushed
    * downstream, the original ref is owned by the frame */
@@ -2379,15 +2434,6 @@ done:
   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
 
   return ret;
-
-  /* ERRORS */
-no_output_state:
-  {
-    gst_video_encoder_release_frame (encoder, frame);
-    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
-    GST_ERROR_OBJECT (encoder, "Output state was not configured");
-    return GST_FLOW_ERROR;
-  }
 }
 
 /**
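
For reference, the gst_video_encoder_finish_frame() documentation above defines the contract from the subclass side: fill frame->output_buffer with encoded data (or leave it unset to drop the frame) and hand the frame back, after which the buffer is read-only. A minimal sketch of a hypothetical subclass's handle_frame() vmethod, using only public GstVideoEncoder API (the element name, sizes and keyframe detection are placeholders), might look like:

/* Hypothetical subclass sketch, assuming the public GstVideoEncoder API:
 * encode the input, store the result in frame->output_buffer and hand the
 * frame back to the base class with gst_video_encoder_finish_frame(). */
static GstFlowReturn
my_encoder_handle_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
{
  GstFlowReturn ret;
  gboolean is_keyframe = TRUE;  /* a real codec would report this */
  gsize encoded_size = 4096;    /* placeholder size for the sketch */

  /* ... run the actual codec on frame->input_buffer here ... */

  /* allocate the output buffer through the base class (downstream allocator) */
  ret = gst_video_encoder_allocate_output_frame (encoder, frame, encoded_size);
  if (ret != GST_FLOW_OK) {
    gst_video_codec_frame_unref (frame);        /* give back our reference */
    return ret;
  }

  /* ... copy the encoded bytes into frame->output_buffer, e.g. with
   *     gst_buffer_fill (frame->output_buffer, 0, data, encoded_size) ... */

  if (is_keyframe)
    GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);

  /* transfers ownership of frame; its output buffer is read-only afterwards */
  return gst_video_encoder_finish_frame (encoder, frame);
}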