videoencoder: factor out logic from gst_video_encoder_finish_frame()

No semantic change; I'm going to reuse all those functions in a new
finish_slice() method.
This commit is contained in:
Stéphane Cerveau 2019-12-19 21:59:10 +00:00 committed by GStreamer Merge Bot
parent 61aeb4bbc3
commit b1ec312b8e
5 changed files with 695 additions and 197 deletions

View file

@ -2087,6 +2087,226 @@ gst_video_encoder_drop_frame (GstVideoEncoder * enc, GstVideoCodecFrame * frame)
gst_element_post_message (GST_ELEMENT_CAST (enc), qos_msg);
}
/* Check that the encoder is ready to push output downstream: renegotiate
 * the source pad caps when required and verify that an output state has
 * been configured.  Must be called with the stream lock held. */
static GstFlowReturn
gst_video_encoder_can_push_unlocked (GstVideoEncoder * encoder)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  gboolean reconfigure_pending = gst_pad_check_reconfigure (encoder->srcpad);

  /* Renegotiate when the output state changed, or when downstream requested
   * a reconfiguration while an output state exists */
  if (G_UNLIKELY (priv->output_state_changed
          || (priv->output_state && reconfigure_pending))
      && !gst_video_encoder_negotiate_unlocked (encoder)) {
    gst_pad_mark_reconfigure (encoder->srcpad);
    return GST_PAD_IS_FLUSHING (encoder->srcpad) ? GST_FLOW_FLUSHING :
        GST_FLOW_NOT_NEGOTIATED;
  }

  if (G_UNLIKELY (priv->output_state == NULL)) {
    GST_ERROR_OBJECT (encoder, "Output state was not configured");
    GST_ELEMENT_ERROR (encoder, LIBRARY, FAILED,
        ("Output state was not configured"), (NULL));
    return GST_FLOW_ERROR;
  }

  return GST_FLOW_OK;
}
/* Push the serialized events queued on every pending frame up to and
 * including @frame, then flush any pending tags.
 * Must be called with the stream lock held. */
static void
gst_video_encoder_push_pending_unlocked (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  GList *walk;

  for (walk = priv->frames; walk != NULL; walk = walk->next) {
    GstVideoCodecFrame *pending = walk->data;

    if (pending->events != NULL) {
      GList *ev;

      /* Walk each frame's event list from the tail so the events go out
       * in reverse list order */
      for (ev = g_list_last (pending->events); ev != NULL; ev = ev->prev)
        gst_video_encoder_push_event (encoder, ev->data);
      g_list_free (pending->events);
      pending->events = NULL;
    }

    /* Stop once the target frame (inclusive) has been handled */
    if (pending == frame)
      break;
  }

  gst_video_encoder_check_and_push_tags (encoder);
}
/* DTS is expected to be monotonically increasing, so when @frame has no
 * valid DTS, a good guess is the lowest unsent PTS among the pending
 * frames (only if every pending frame has a usable timestamp).
 * Must be called with the stream lock held. */
static void
gst_video_encoder_infer_dts_unlocked (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  GList *l;
  GstClockTime min_ts = GST_CLOCK_TIME_NONE;
  GstVideoCodecFrame *oframe = NULL;
  gboolean seen_none = FALSE;

  /* Find the pending frame holding the lowest timestamp, remembering
   * whether any pending frame had no valid timestamp at all */
  for (l = priv->frames; l; l = l->next) {
    GstVideoCodecFrame *tmp = l->data;

    if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts)) {
      seen_none = TRUE;
      continue;
    }

    if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts < min_ts) {
      min_ts = tmp->abidata.ABI.ts;
      oframe = tmp;
    }
  }

  /* The lowest ts is being consumed here, so hand this frame's ts over to
   * the frame that owned the minimum, keeping the bookkeeping consistent */
  if (oframe && oframe != frame) {
    oframe->abidata.ABI.ts = frame->abidata.ABI.ts;
  }

  if (!GST_CLOCK_TIME_IS_VALID (frame->dts) && !seen_none) {
    frame->dts = min_ts;
    /* Fix: log the DTS value actually chosen (previously this printed
     * frame->pts, which is not the value being used) */
    GST_DEBUG_OBJECT (encoder,
        "no valid DTS, using oldest PTS %" GST_TIME_FORMAT,
        GST_TIME_ARGS (frame->dts));
  }
}
/* Push the configured header buffers downstream when new headers are
 * pending (priv->new_headers).  Each header's size is accounted in the
 * byte counter used for bitrate estimation, and the first header pushed
 * while *discont is set carries GST_BUFFER_FLAG_DISCONT (clearing
 * *discont).
 * NOTE(review): this pushes on the srcpad without releasing the stream
 * lock, unlike the previous inline header code in finish_frame() which
 * unlocked around gst_pad_push() — confirm this cannot deadlock. */
static void
gst_video_encoder_send_header_unlocked (GstVideoEncoder * encoder,
    gboolean * discont)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  if (G_UNLIKELY (priv->new_headers)) {
    GList *tmp, *copy = NULL;
    GST_DEBUG_OBJECT (encoder, "Sending headers");
    /* First make all buffers metadata-writable */
    for (tmp = priv->headers; tmp; tmp = tmp->next) {
      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
      copy = g_list_append (copy, gst_buffer_make_writable (tmpbuf));
    }
    g_list_free (priv->headers);
    priv->headers = copy;
    for (tmp = priv->headers; tmp; tmp = tmp->next) {
      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
      /* Account header bytes in the rate estimate, under the object lock */
      GST_OBJECT_LOCK (encoder);
      priv->bytes += gst_buffer_get_size (tmpbuf);
      GST_OBJECT_UNLOCK (encoder);
      if (G_UNLIKELY (*discont)) {
        GST_LOG_OBJECT (encoder, "marking discont");
        GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
        *discont = FALSE;
      }
      /* Push a ref: the headers list keeps ownership for later reuse */
      gst_pad_push (encoder->srcpad, gst_buffer_ref (tmpbuf));
    }
    priv->new_headers = FALSE;
  }
}
/* Copy metadata from the frame's input buffer to its output through the
 * subclass's transform_meta vfunc, when one is implemented. */
static void
gst_video_encoder_transform_meta_unlocked (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
  CopyMetaData data;

  if (!klass->transform_meta)
    return;

  if (G_UNLIKELY (frame->input_buffer == NULL)) {
    GST_FIXME_OBJECT (encoder,
        "Can't copy metadata because input frame disappeared");
    return;
  }

  data.encoder = encoder;
  data.frame = frame;
  gst_buffer_foreach_meta (frame->input_buffer, foreach_metadata, &data);
}
/* Scan the pending forced-key-unit requests and, if one applies to
 * @frame, emit a downstream force-key-unit event for it.  Sets
 * *send_headers to TRUE when the matched request asked for all headers
 * to be resent.  The request list is walked and modified under the
 * object lock; the event is pushed after the lock is dropped. */
static void
gst_video_encoder_send_key_unit_unlocked (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame, gboolean * send_headers)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  GstClockTime stream_time, running_time;
  GstEvent *ev;
  ForcedKeyUnitEvent *fevt = NULL;
  GList *l;
  running_time =
      gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
      frame->pts);
  GST_OBJECT_LOCK (encoder);
  for (l = priv->force_key_unit; l; l = l->next) {
    ForcedKeyUnitEvent *tmp = l->data;
    /* Skip non-pending keyunits */
    if (!tmp->pending)
      continue;
    /* Exact match using the frame id */
    if (frame->system_frame_number == tmp->frame_id) {
      fevt = tmp;
      break;
    }
    /* Simple case, keyunit ASAP */
    if (tmp->running_time == GST_CLOCK_TIME_NONE) {
      fevt = tmp;
      break;
    }
    /* Event for before this frame */
    if (tmp->running_time <= running_time) {
      fevt = tmp;
      break;
    }
  }
  /* Detach the matched request while still holding the object lock */
  if (fevt) {
    priv->force_key_unit = g_list_remove (priv->force_key_unit, fevt);
  }
  GST_OBJECT_UNLOCK (encoder);
  if (fevt) {
    stream_time =
        gst_segment_to_stream_time (&encoder->output_segment, GST_FORMAT_TIME,
        frame->pts);
    ev = gst_video_event_new_downstream_force_key_unit
        (frame->pts, stream_time, running_time, fevt->all_headers, fevt->count);
    /* Event is pushed outside the object lock */
    gst_video_encoder_push_event (encoder, ev);
    if (fevt->all_headers)
      *send_headers = TRUE;
    GST_DEBUG_OBJECT (encoder,
        "Forced key unit: running-time %" GST_TIME_FORMAT
        ", all_headers %d, count %u",
        GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
    forced_key_unit_event_free (fevt);
  }
}
/**
* gst_video_encoder_finish_frame:
* @encoder: a #GstVideoEncoder
@ -2111,16 +2331,20 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
GstVideoEncoderPrivate *priv = encoder->priv;
GstFlowReturn ret = GST_FLOW_OK;
GstVideoEncoderClass *encoder_class;
GList *l;
gboolean send_headers = FALSE;
gboolean discont = (frame->presentation_frame_number == 0);
gboolean discont = FALSE;
GstBuffer *buffer;
gboolean needs_reconfigure = FALSE;
g_return_val_if_fail (frame, GST_FLOW_ERROR);
discont = (frame->presentation_frame_number == 0
&& frame->abidata.ABI.num_subframes == 0);
encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
GST_LOG_OBJECT (encoder,
"finish frame fpn %d", frame->presentation_frame_number);
"finish frame fpn %d sync point: %d", frame->presentation_frame_number,
GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame));
GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT
", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts),
@ -2128,40 +2352,12 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
if (G_UNLIKELY (priv->output_state_changed || (priv->output_state
&& needs_reconfigure))) {
if (!gst_video_encoder_negotiate_unlocked (encoder)) {
gst_pad_mark_reconfigure (encoder->srcpad);
if (GST_PAD_IS_FLUSHING (encoder->srcpad))
ret = GST_FLOW_FLUSHING;
else
ret = GST_FLOW_NOT_NEGOTIATED;
goto done;
}
}
ret = gst_video_encoder_can_push_unlocked (encoder);
if (ret != GST_FLOW_OK)
goto done;
if (G_UNLIKELY (priv->output_state == NULL))
goto no_output_state;
/* Push all pending events that arrived before this frame */
for (l = priv->frames; l; l = l->next) {
GstVideoCodecFrame *tmp = l->data;
if (tmp->events) {
GList *k;
for (k = g_list_last (tmp->events); k; k = k->prev)
gst_video_encoder_push_event (encoder, k->data);
g_list_free (tmp->events);
tmp->events = NULL;
}
if (tmp == frame)
break;
}
gst_video_encoder_check_and_push_tags (encoder);
if (frame->abidata.ABI.num_subframes == 0)
gst_video_encoder_push_pending_unlocked (encoder, frame);
/* no buffer data means this frame is skipped/dropped */
if (!frame->output_buffer) {
@ -2171,71 +2367,12 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
priv->processed++;
if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit) {
GstClockTime stream_time, running_time;
GstEvent *ev;
ForcedKeyUnitEvent *fevt = NULL;
GList *l;
if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit)
gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);
running_time =
gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
frame->pts);
GST_OBJECT_LOCK (encoder);
for (l = priv->force_key_unit; l; l = l->next) {
ForcedKeyUnitEvent *tmp = l->data;
/* Skip non-pending keyunits */
if (!tmp->pending)
continue;
/* Exact match using the frame id */
if (frame->system_frame_number == tmp->frame_id) {
fevt = tmp;
break;
}
/* Simple case, keyunit ASAP */
if (tmp->running_time == GST_CLOCK_TIME_NONE) {
fevt = tmp;
break;
}
/* Event for before this frame */
if (tmp->running_time <= running_time) {
fevt = tmp;
break;
}
}
if (fevt) {
priv->force_key_unit = g_list_remove (priv->force_key_unit, fevt);
}
GST_OBJECT_UNLOCK (encoder);
if (fevt) {
stream_time =
gst_segment_to_stream_time (&encoder->output_segment, GST_FORMAT_TIME,
frame->pts);
ev = gst_video_event_new_downstream_force_key_unit
(frame->pts, stream_time, running_time,
fevt->all_headers, fevt->count);
gst_video_encoder_push_event (encoder, ev);
if (fevt->all_headers)
send_headers = TRUE;
GST_DEBUG_OBJECT (encoder,
"Forced key unit: running-time %" GST_TIME_FORMAT
", all_headers %d, count %u",
GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
forced_key_unit_event_free (fevt);
}
}
if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
&& frame->abidata.ABI.num_subframes == 0) {
priv->distance_from_sync = 0;
GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
/* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
@ -2246,40 +2383,7 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
}
/* DTS is expected monotone ascending,
* so a good guess is the lowest unsent PTS (all being OK) */
{
GstClockTime min_ts = GST_CLOCK_TIME_NONE;
GstVideoCodecFrame *oframe = NULL;
gboolean seen_none = FALSE;
/* some maintenance regardless */
for (l = priv->frames; l; l = l->next) {
GstVideoCodecFrame *tmp = l->data;
if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts)) {
seen_none = TRUE;
continue;
}
if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts < min_ts) {
min_ts = tmp->abidata.ABI.ts;
oframe = tmp;
}
}
/* save a ts if needed */
if (oframe && oframe != frame) {
oframe->abidata.ABI.ts = frame->abidata.ABI.ts;
}
/* and set if needed */
if (!GST_CLOCK_TIME_IS_VALID (frame->dts) && !seen_none) {
frame->dts = min_ts;
GST_DEBUG_OBJECT (encoder,
"no valid DTS, using oldest PTS %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->pts));
}
}
gst_video_encoder_infer_dts_unlocked (encoder, frame);
frame->distance_from_sync = priv->distance_from_sync;
priv->distance_from_sync++;
@ -2288,6 +2392,12 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
GST_BUFFER_DTS (frame->output_buffer) = frame->dts;
GST_BUFFER_DURATION (frame->output_buffer) = frame->duration;
/* At this stage we have a full frame in the subframe use case,
 * let's mark it to enable some latency optimizations
 * in some use cases like RTP. */
GST_BUFFER_FLAG_SET (frame->output_buffer, GST_VIDEO_BUFFER_FLAG_MARKER);
GST_OBJECT_LOCK (encoder);
/* update rate estimate */
priv->bytes += gst_buffer_get_size (frame->output_buffer);
@ -2299,38 +2409,10 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
}
GST_OBJECT_UNLOCK (encoder);
if (G_UNLIKELY (send_headers || priv->new_headers)) {
GList *tmp, *copy = NULL;
if (G_UNLIKELY (send_headers))
priv->new_headers = TRUE;
GST_DEBUG_OBJECT (encoder, "Sending headers");
/* First make all buffers metadata-writable */
for (tmp = priv->headers; tmp; tmp = tmp->next) {
GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
copy = g_list_append (copy, gst_buffer_make_writable (tmpbuf));
}
g_list_free (priv->headers);
priv->headers = copy;
for (tmp = priv->headers; tmp; tmp = tmp->next) {
GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
GST_OBJECT_LOCK (encoder);
priv->bytes += gst_buffer_get_size (tmpbuf);
GST_OBJECT_UNLOCK (encoder);
if (G_UNLIKELY (discont)) {
GST_LOG_OBJECT (encoder, "marking discont");
GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
discont = FALSE;
}
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
gst_pad_push (encoder->srcpad, gst_buffer_ref (tmpbuf));
GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
}
priv->new_headers = FALSE;
}
gst_video_encoder_send_header_unlocked (encoder, &discont);
if (G_UNLIKELY (discont)) {
GST_LOG_OBJECT (encoder, "marking discont");
@ -2340,18 +2422,7 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
if (encoder_class->pre_push)
ret = encoder_class->pre_push (encoder, frame);
if (encoder_class->transform_meta) {
if (G_LIKELY (frame->input_buffer)) {
CopyMetaData data;
data.encoder = encoder;
data.frame = frame;
gst_buffer_foreach_meta (frame->input_buffer, foreach_metadata, &data);
} else {
GST_WARNING_OBJECT (encoder,
"Can't copy metadata because input frame disappeared");
}
}
gst_video_encoder_transform_meta_unlocked (encoder, frame);
/* Get an additional ref to the buffer, which is going to be pushed
* downstream, the original ref is owned by the frame */
@ -2379,15 +2450,121 @@ done:
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
return ret;
}
/* ERRORS */
no_output_state:
{
gst_video_encoder_release_frame (encoder, frame);
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
GST_ERROR_OBJECT (encoder, "Output state was not configured");
return GST_FLOW_ERROR;
/**
 * gst_video_encoder_finish_subframe:
 * @encoder: a #GstVideoEncoder
 * @frame: (transfer none): a #GstVideoCodecFrame being encoded
 *
 * If multiple subframes are produced for one input frame then use this method
 * for each subframe, except for the last one. Before calling this function,
 * you need to fill frame->output_buffer with the encoded buffer to push.
 * You must call #gst_video_encoder_finish_frame() for the last sub-frame
 * to tell the encoder that the frame has been fully encoded.
 *
 * This function will change the metadata of @frame and frame->output_buffer
 * will be pushed downstream.
 *
 * Returns: a #GstFlowReturn resulting from pushing the buffer downstream.
 *
 * Since: 1.18
 */
GstFlowReturn
gst_video_encoder_finish_subframe (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  GstVideoEncoderClass *encoder_class;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *subframe_buffer = NULL;
  gboolean discont = FALSE;
  gboolean send_headers = FALSE;
  g_return_val_if_fail (frame, GST_FLOW_ERROR);
  g_return_val_if_fail (frame->output_buffer, GST_FLOW_ERROR);
  subframe_buffer = frame->output_buffer;
  /* Only the first subframe of the very first frame can be a discont */
  discont = (frame->presentation_frame_number == 0
      && frame->abidata.ABI.num_subframes == 0);
  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
  GST_LOG_OBJECT (encoder,
      "finish subframe %u of frame fpn %u PTS %" GST_TIME_FORMAT ", DTS %"
      GST_TIME_FORMAT " sync point: %d", frame->abidata.ABI.num_subframes,
      frame->presentation_frame_number, GST_TIME_ARGS (frame->pts),
      GST_TIME_ARGS (frame->dts), GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame));
  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  /* Negotiate / verify the output state before pushing anything */
  ret = gst_video_encoder_can_push_unlocked (encoder);
  if (ret != GST_FLOW_OK)
    goto done;
  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit)
    gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);
  /* Push pending events only for the first subframe ie segment event.
   * Push new incoming events on finish_frame otherwise.
   */
  if (frame->abidata.ABI.num_subframes == 0)
    gst_video_encoder_push_pending_unlocked (encoder, frame);
  /* Only the first subframe of a sync point is a non-delta unit */
  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
      && frame->abidata.ABI.num_subframes == 0) {
    GST_BUFFER_FLAG_UNSET (subframe_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
    /* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
    if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) {
      frame->dts = frame->pts;
    }
  } else {
    GST_BUFFER_FLAG_SET (subframe_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }
  gst_video_encoder_infer_dts_unlocked (encoder, frame);
  GST_BUFFER_PTS (subframe_buffer) = frame->pts;
  GST_BUFFER_DTS (subframe_buffer) = frame->dts;
  GST_BUFFER_DURATION (subframe_buffer) = frame->duration;
  GST_OBJECT_LOCK (encoder);
  /* update rate estimate */
  priv->bytes += gst_buffer_get_size (subframe_buffer);
  GST_OBJECT_UNLOCK (encoder);
  /* A forced key unit may require the headers to be resent */
  if (G_UNLIKELY (send_headers))
    priv->new_headers = TRUE;
  gst_video_encoder_send_header_unlocked (encoder, &discont);
  if (G_UNLIKELY (discont)) {
    GST_LOG_OBJECT (encoder, "marking discont buffer: %" GST_PTR_FORMAT,
        subframe_buffer);
    GST_BUFFER_FLAG_SET (subframe_buffer, GST_BUFFER_FLAG_DISCONT);
  }
  if (encoder_class->pre_push) {
    ret = encoder_class->pre_push (encoder, frame);
  }
  gst_video_encoder_transform_meta_unlocked (encoder, frame);
  if (ret == GST_FLOW_OK) {
    /* gst_pad_push() takes ownership of the buffer */
    ret = gst_pad_push (encoder->srcpad, subframe_buffer);
    subframe_buffer = NULL;
  }
done:
  /* The subframe counter advances even on error paths, and the frame no
   * longer owns the output buffer either way */
  frame->abidata.ABI.num_subframes++;
  if (subframe_buffer)
    gst_buffer_unref (subframe_buffer);
  frame->output_buffer = NULL;
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
  return ret;
}
/**

View file

@ -336,6 +336,10 @@ GST_VIDEO_API
GstFlowReturn gst_video_encoder_finish_frame (GstVideoEncoder *encoder,
GstVideoCodecFrame *frame);
GST_VIDEO_API
GstFlowReturn gst_video_encoder_finish_subframe (GstVideoEncoder * encoder,
GstVideoCodecFrame * frame);
GST_VIDEO_API
GstCaps * gst_video_encoder_proxy_getcaps (GstVideoEncoder * enc,
GstCaps * caps,

View file

@ -261,6 +261,7 @@ struct _GstVideoCodecFrame
struct {
GstClockTime ts;
GstClockTime ts2;
guint num_subframes;
} ABI;
gpointer padding[GST_PADDING_LARGE];
} abidata;

View file

@ -176,6 +176,8 @@ gboolean gst_video_frame_copy_plane (GstVideoFrame *dest, const GstVideoFr
* @GST_VIDEO_BUFFER_FLAG_BOTTOM_FIELD: The video frame has the bottom field only. This is
* the same as GST_VIDEO_BUFFER_FLAG_ONEFIELD
* (GST_VIDEO_BUFFER_FLAG_TFF flag unset) (Since: 1.16).
* @GST_VIDEO_BUFFER_FLAG_MARKER: The #GstBuffer contains the end of a video field or frame
* boundary such as the last subframe or packet (Since: 1.18).
* @GST_VIDEO_BUFFER_FLAG_LAST: Offset to define more flags
*
* Additional video buffer flags. These flags can potentially be used on any
@ -197,6 +199,8 @@ typedef enum {
GST_VIDEO_BUFFER_FLAG_ONEFIELD,
GST_VIDEO_BUFFER_FLAG_BOTTOM_FIELD = GST_VIDEO_BUFFER_FLAG_ONEFIELD,
GST_VIDEO_BUFFER_FLAG_MARKER = GST_BUFFER_FLAG_MARKER,
GST_VIDEO_BUFFER_FLAG_LAST = (GST_BUFFER_FLAG_LAST << 8)
} GstVideoBufferFlags;

View file

@ -38,6 +38,7 @@ static GList *events = NULL;
#define TEST_VIDEO_FPS_D 1
#define GST_VIDEO_ENCODER_TESTER_TYPE gst_video_encoder_tester_get_type()
#define GST_VIDEO_ENCODER_TESTER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_VIDEO_ENCODER_TESTER_TYPE, GstVideoEncoderTester))
static GType gst_video_encoder_tester_get_type (void);
typedef struct _GstVideoEncoderTester GstVideoEncoderTester;
@ -48,10 +49,18 @@ struct _GstVideoEncoderTester
GstVideoEncoder parent;
GstFlowReturn pre_push_result;
gint num_subframes;
gint current_subframe;
gboolean send_headers;
gboolean key_frame_sent;
gboolean enable_step_by_step;
GstVideoCodecFrame *last_frame;
};
struct _GstVideoEncoderTesterClass
{
GstFlowReturn (*step_by_step) (GstVideoEncoder * encoder,
GstVideoCodecFrame * frame, int steps);
GstVideoEncoderClass parent_class;
};
@ -84,33 +93,85 @@ gst_video_encoder_tester_set_format (GstVideoEncoder * enc,
}
static GstFlowReturn
gst_video_encoder_tester_handle_frame (GstVideoEncoder * enc,
GstVideoCodecFrame * frame)
gst_video_encoder_push_subframe (GstVideoEncoder * enc,
GstVideoCodecFrame * frame, int current_subframe)
{
guint8 *data;
GstMapInfo map;
guint64 input_num;
GstClockTimeDiff deadline;
GstVideoEncoderTester *enc_tester = GST_VIDEO_ENCODER_TESTER (enc);
deadline = gst_video_encoder_get_max_encode_time (enc, frame);
if (deadline < 0) {
/* Calling finish_frame() with frame->output_buffer == NULL means to drop it */
goto out;
if (enc_tester->send_headers) {
GstBuffer *hdr;
GList *headers = NULL;
hdr = gst_buffer_new_and_alloc (0);
GST_BUFFER_FLAG_SET (hdr, GST_BUFFER_FLAG_HEADER);
headers = g_list_append (headers, hdr);
gst_video_encoder_set_headers (enc, headers);
enc_tester->send_headers = FALSE;
}
gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ);
input_num = *((guint64 *) map.data);
gst_buffer_unmap (frame->input_buffer, &map);
if (!enc_tester->key_frame_sent) {
GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
enc_tester->key_frame_sent = TRUE;
}
data = g_malloc (sizeof (guint64));
*(guint64 *) data = input_num;
frame->output_buffer = gst_buffer_new_wrapped (data, sizeof (guint64));
frame->pts = GST_BUFFER_PTS (frame->input_buffer);
frame->duration = GST_BUFFER_DURATION (frame->input_buffer);
out:
return gst_video_encoder_finish_frame (enc, frame);
if (current_subframe < enc_tester->num_subframes - 1)
return gst_video_encoder_finish_subframe (enc, frame);
else
return gst_video_encoder_finish_frame (enc, frame);
}
/* Output up to @steps subframes of @frame, resuming from the subframe
 * index saved in current_subframe.  Once the last subframe of the frame
 * has been pushed, the counter is reset and the ref taken on last_frame
 * in handle_frame() is released.
 * NOTE(review): last_frame is unreffed but not set to NULL here, so it
 * dangles until the next handle_frame() call — confirm no test path
 * touches it in between. */
static GstFlowReturn
gst_video_encoder_tester_output_step_by_step (GstVideoEncoder * enc,
    GstVideoCodecFrame * frame, gint steps)
{
  GstVideoEncoderTester *enc_tester = GST_VIDEO_ENCODER_TESTER (enc);
  GstFlowReturn ret = GST_FLOW_OK;
  int i;
  for (i = enc_tester->current_subframe;
      i < MIN (steps + enc_tester->current_subframe, enc_tester->num_subframes);
      i++) {
    ret = gst_video_encoder_push_subframe (enc, frame, i);
  }
  enc_tester->current_subframe = i;
  if (enc_tester->current_subframe >= enc_tester->num_subframes) {
    enc_tester->current_subframe = 0;
    gst_video_codec_frame_unref (enc_tester->last_frame);
  }
  return ret;
}
/* handle_frame implementation: drop the frame when its deadline has
 * passed; otherwise keep a ref to it and either output all subframes at
 * once or, in step-by-step mode, defer output to explicit
 * gst_video_encoder_tester_output_step_by_step() calls from the test. */
static GstFlowReturn
gst_video_encoder_tester_handle_frame (GstVideoEncoder * enc,
    GstVideoCodecFrame * frame)
{
  GstClockTimeDiff deadline;
  GstVideoEncoderTester *enc_tester = GST_VIDEO_ENCODER_TESTER (enc);
  deadline = gst_video_encoder_get_max_encode_time (enc, frame);
  if (deadline < 0) {
    /* Calling finish_frame() with frame->output_buffer == NULL means to drop it */
    return gst_video_encoder_finish_frame (enc, frame);
  }
  /* This ref is released by output_step_by_step() once all subframes of
   * the frame have been pushed */
  enc_tester->last_frame = gst_video_codec_frame_ref (frame);
  if (enc_tester->enable_step_by_step)
    return GST_FLOW_OK;
  return gst_video_encoder_tester_output_step_by_step (enc, frame,
      enc_tester->num_subframes);
}
static GstFlowReturn
@ -118,7 +179,6 @@ gst_video_encoder_tester_pre_push (GstVideoEncoder * enc,
GstVideoCodecFrame * frame)
{
GstVideoEncoderTester *tester = (GstVideoEncoderTester *) enc;
return tester->pre_push_result;
}
@ -147,12 +207,15 @@ gst_video_encoder_tester_class_init (GstVideoEncoderTesterClass * klass)
videoencoder_class->handle_frame = gst_video_encoder_tester_handle_frame;
videoencoder_class->pre_push = gst_video_encoder_tester_pre_push;
videoencoder_class->set_format = gst_video_encoder_tester_set_format;
}
static void
gst_video_encoder_tester_init (GstVideoEncoderTester * tester)
{
tester->pre_push_result = GST_FLOW_OK;
/* One subframe is considered as a whole single frame. */
tester->num_subframes = 1;
}
static gboolean
@ -183,6 +246,16 @@ setup_videoencodertester (void)
gst_pad_set_event_function (mysinkpad, _mysinkpad_event);
}
static void
setup_videoencodertester_with_subframes (int num_subframes)
{
GstVideoEncoderTester *enc_tester;
setup_videoencodertester ();
enc_tester = GST_VIDEO_ENCODER_TESTER (enc);
enc_tester->num_subframes = num_subframes;
enc_tester->send_headers = TRUE;
}
static void
cleanup_videoencodertest (void)
{
@ -526,6 +599,7 @@ GST_START_TEST (videoencoder_pre_push_fails)
{
GstVideoEncoderTester *tester;
GstHarness *h;
GstFlowReturn ret;
tester = g_object_new (GST_VIDEO_ENCODER_TESTER_TYPE, NULL);
tester->pre_push_result = GST_FLOW_ERROR;
@ -533,8 +607,8 @@ GST_START_TEST (videoencoder_pre_push_fails)
h = gst_harness_new_with_element (GST_ELEMENT (tester), "sink", "src");
gst_harness_set_src_caps (h, create_test_caps ());
fail_unless_equals_int (gst_harness_push (h, create_test_buffer (0)),
GST_FLOW_ERROR);
ret = gst_harness_push (h, create_test_buffer (0));
fail_unless_equals_int (ret, GST_FLOW_ERROR);
gst_harness_teardown (h);
gst_object_unref (tester);
@ -602,6 +676,242 @@ GST_START_TEST (videoencoder_qos)
GST_END_TEST;
#define NUM_BUFFERS 100
/* Push NUM_BUFFERS frames through an encoder configured to emit 4
 * subframes per frame, then verify the output buffer count and each
 * buffer's header/delta flags, payload, PTS and duration. */
GST_START_TEST (videoencoder_playback_subframes)
{
  GstSegment segment;
  GstBuffer *buffer;
  guint64 i;
  GList *iter;
  int subframes = 4;
  setup_videoencodertester_with_subframes (subframes);
  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (enc, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);
  send_startup_events ();
  /* push a new segment */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));
  /* push buffers, the data is actually a number so we can track them */
  for (i = 0; i < NUM_BUFFERS; i++) {
    buffer = create_test_buffer (i);
    fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
  }
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));
  /* check that all buffers (plus one header buffer) were received by our source pad */
  fail_unless (g_list_length (buffers) == NUM_BUFFERS * subframes + 1);
  /* check that the first buffer is a header */
  buffer = buffers->data;
  fail_unless (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER));
  /* check the other buffers */
  i = 0;
  for (iter = g_list_next (buffers); iter; iter = g_list_next (iter)) {
    GstMapInfo map;
    guint64 num;
    buffer = iter->data;
    fail_unless (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER));
    gst_buffer_map (buffer, &map, GST_MAP_READ);
    num = *(guint64 *) map.data;
    /* every subframe of frame n carries payload n */
    fail_unless (i / subframes == num);
    /* only the first subframe of each frame is a non-delta unit */
    if (i % subframes)
      fail_unless (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT));
    fail_unless (GST_BUFFER_PTS (buffer) ==
        gst_util_uint64_scale_round (i / subframes,
            GST_SECOND * TEST_VIDEO_FPS_D, TEST_VIDEO_FPS_N));
    fail_unless (GST_BUFFER_DURATION (buffer) ==
        gst_util_uint64_scale_round (GST_SECOND, TEST_VIDEO_FPS_D,
            TEST_VIDEO_FPS_N));
    gst_buffer_unmap (buffer, &map);
    i++;
  }
  g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
  buffers = NULL;
  cleanup_videoencodertest ();
}

GST_END_TEST;
/* Drive the encoder subframe by subframe in step-by-step mode and verify
 * the exact interleaving of output buffers and serialized events
 * (stream-start/caps/segment, custom downstream events, force-key-unit,
 * EOS), plus the placement of GST_VIDEO_BUFFER_FLAG_MARKER. */
GST_START_TEST (videoencoder_playback_events_subframes)
{
  GstSegment segment;
  GstBuffer *buffer;
  GList *iter;
  gint subframes = 4;
  gint i, header_found;
  GstVideoEncoderTester *enc_tester;
  setup_videoencodertester_with_subframes (subframes);
  enc_tester = GST_VIDEO_ENCODER_TESTER (enc);
  enc_tester->send_headers = TRUE;
  enc_tester->enable_step_by_step = TRUE;
  gst_pad_set_active (mysrcpad, TRUE);
  gst_element_set_state (enc, GST_STATE_PLAYING);
  gst_pad_set_active (mysinkpad, TRUE);
  send_startup_events ();
  /* push a new segment -> no new buffer and no new events yet */
  gst_segment_init (&segment, GST_FORMAT_TIME);
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));
  fail_unless (g_list_length (buffers) == 0 && g_list_length (events) == 0);
  /* push a first buffer -> no new buffer and no new events */
  buffer = create_test_buffer (0);
  fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
  fail_unless (g_list_length (buffers) == 0 && g_list_length (events) == 0);
  /* output only one subframe -> 2 buffers (header + subframe) and 3 events (stream-start, caps, segment) */
  gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
      enc_tester->last_frame, 1);
  fail_unless (g_list_length (buffers) == 2 && g_list_length (events) == 3);
  fail_unless (GST_BUFFER_FLAG_IS_SET ((GstBuffer *) buffers->data,
          GST_BUFFER_FLAG_HEADER));
  fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
                  0)->data)) == GST_EVENT_STREAM_START);
  fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
                  1)->data)) == GST_EVENT_CAPS);
  fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
                  2)->data)) == GST_EVENT_SEGMENT);
  /* output the 3 last subframes -> 3 more buffers and no new events */
  gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
      enc_tester->last_frame, 3);
  fail_unless (g_list_length (buffers) == 5 && g_list_length (events) == 3);
  /* push a new buffer -> no new buffer and no new events */
  buffer = create_test_buffer (1);
  fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
  fail_unless (g_list_length (buffers) == 5 && g_list_length (events) == 3);
  /* push an event in between -> no new buffer and no new event */
  fail_unless (gst_pad_push_event (mysrcpad,
          gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
              gst_structure_new_empty ("custom1"))));
  fail_unless (g_list_length (buffers) == 5 && g_list_length (events) == 3);
  /* output 1 subframe -> one new buffer and no new events */
  gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
      enc_tester->last_frame, 1);
  fail_unless (g_list_length (buffers) == 6 && g_list_length (events) == 3);
  /* push another custom event in between, no new event should appear until the next frame is handled */
  fail_unless (gst_pad_push_event (mysrcpad,
          gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
              gst_structure_new_empty ("custom2"))));
  fail_unless (g_list_length (buffers) == 6 && g_list_length (events) == 3);
  /* output 2 subframes -> 2 new buffers and no new events */
  gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
      enc_tester->last_frame, 2);
  fail_unless (g_list_length (buffers) == 8 && g_list_length (events) == 3);
  /* output the 1 last subframe -> 1 new buffer and no new events */
  gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
      enc_tester->last_frame, 1);
  fail_unless (g_list_length (buffers) == 9 && g_list_length (events) == 3);
  /* push a third buffer -> no new buffer and no new events (still pending two custom events) */
  buffer = create_test_buffer (2);
  fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
  fail_unless (g_list_length (buffers) == 9 && g_list_length (events) == 3);
  /* output 1 subframe -> 1 new buffer and the 2 custom events from the last input frame */
  gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
      enc_tester->last_frame, 1);
  fail_unless (g_list_length (buffers) == 10 && g_list_length (events) == 5);
  fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
                  3)->data)) == GST_EVENT_CUSTOM_DOWNSTREAM);
  fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
                  4)->data)) == GST_EVENT_CUSTOM_DOWNSTREAM);
  /* push another custom event in between, no new event should appear until eos */
  fail_unless (gst_pad_push_event (mysrcpad,
          gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
              gst_structure_new_empty ("custom3"))));
  fail_unless (g_list_length (buffers) == 10 && g_list_length (events) == 5);
  /* output 3 subframes -> 3 new buffers and no new events */
  gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
      enc_tester->last_frame, 3);
  fail_unless (g_list_length (buffers) == 13 && g_list_length (events) == 5);
  /* push a force key-unit event */
  enc_tester->key_frame_sent = FALSE;
  fail_unless (gst_pad_push_event (mysrcpad,
          gst_video_event_new_downstream_force_key_unit (GST_CLOCK_TIME_NONE,
              GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, TRUE, 1)));
  /* Create a new buffer which should be a key unit -> no new buffer and no new event */
  buffer = create_test_buffer (3);
  fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
  fail_unless (g_list_length (buffers) == 13 && g_list_length (events) == 5);
  /* output 2 subframes -> 3 new buffers (one header and two subframes) and two events (key-unit and custom3) */
  gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
      enc_tester->last_frame, 2);
  fail_unless (g_list_length (buffers) == 16 && g_list_length (events) == 7);
  /* output 2 subframes -> 2 new buffers corresponding to the two last subframes */
  gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
      enc_tester->last_frame, 2);
  fail_unless (g_list_length (buffers) == 18 && g_list_length (events) == 7);
  /* push eos event -> 1 new event (eos) */
  fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));
  fail_unless (g_list_length (buffers) == 18 && g_list_length (events) == 8);
  /* check the order of the last events received */
  fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
                  6)->data)) == GST_EVENT_CUSTOM_DOWNSTREAM);
  fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
                  7)->data)) == GST_EVENT_EOS);
  /* check that only the last subframe of each frame owns the GST_VIDEO_BUFFER_FLAG_MARKER flag */
  i = 0;
  header_found = 0;
  for (iter = g_list_next (buffers); iter; iter = g_list_next (iter)) {
    buffer = (GstBuffer *) (iter->data);
    if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER)) {
      if ((i - header_found) % subframes == (subframes - 1))
        fail_unless (GST_BUFFER_FLAG_IS_SET (buffer,
                GST_VIDEO_BUFFER_FLAG_MARKER));
      else
        fail_unless (!GST_BUFFER_FLAG_IS_SET (buffer,
                GST_VIDEO_BUFFER_FLAG_MARKER));
    } else {
      fail_unless (!GST_BUFFER_FLAG_IS_SET (buffer,
              GST_VIDEO_BUFFER_FLAG_MARKER));
      header_found++;
    }
    i++;
  }
  g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
  buffers = NULL;
  cleanup_videoencodertest ();
}

GST_END_TEST;
static Suite *
gst_videoencoder_suite (void)
{
@ -616,6 +926,8 @@ gst_videoencoder_suite (void)
tcase_add_test (tc, videoencoder_flush_events);
tcase_add_test (tc, videoencoder_pre_push_fails);
tcase_add_test (tc, videoencoder_qos);
tcase_add_test (tc, videoencoder_playback_subframes);
tcase_add_test (tc, videoencoder_playback_events_subframes);
return s;
}