androidmedia/enc: handle codec-data before popping GstVideoCodecFrames
The issue is that when amc produced a codec-data buffer, a GstVideoCodecFrame was popped off the internal queue. As a result, the codec-data was associated with the first input frame, and the second output buffer (the first encoded buffer) with the second input frame. At the end (assuming one input produces one output, which seems to hold in testing and is how the encoder is currently implemented) the last output buffer would be left without a corresponding input frame and would be pushed without any timing information. This would lead to e.g. muxers rejecting the buffer without a PTS and failing to mux.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/5330>
This commit is contained in:
parent bec57df2f8
commit 86f00367fc
1 changed file with 69 additions and 53 deletions
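In outline, the fix reorders the encoder's output loop so the BUFFER_FLAG_CODEC_CONFIG check happens before a GstVideoCodecFrame is popped from the pending queue; only buffers that are not codec data consume a frame. A simplified sketch of that control flow, condensed from the diff below (not the literal patch):

    /* Handle codec config first; it must not consume a pending frame. */
    is_codec_data = FALSE;
    if ((buffer_info.flags & BUFFER_FLAG_CODEC_CONFIG) && buffer_info.size > 0) {
      /* ... store SPS/PPS as stream headers or as caps codec_data ... */
      is_codec_data = TRUE;
    }

    if (flow_ret == GST_FLOW_OK && !is_codec_data) {
      /* Only a real encoded buffer is matched with a pending input frame,
       * so it keeps the timing of the frame it actually belongs to. */
      frame = _find_nearest_frame (self,
          gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND, 1));
      flow_ret =
          gst_amc_video_enc_handle_output_frame (self, buf, &buffer_info, frame);
    }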
@@ -885,6 +885,16 @@ _find_nearest_frame (GstAmcVideoEnc * self, GstClockTime reference_timestamp)
   if (best)
     gst_video_codec_frame_ref (best);
 
+  GST_DEBUG_OBJECT (self, "found best %p from %u frames", best,
+      g_list_length (frames));
+  if (best) {
+    GST_LOG_OBJECT (self, "best %p (input pts %" GST_TIME_FORMAT " dts %"
+        GST_TIME_FORMAT " frame no %" G_GUINT32_FORMAT " buffer %"
+        GST_PTR_FORMAT, best, GST_TIME_ARGS (best->pts),
+        GST_TIME_ARGS (best->dts), best->system_frame_number,
+        best->input_buffer);
+  }
+
   g_list_foreach (frames, (GFunc) gst_video_codec_frame_unref, NULL);
   g_list_free (frames);
 
@@ -962,52 +972,6 @@ gst_amc_video_enc_handle_output_frame (GstAmcVideoEnc * self,
   GstFlowReturn flow_ret = GST_FLOW_OK;
   GstVideoEncoder *encoder = GST_VIDEO_ENCODER_CAST (self);
 
-  /* The BUFFER_FLAG_CODEC_CONFIG logic is borrowed from
-   * gst-omx. see *_handle_output_frame in
-   * gstomxvideoenc.c and gstomxh264enc.c */
-  if ((buffer_info->flags & BUFFER_FLAG_CODEC_CONFIG)
-      && buffer_info->size > 0) {
-
-    if (self->codec_data_in_bytestream) {
-      if (buffer_info->size > 4 &&
-          GST_READ_UINT32_BE (buf->data + buffer_info->offset) == 0x00000001) {
-        GList *l = NULL;
-        GstBuffer *hdrs;
-
-        GST_DEBUG_OBJECT (self, "got codecconfig in byte-stream format");
-
-        hdrs = gst_buffer_new_and_alloc (buffer_info->size);
-        gst_buffer_fill (hdrs, 0, buf->data + buffer_info->offset,
-            buffer_info->size);
-        GST_BUFFER_PTS (hdrs) =
-            gst_util_uint64_scale (buffer_info->presentation_time_us,
-            GST_USECOND, 1);
-
-        l = g_list_append (l, hdrs);
-        gst_video_encoder_set_headers (encoder, l);
-      }
-    } else {
-      GstBuffer *codec_data;
-      GstVideoCodecState *output_state =
-          gst_video_encoder_get_output_state (GST_VIDEO_ENCODER (self));
-
-      GST_DEBUG_OBJECT (self, "Handling codec data");
-
-      codec_data = gst_buffer_new_and_alloc (buffer_info->size);
-      gst_buffer_fill (codec_data, 0, buf->data + buffer_info->offset,
-          buffer_info->size);
-      output_state->codec_data = codec_data;
-      gst_video_codec_state_unref (output_state);
-
-      if (!gst_video_encoder_negotiate (encoder)) {
-        gst_video_codec_frame_unref (frame);
-        return GST_FLOW_NOT_NEGOTIATED;
-      }
-
-      return GST_FLOW_OK;
-    }
-  }
-
   if (buffer_info->size > 0) {
     GstBuffer *out_buf;
     GstPad *srcpad;
@@ -1035,7 +999,10 @@ gst_amc_video_enc_handle_output_frame (GstAmcVideoEnc * self,
        * caps and filling it
        */
 
-      GST_ERROR_OBJECT (self, "No corresponding frame found");
+      GST_ERROR_OBJECT (self, "No corresponding frame found: buffer pts: %"
+          GST_TIME_FORMAT " presentation_time_us %" G_GUINT64_FORMAT,
+          GST_TIME_ARGS (GST_BUFFER_PTS (out_buf)),
+          (guint64) buffer_info->presentation_time_us);
       flow_ret = gst_pad_push (srcpad, out_buf);
     }
   } else if (frame) {
@@ -1048,9 +1015,10 @@ gst_amc_video_enc_handle_output_frame (GstAmcVideoEnc * self,
 static void
 gst_amc_video_enc_loop (GstAmcVideoEnc * self)
 {
   GstVideoEncoder *encoder = GST_VIDEO_ENCODER_CAST (self);
   GstVideoCodecFrame *frame;
   GstFlowReturn flow_ret = GST_FLOW_OK;
-  gboolean is_eos;
+  gboolean is_eos, is_codec_data;
   GstAmcBufferInfo buffer_info;
   GstAmcBuffer *buf;
   gint idx;
 
@@ -1160,14 +1128,62 @@ process_buffer:
     goto got_null_output_buffer;
   }
 
-  frame =
-      _find_nearest_frame (self,
-      gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND, 1));
+  is_codec_data = FALSE;
+  /* The BUFFER_FLAG_CODEC_CONFIG logic is borrowed from
+   * gst-omx. see *_handle_output_frame in
+   * gstomxvideoenc.c and gstomxh264enc.c */
+  if ((buffer_info.flags & BUFFER_FLAG_CODEC_CONFIG)
+      && buffer_info.size > 0) {
+
+    if (self->codec_data_in_bytestream) {
+      if (buffer_info.size > 4 &&
+          GST_READ_UINT32_BE (buf->data + buffer_info.offset) == 0x00000001) {
+        GList *l = NULL;
+        GstBuffer *hdrs;
+
+        GST_DEBUG_OBJECT (self, "got codecconfig in byte-stream format");
+
+        hdrs = gst_buffer_new_and_alloc (buffer_info.size);
+        gst_buffer_fill (hdrs, 0, buf->data + buffer_info.offset,
+            buffer_info.size);
+        GST_BUFFER_PTS (hdrs) =
+            gst_util_uint64_scale (buffer_info.presentation_time_us,
+            GST_USECOND, 1);
+
+        l = g_list_append (l, hdrs);
+        gst_video_encoder_set_headers (encoder, l);
+        is_codec_data = TRUE;
+      }
+    } else {
+      GstBuffer *codec_data;
+      GstVideoCodecState *output_state =
+          gst_video_encoder_get_output_state (GST_VIDEO_ENCODER (self));
+
+      GST_DEBUG_OBJECT (self, "Handling codec data");
+
+      codec_data = gst_buffer_new_and_alloc (buffer_info.size);
+      gst_buffer_fill (codec_data, 0, buf->data + buffer_info.offset,
+          buffer_info.size);
+      output_state->codec_data = codec_data;
+      gst_video_codec_state_unref (output_state);
+      is_codec_data = TRUE;
+
+      if (!gst_video_encoder_negotiate (encoder))
+        flow_ret = GST_FLOW_NOT_NEGOTIATED;
+    }
+  }
+
   is_eos = !!(buffer_info.flags & BUFFER_FLAG_END_OF_STREAM);
 
-  flow_ret =
-      gst_amc_video_enc_handle_output_frame (self, buf, &buffer_info, frame);
+  if (flow_ret == GST_FLOW_OK && !is_codec_data) {
+    frame =
+        _find_nearest_frame (self,
+        gst_util_uint64_scale (buffer_info.presentation_time_us, GST_USECOND,
+            1));
+
+    flow_ret =
+        gst_amc_video_enc_handle_output_frame (self, buf, &buffer_info, frame);
+  }
 
   gst_amc_buffer_free (buf);
   buf = NULL;