cccombiner: Replace caption frame counting with max_buffer_time

The counting is defective when we are combining with a stream that has a
higher max_cea608_count (such as 60p to 30i), as we produce fewer caption
frames than we consume, leading to periodic queue drops.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/7996>
Author: Jan Alexander Steffens (heftig)
Date:   2024-11-19 17:38:43 +01:00
Committed by: GStreamer Marge Bot
Parent: 2619a59f82
Commit: b29262e662
2 changed files with 20 additions and 39 deletions
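For reference, the new cap is derived directly from the video caps: a per-frame duration (with a 50 ms fallback) multiplied by max_scheduled, passed to cc_buffer_set_max_buffer_time. Below is a standalone arithmetic sketch of that budget (plain C, not part of the patch; max_scheduled = 30 is only an example value):

/*
 * Standalone sketch, not GStreamer code: how the new time budget relates to
 * the framerate.  frame_duration mirrors the patch's
 * gst_util_uint64_scale (GST_SECOND, fps_d, fps_n) with the 50 ms fallback;
 * max_scheduled = 30 is only an example value.
 */
#include <stdint.h>
#include <stdio.h>

#define NSEC_PER_SEC            1000000000ull
#define FALLBACK_FRAME_DURATION (50ull * 1000000ull)    /* 50 ms */

static uint64_t
frame_duration_from_fps (int fps_n, int fps_d)
{
  if (fps_n <= 0 || fps_d <= 0)
    return FALLBACK_FRAME_DURATION;
  return NSEC_PER_SEC * (uint64_t) fps_d / (uint64_t) fps_n;
}

int
main (void)
{
  const int max_scheduled = 30;   /* example value for the element's max_scheduled setting */
  struct { int fps_n, fps_d; const char *label; } rates[] = {
    { 30000, 1001, "29.97 fps" },
    { 60000, 1001, "59.94 fps" },
    { 0, 0, "no framerate (fallback)" },
  };

  for (unsigned i = 0; i < sizeof (rates) / sizeof (rates[0]); i++) {
    uint64_t dur = frame_duration_from_fps (rates[i].fps_n, rates[i].fps_d);
    uint64_t cap = dur * (uint64_t) max_scheduled;
    /* e.g. 29.97 fps -> 33.367 ms per frame -> ~1.001 s of buffered captions */
    printf ("%-26s frame_duration = %7.3f ms, buffer cap = %8.1f ms\n",
        rates[i].label, dur / 1e6, cap / 1e6);
  }
  return 0;
}

With 59.94 fps captions feeding 29.97 fps output, the old per-buffer counter grew by roughly 30 entries per second, so with max_scheduled at 30 (the example value above) it would reach the limit about once a second, producing the periodic drops described above. A budget expressed in buffered time measures how much caption data is actually queued rather than how many buffers were pushed, so it stays bounded in that case.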


@@ -104,6 +104,8 @@ gst_cccombiner_input_meta_processing_get_type (void)
   return cccombiner_input_meta_processing_type;
 }
 
+#define FALLBACK_FRAME_DURATION (50 * GST_MSECOND)
+
 typedef struct
 {
   GstVideoCaptionType caption_type;
@@ -252,8 +254,7 @@ schedule_cdp (GstCCCombiner * self, const GstVideoTimeCode * tc,
   guint cc_data_len;
 
   cc_data_len = extract_cdp (self, data, len, cc_data);
-  if (cc_buffer_push_cc_data (self->cc_buffer, cc_data, cc_data_len))
-    self->current_scheduled++;
+  cc_buffer_push_cc_data (self->cc_buffer, cc_data, cc_data_len);
 }
 
 static void
@@ -286,24 +287,21 @@ schedule_cea608_s334_1a (GstCCCombiner * self, guint8 * data, guint len,
     }
   }
 
-  if (cc_buffer_push_separated (self->cc_buffer, field0_data, field0_len,
-          field1_data, field1_len, NULL, 0))
-    self->current_scheduled++;
+  cc_buffer_push_separated (self->cc_buffer, field0_data, field0_len,
+      field1_data, field1_len, NULL, 0);
 }
 
 static void
 schedule_cea708_raw (GstCCCombiner * self, guint8 * data, guint len,
     GstClockTime pts, GstClockTime duration)
 {
-  if (cc_buffer_push_cc_data (self->cc_buffer, data, len))
-    self->current_scheduled++;
+  cc_buffer_push_cc_data (self->cc_buffer, data, len);
 }
 
 static void
 schedule_cea608_raw (GstCCCombiner * self, guint8 * data, guint len)
 {
-  if (cc_buffer_push_separated (self->cc_buffer, data, len, NULL, 0, NULL, 0))
-    self->current_scheduled++;
+  cc_buffer_push_separated (self->cc_buffer, data, len, NULL, 0, NULL, 0);
 }
 
 static void
@@ -319,24 +317,6 @@ schedule_caption (GstCCCombiner * self, GstAggregatorPad * caption_pad,
   running_time =
       gst_segment_to_running_time (&caption_pad->segment, GST_FORMAT_TIME, pts);
 
-  if (self->current_scheduled + 1 >= self->max_scheduled) {
-    GstClockTime stream_time;
-
-    GST_WARNING_OBJECT (self,
-        "scheduled queue runs too long, discarding stored");
-
-    stream_time =
-        gst_segment_to_stream_time (&caption_pad->segment, GST_FORMAT_TIME,
-        pts);
-
-    gst_element_post_message (GST_ELEMENT_CAST (self),
-        gst_message_new_qos (GST_OBJECT_CAST (self), FALSE,
-            running_time, stream_time, pts, duration));
-
-    cc_buffer_discard (self->cc_buffer);
-    self->current_scheduled = 0;
-  }
-
   self->last_caption_ts = running_time;
 
   gst_buffer_map (caption_buf, &map, GST_MAP_READ);
@@ -694,9 +674,6 @@ gst_cc_combiner_collect_captions (GstCCCombiner * self, gboolean timeout)
   if (self->current_frame_captions->len > 0) {
     guint i;
 
-    if (self->schedule)
-      self->current_scheduled = MAX (1, self->current_scheduled) - 1;
-
     video_buf = gst_buffer_make_writable (self->current_video_buffer);
     self->current_video_buffer = NULL;
 
@@ -772,10 +749,10 @@ gst_cc_combiner_aggregate (GstAggregator * aggregator, gboolean timeout)
     if (gst_aggregator_pad_is_eos (video_pad)) {
       GST_DEBUG_OBJECT (aggregator, "Video pad is EOS, we're done");
 
-      /* Assume that this buffer ends where it started +50ms (25fps) and handle it */
+      /* Assume that this buffer ends where it started +50ms (20fps) and handle it */
       if (self->current_video_buffer) {
         self->current_video_running_time_end =
-            self->current_video_running_time + 50 * GST_MSECOND;
+            self->current_video_running_time + FALLBACK_FRAME_DURATION;
         flow_ret = gst_cc_combiner_collect_captions (self, timeout);
       }
 
@@ -913,6 +890,7 @@ gst_cc_combiner_sink_event (GstAggregator * aggregator,
       } else {
         gint fps_n, fps_d;
         const gchar *interlace_mode;
+        GstClockTime frame_duration = GST_CLOCK_TIME_NONE;
 
         fps_n = fps_d = 0;
 
@@ -923,11 +901,14 @@ gst_cc_combiner_sink_event (GstAggregator * aggregator,
         self->progressive = !interlace_mode
             || !g_strcmp0 (interlace_mode, "progressive");
 
-        if (fps_n != self->video_fps_n || fps_d != self->video_fps_d) {
-          GstClockTime latency;
+        if (fps_d > 0)
+          frame_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
+        if (!GST_CLOCK_TIME_IS_VALID (frame_duration) || frame_duration == 0)
+          frame_duration = FALLBACK_FRAME_DURATION;
 
-          latency = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
-          gst_aggregator_set_latency (aggregator, latency, latency);
+        if (fps_n != self->video_fps_n || fps_d != self->video_fps_d) {
+          gst_aggregator_set_latency (aggregator, frame_duration,
+              frame_duration);
         }
 
         self->video_fps_n = fps_n;
@@ -944,6 +925,9 @@ gst_cc_combiner_sink_event (GstAggregator * aggregator,
           self->cdp_fps_entry = cdp_fps_entry_from_fps (60, 1);
         }
 
+        cc_buffer_set_max_buffer_time (self->cc_buffer,
+            frame_duration * self->max_scheduled);
+
         gst_aggregator_set_src_caps (aggregator, caps);
       }
 
@@ -985,7 +969,6 @@ gst_cc_combiner_stop (GstAggregator * aggregator)
   self->caption_type = GST_VIDEO_CAPTION_TYPE_UNKNOWN;
 
   cc_buffer_discard (self->cc_buffer);
-  self->current_scheduled = 0;
   self->cdp_fps_entry = &null_fps_entry;
 
   return TRUE;
@@ -1009,7 +992,6 @@ gst_cc_combiner_flush (GstAggregator * aggregator)
   self->cdp_hdr_sequence_cntr = 0;
 
   cc_buffer_discard (self->cc_buffer);
-  self->current_scheduled = 0;
 
   return GST_FLOW_OK;
 }


@@ -87,7 +87,6 @@ struct _GstCCCombiner
   gboolean schedule;
   guint max_scheduled;
   gboolean output_padding;
-  guint current_scheduled;
   GstClockTime last_caption_ts;
 
   CCBuffer *cc_buffer;