Implement signal loss and frame drop detection in the source

Sebastian Dröge 2021-02-09 15:46:48 +02:00
parent 12380026e6
commit 7ce3f2f400
3 changed files with 79 additions and 13 deletions

View file

@@ -1173,8 +1173,8 @@ static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink,
static void output_thread_func(AJAThread *thread, void *data) {
GstAjaSink *self = GST_AJA_SINK(data);
GstClock *clock = NULL;
- guint64 frames_renderded_start = G_MAXUINT64;
- GstClockTime frames_renderded_start_time = GST_CLOCK_TIME_NONE;
+ guint64 frames_rendered_start = G_MAXUINT64;
+ GstClockTime frames_rendered_start_time = GST_CLOCK_TIME_NONE;
guint64 frames_dropped_last = G_MAXUINT64;
AUTOCIRCULATE_TRANSFER transfer;
@@ -1236,8 +1236,8 @@ restart:
gst_clear_object(&clock);
clock = gst_element_get_clock(GST_ELEMENT_CAST(self));
- frames_renderded_start = G_MAXUINT64;
- frames_renderded_start_time = GST_CLOCK_TIME_NONE;
+ frames_rendered_start = G_MAXUINT64;
+ frames_rendered_start_time = GST_CLOCK_TIME_NONE;
frames_dropped_last = G_MAXUINT64;
transfer.acANCBuffer.Allocate(2048);
@@ -1410,14 +1410,14 @@ restart:
// a ringbuffer and calculate a linear regression over them
// FIXME: Add some compensation by dropping/duplicating frames as needed
// but make this configurable
- if (frames_renderded_start_time == GST_CLOCK_TIME_NONE &&
+ if (frames_rendered_start_time == GST_CLOCK_TIME_NONE &&
transfer.acTransferStatus.acFrameStamp.acCurrentFrameTime != 0 &&
transfer.acTransferStatus.acFramesProcessed +
transfer.acTransferStatus.acFramesDropped >
self->queue_size &&
clock) {
- frames_renderded_start = transfer.acTransferStatus.acFramesProcessed +
- transfer.acTransferStatus.acFramesDropped;
+ frames_rendered_start = transfer.acTransferStatus.acFramesProcessed +
+ transfer.acTransferStatus.acFramesDropped;
GstClockTime now_gst = gst_clock_get_time(clock);
GstClockTime now_sys = g_get_real_time() * 1000;
@@ -1425,11 +1425,11 @@ restart:
transfer.acTransferStatus.acFrameStamp.acCurrentFrameTime * 100;
if (render_time < now_sys) {
- frames_renderded_start_time = now_gst - (now_sys - render_time);
+ frames_rendered_start_time = now_gst - (now_sys - render_time);
}
}
- if (clock && frames_renderded_start_time != GST_CLOCK_TIME_NONE) {
+ if (clock && frames_rendered_start_time != GST_CLOCK_TIME_NONE) {
GstClockTime now_gst = gst_clock_get_time(clock);
GstClockTime now_sys = g_get_real_time() * 1000;
GstClockTime render_time =
@@ -1442,12 +1442,12 @@ restart:
sys_diff = 0;
}
- GstClockTime diff = now_gst - frames_renderded_start_time;
+ GstClockTime diff = now_gst - frames_rendered_start_time;
if (sys_diff < diff) diff -= sys_diff;
guint64 frames_rendered = (transfer.acTransferStatus.acFramesProcessed +
transfer.acTransferStatus.acFramesDropped) -
- frames_renderded_start;
+ frames_rendered_start;
guint64 frames_produced =
gst_util_uint64_scale(diff, self->configured_info.fps_n,
self->configured_info.fps_d * GST_SECOND);

View file

@@ -573,6 +573,8 @@ static gboolean gst_aja_src_start(GstAjaSrc *self) {
break;
}
+ self->configured_input_source = input_source;
// Need to remove old routes for the output and framebuffer we're going to
// use
NTV2ActualConnections connections = router.GetConnections();
@@ -1088,6 +1090,8 @@ static void capture_thread_func(AJAThread *thread, void *data) {
GstAjaSrc *self = GST_AJA_SRC(data);
GstClock *clock = NULL;
AUTOCIRCULATE_TRANSFER transfer;
+ guint64 frames_dropped_last = G_MAXUINT64;
+ gboolean have_signal = TRUE;
if (self->capture_cpu_core != G_MAXUINT) {
cpu_set_t mask;
@@ -1147,8 +1151,50 @@ restart:
gst_clear_object(&clock);
clock = gst_element_get_clock(GST_ELEMENT_CAST(self));
+ frames_dropped_last = G_MAXUINT64;
+ have_signal = TRUE;
g_mutex_lock(&self->queue_lock);
while (self->playing && !self->shutdown) {
+ // Check for valid signal first
+ NTV2VideoFormat current_video_format =
+ self->device->device->GetInputVideoFormat(
+ self->configured_input_source);
+ if (current_video_format == ::NTV2_FORMAT_UNKNOWN) {
+ GST_DEBUG_OBJECT(self, "No signal, waiting");
+ g_mutex_unlock(&self->queue_lock);
+ self->device->device->WaitForInputVerticalInterrupt(self->channel);
+ frames_dropped_last = G_MAXUINT64;
+ if (have_signal) {
+ GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, ("Signal lost"),
+ ("No input source was detected"));
+ have_signal = FALSE;
+ }
+ g_mutex_lock(&self->queue_lock);
+ continue;
+ } else if (current_video_format != self->video_format) {
+ // TODO: Handle GST_AJA_VIDEO_FORMAT_AUTO here
+ GST_DEBUG_OBJECT(self,
+ "Different input format %u than configured %u, waiting",
+ current_video_format, self->video_format);
+ g_mutex_unlock(&self->queue_lock);
+ self->device->device->WaitForInputVerticalInterrupt(self->channel);
+ frames_dropped_last = G_MAXUINT64;
+ if (have_signal) {
+ GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, ("Signal lost"),
+ ("Different input source was detected"));
+ have_signal = FALSE;
+ }
+ g_mutex_lock(&self->queue_lock);
+ continue;
+ }
+ if (!have_signal) {
+ GST_ELEMENT_INFO(GST_ELEMENT(self), RESOURCE, READ, ("Signal recovered"),
+ ("Input source detected"));
+ have_signal = TRUE;
+ }
AUTOCIRCULATE_STATUS status;
self->device->device->AutoCirculateGetStatus(self->channel, status);
@@ -1169,8 +1215,27 @@ restart:
status.acRDTSCCurrentTime, status.acFramesProcessed,
status.acFramesDropped, status.acBufferLevel);
- // TODO: Drop detection
- // TODO: Signal loss detection
+ if (frames_dropped_last == G_MAXUINT64) {
+ frames_dropped_last = status.acFramesDropped;
+ } else if (frames_dropped_last < status.acFramesDropped) {
+ GST_WARNING_OBJECT(self, "Dropped %" G_GUINT64_FORMAT " frames",
+ status.acFramesDropped - frames_dropped_last);
+ GstClockTime timestamp =
+ gst_util_uint64_scale(status.acFramesProcessed + frames_dropped_last,
+ self->configured_info.fps_n,
+ self->configured_info.fps_d * GST_SECOND);
+ GstClockTime timestamp_end = gst_util_uint64_scale(
+ status.acFramesProcessed + status.acFramesDropped,
+ self->configured_info.fps_n,
+ self->configured_info.fps_d * GST_SECOND);
+ GstMessage *msg = gst_message_new_qos(
+ GST_OBJECT_CAST(self), TRUE, GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE,
+ timestamp, timestamp_end - timestamp);
+ gst_element_post_message(GST_ELEMENT_CAST(self), msg);
+ frames_dropped_last = status.acFramesDropped;
+ }
if (status.IsRunning() && status.acBufferLevel > 1) {
GstBuffer *video_buffer = NULL;
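
The drop detection added above surfaces every detected gap as a QoS message on the bus. Below is a minimal application-side sketch for consuming those messages; it is not part of this commit, and the handler name and bus-watch wiring are illustrative assumptions.

#include <gst/gst.h>

/* Sketch of a GstBusFunc reacting to the QoS messages the source posts when
 * AutoCirculate reports dropped frames. */
static gboolean on_bus_message(GstBus *bus, GstMessage *msg, gpointer user_data) {
  if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_QOS) {
    gboolean live;
    guint64 running_time, stream_time, timestamp, duration;

    gst_message_parse_qos(msg, &live, &running_time, &stream_time, &timestamp,
                          &duration);
    g_print("%s dropped frames for %" GST_TIME_FORMAT " starting at %" GST_TIME_FORMAT "\n",
            GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)), GST_TIME_ARGS(duration),
            GST_TIME_ARGS(timestamp));
  }
  return TRUE;
}

/* Assumed wiring: gst_bus_add_watch(gst_element_get_bus(pipeline), on_bus_message, NULL); */

Note that the message is posted with running_time and stream_time set to GST_CLOCK_TIME_NONE, so only the timestamp/duration pair derived from the frame counters carries information.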

View file

@@ -73,6 +73,7 @@ struct _GstAjaSrc {
NTV2AudioSystem audio_system;
NTV2VideoFormat video_format;
+ NTV2InputSource configured_input_source;
guint32 f2_start_line;
NTV2TCIndex tc_index;
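
Signal loss and recovery in the capture thread are reported through ordinary element messages (GST_ELEMENT_WARNING on loss or format mismatch, GST_ELEMENT_INFO on recovery), so an application can track input state from the same bus watch. A small sketch follows, again with an assumed handler name and wiring that are not part of this commit.

/* Sketch: log input signal state changes from the warning/info messages the
 * capture thread posts. */
static gboolean on_signal_state_message(GstBus *bus, GstMessage *msg, gpointer user_data) {
  GError *err = NULL;
  gchar *debug = NULL;

  switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_WARNING: /* posted as "Signal lost" */
      gst_message_parse_warning(msg, &err, &debug);
      g_printerr("Warning from %s: %s\n",
                 GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)), err->message);
      break;
    case GST_MESSAGE_INFO: /* posted as "Signal recovered" */
      gst_message_parse_info(msg, &err, &debug);
      g_print("Info from %s: %s\n",
              GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)), err->message);
      break;
    default:
      break;
  }

  g_clear_error(&err);
  g_free(debug);
  return TRUE;
}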