Add properties for selecting capture/output frame buffers to use

And calculate them automatically by default based on the number of frame
buffers available and the number of channels.

This works around various bugs in the AJA SDK when selecting these
manually.

See AJA support ticket #5056.
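
For illustration, a minimal sketch of how an application could pin an explicit
frame-buffer range with the new properties. The element name "ajasink" and the
frame numbers 8..15 are assumptions; only the "start-frame"/"end-frame"
property names come from this commit, and leaving both at their default of 0
keeps the automatic per-channel assignment:

#include <gst/gst.h>

/* Sketch only: pin the output to frame buffers 8..15.
 * Assumes gst_init() has already been called and that the sink element
 * registers as "ajasink"; leaving both properties at 0 (the default)
 * selects the frame range automatically. */
static GstElement *make_pinned_sink(void) {
  GstElement *sink = gst_element_factory_make("ajasink", NULL);
  if (sink != NULL)
    g_object_set(sink, "start-frame", 8, "end-frame", 15, NULL);
  return sink;
}
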
Sebastian Dröge 2021-08-27 15:02:50 +03:00
parent a2b30015c7
commit d61b415230
4 changed files with 120 additions and 13 deletions

View file

@@ -39,6 +39,8 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_sink_debug);
#define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC)
#define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_AUTO)
#define DEFAULT_QUEUE_SIZE (16)
#define DEFAULT_START_FRAME (0)
#define DEFAULT_END_FRAME (0)
#define DEFAULT_OUTPUT_CPU_CORE (G_MAXUINT)
enum {
@@ -51,6 +53,8 @@ enum {
PROP_TIMECODE_INDEX,
PROP_REFERENCE_SOURCE,
PROP_QUEUE_SIZE,
PROP_START_FRAME,
PROP_END_FRAME,
PROP_OUTPUT_CPU_CORE,
};
@@ -135,6 +139,24 @@ static void gst_aja_sink_class_init(GstAjaSinkClass *klass) {
1, G_MAXINT, DEFAULT_QUEUE_SIZE,
(GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property(
gobject_class, PROP_START_FRAME,
g_param_spec_uint(
"start-frame", "Start Frame",
"Start frame buffer to be used for output (auto if same number as "
"end-frame).",
0, G_MAXINT, DEFAULT_START_FRAME,
(GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property(
gobject_class, PROP_END_FRAME,
g_param_spec_uint(
"end-frame", "End Frame",
"End frame buffer to be used for output (auto if same number as "
"start-frame).",
0, G_MAXINT, DEFAULT_END_FRAME,
(GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property(
gobject_class, PROP_AUDIO_SYSTEM,
g_param_spec_enum(
@@ -217,14 +239,14 @@ static void gst_aja_sink_init(GstAjaSink *self) {
self->device_identifier = g_strdup(DEFAULT_DEVICE_IDENTIFIER);
self->channel = DEFAULT_CHANNEL;
self->queue_size = DEFAULT_QUEUE_SIZE;
self->start_frame = DEFAULT_START_FRAME;
self->end_frame = DEFAULT_END_FRAME;
self->audio_system_setting = DEFAULT_AUDIO_SYSTEM;
self->output_destination = DEFAULT_OUTPUT_DESTINATION;
self->timecode_index = DEFAULT_TIMECODE_INDEX;
self->reference_source = DEFAULT_REFERENCE_SOURCE;
self->output_cpu_core = DEFAULT_OUTPUT_CPU_CORE;
gst_base_sink_set_render_delay(GST_BASE_SINK(self),
(self->queue_size / 2) * GST_SECOND / 30);
self->queue =
gst_queue_array_new_for_struct(sizeof(QueueItem), self->queue_size);
}
@@ -244,6 +266,12 @@ void gst_aja_sink_set_property(GObject *object, guint property_id,
case PROP_QUEUE_SIZE:
self->queue_size = g_value_get_uint(value);
break;
case PROP_START_FRAME:
self->start_frame = g_value_get_uint(value);
break;
case PROP_END_FRAME:
self->end_frame = g_value_get_uint(value);
break;
case PROP_AUDIO_SYSTEM:
self->audio_system_setting = (GstAjaAudioSystem)g_value_get_enum(value);
break;
@@ -283,6 +311,12 @@ void gst_aja_sink_get_property(GObject *object, guint property_id,
case PROP_QUEUE_SIZE:
g_value_set_uint(value, self->queue_size);
break;
case PROP_START_FRAME:
g_value_set_uint(value, self->start_frame);
break;
case PROP_END_FRAME:
g_value_set_uint(value, self->end_frame);
break;
case PROP_AUDIO_SYSTEM:
g_value_set_enum(value, self->audio_system_setting);
break;
@@ -566,13 +600,6 @@ static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) {
self->quad_mode = quad_mode;
self->video_format = video_format;
// Configure render delay based on the framerate and queue size
gst_base_sink_set_render_delay(
GST_BASE_SINK(self),
gst_util_uint64_scale(self->queue_size / 2,
self->configured_info.fps_d * GST_SECOND,
self->configured_info.fps_n));
g_assert(self->device != NULL);
// Make sure to globally lock here as the routing settings and others are
@@ -1700,12 +1727,39 @@ restart:
self->device->device->EnableOutputInterrupt(self->channel);
self->device->device->SubscribeOutputVerticalEvent(self->channel);
guint16 start_frame = self->start_frame;
guint16 end_frame = self->end_frame;
if (start_frame == end_frame) {
guint16 num_frames = ::NTV2DeviceGetNumberFrameBuffers(self->device_id);
guint16 num_channels = ::NTV2DeviceGetNumFrameStores(self->device_id);
start_frame = self->channel * (num_frames / num_channels);
end_frame = ((self->channel + 1) * (num_frames / num_channels)) - 1;
// Don't configure too many frames here. It needs to be in relation to
// our input queue.
end_frame = MIN(start_frame + self->queue_size / 2, end_frame);
}
GST_DEBUG_OBJECT(
self, "Configuring channel %u with start frame %u and end frame %u",
self->channel, start_frame, end_frame);
// Configure render delay based on the framerate and queue size
gst_base_sink_set_render_delay(
GST_BASE_SINK(self),
gst_util_uint64_scale(end_frame - start_frame + 1,
self->configured_info.fps_d * GST_SECOND,
self->configured_info.fps_n));
if (!self->device->device->AutoCirculateInitForOutput(
self->channel, self->queue_size / 2, self->audio_system,
self->channel, 0, self->audio_system,
AUTOCIRCULATE_WITH_RP188 |
(self->vanc_mode == ::NTV2_VANCMODE_OFF ? AUTOCIRCULATE_WITH_ANC
: 0),
1)) {
1, start_frame, end_frame)) {
GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL),
("Failed to initialize autocirculate"));
goto out;
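
For illustration, with assumed device numbers (not from this commit): on a
board reporting 16 frame buffers and 4 frame stores, each channel gets
16 / 4 = 4 frames, so channel 1 (zero-based) starts at frame 4 and ends at
frame 7; with the default queue-size of 16, queue_size / 2 = 8 and the MIN()
above leaves end_frame at 7. The render delay then covers
end_frame - start_frame + 1 = 4 frames, i.e. 4 * fps_d / fps_n seconds,
roughly 133 ms at 30 fps.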

View file

@@ -69,6 +69,7 @@ struct _GstAjaSink {
gchar *device_identifier;
NTV2Channel channel;
guint queue_size;
guint start_frame, end_frame;
guint output_cpu_core;
GstAjaAudioSystem audio_system_setting;

View file

@@ -42,6 +42,8 @@ GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug);
#define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC)
#define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_FREERUN)
#define DEFAULT_QUEUE_SIZE (16)
#define DEFAULT_START_FRAME (0)
#define DEFAULT_END_FRAME (0)
#define DEFAULT_CAPTURE_CPU_CORE (G_MAXUINT)
enum {
@@ -55,6 +57,8 @@ enum {
PROP_AUDIO_SOURCE,
PROP_TIMECODE_INDEX,
PROP_REFERENCE_SOURCE,
PROP_START_FRAME,
PROP_END_FRAME,
PROP_QUEUE_SIZE,
PROP_CAPTURE_CPU_CORE,
PROP_SIGNAL,
@@ -148,6 +152,24 @@ static void gst_aja_src_class_init(GstAjaSrcClass *klass) {
1, G_MAXINT, DEFAULT_QUEUE_SIZE,
(GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property(
gobject_class, PROP_START_FRAME,
g_param_spec_uint(
"start-frame", "Start Frame",
"Start frame buffer to be used for capturing (auto if same number as "
"end-frame).",
0, G_MAXINT, DEFAULT_START_FRAME,
(GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property(
gobject_class, PROP_END_FRAME,
g_param_spec_uint(
"end-frame", "End Frame",
"End frame buffer to be used for capturing (auto if same number as "
"start-frame).",
0, G_MAXINT, DEFAULT_END_FRAME,
(GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property(
gobject_class, PROP_AUDIO_SYSTEM,
g_param_spec_enum(
@@ -244,6 +266,8 @@ static void gst_aja_src_init(GstAjaSrc *self) {
self->device_identifier = g_strdup(DEFAULT_DEVICE_IDENTIFIER);
self->channel = DEFAULT_CHANNEL;
self->queue_size = DEFAULT_QUEUE_SIZE;
self->start_frame = DEFAULT_START_FRAME;
self->end_frame = DEFAULT_END_FRAME;
self->video_format_setting = DEFAULT_VIDEO_FORMAT;
self->audio_system_setting = DEFAULT_AUDIO_SYSTEM;
self->input_source = DEFAULT_INPUT_SOURCE;
@@ -275,6 +299,12 @@ void gst_aja_src_set_property(GObject *object, guint property_id,
case PROP_QUEUE_SIZE:
self->queue_size = g_value_get_uint(value);
break;
case PROP_START_FRAME:
self->start_frame = g_value_get_uint(value);
break;
case PROP_END_FRAME:
self->end_frame = g_value_get_uint(value);
break;
case PROP_VIDEO_FORMAT:
self->video_format_setting = (GstAjaVideoFormat)g_value_get_enum(value);
break;
@@ -319,6 +349,12 @@ void gst_aja_src_get_property(GObject *object, guint property_id, GValue *value,
case PROP_QUEUE_SIZE:
g_value_set_uint(value, self->queue_size);
break;
case PROP_START_FRAME:
g_value_set_uint(value, self->start_frame);
break;
case PROP_END_FRAME:
g_value_set_uint(value, self->end_frame);
break;
case PROP_VIDEO_FORMAT:
g_value_set_enum(value, self->video_format_setting);
break;
@@ -1742,12 +1778,27 @@ restart:
continue;
}
guint16 start_frame = self->start_frame;
guint16 end_frame = self->end_frame;
if (start_frame == end_frame) {
guint16 num_frames = ::NTV2DeviceGetNumberFrameBuffers(self->device_id);
guint16 num_channels = ::NTV2DeviceGetNumFrameStores(self->device_id);
start_frame = self->channel * (num_frames / num_channels);
end_frame = ((self->channel + 1) * (num_frames / num_channels)) - 1;
}
GST_DEBUG_OBJECT(
self, "Configuring channel %u with start frame %u and end frame %u",
self->channel, start_frame, end_frame);
if (!self->device->device->AutoCirculateInitForInput(
self->channel, self->queue_size / 2, self->audio_system,
self->channel, 0, self->audio_system,
AUTOCIRCULATE_WITH_RP188 | (self->vanc_mode == ::NTV2_VANCMODE_OFF
? AUTOCIRCULATE_WITH_ANC
: 0),
1)) {
1, start_frame, end_frame)) {
GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL),
("Failed to initialize autocirculate"));
goto out;
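
By comparison with the sink path, the automatic range for capture is not
capped by the queue size: with the same assumed 16 frame buffers and 4 frame
stores as in the example above, channel 0 would capture into frames 0..3 and
channel 1 into frames 4..7, unless start-frame and end-frame are set to
different values to override the split.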

View file

@@ -70,6 +70,7 @@ struct _GstAjaSrc {
GstAjaTimecodeIndex timecode_index;
GstAjaReferenceSource reference_source;
guint queue_size;
guint start_frame, end_frame;
guint capture_cpu_core;
gboolean signal;