diff --git a/.clang-format b/.clang-format new file mode 100644 index 0000000000..70da6add54 --- /dev/null +++ b/.clang-format @@ -0,0 +1,182 @@ +--- +Language: Cpp +# BasedOnStyle: Google +AccessModifierOffset: -1 +AlignAfterOpenBracket: Align +AlignConsecutiveMacros: false +AlignConsecutiveAssignments: false +AlignConsecutiveBitFields: false +AlignConsecutiveDeclarations: false +AlignEscapedNewlines: Left +AlignOperands: Align +AlignTrailingComments: true +AllowAllArgumentsOnNextLine: true +AllowAllConstructorInitializersOnNextLine: true +AllowAllParametersOfDeclarationOnNextLine: true +AllowShortEnumsOnASingleLine: true +AllowShortBlocksOnASingleLine: Never +AllowShortCaseLabelsOnASingleLine: false +AllowShortFunctionsOnASingleLine: All +AllowShortLambdasOnASingleLine: All +AllowShortIfStatementsOnASingleLine: WithoutElse +AllowShortLoopsOnASingleLine: true +AlwaysBreakAfterDefinitionReturnType: None +AlwaysBreakAfterReturnType: None +AlwaysBreakBeforeMultilineStrings: true +AlwaysBreakTemplateDeclarations: Yes +BinPackArguments: true +BinPackParameters: true +BraceWrapping: + AfterCaseLabel: false + AfterClass: false + AfterControlStatement: Never + AfterEnum: false + AfterFunction: false + AfterNamespace: false + AfterObjCDeclaration: false + AfterStruct: false + AfterUnion: false + AfterExternBlock: false + BeforeCatch: false + BeforeElse: false + BeforeLambdaBody: false + BeforeWhile: false + IndentBraces: false + SplitEmptyFunction: true + SplitEmptyRecord: true + SplitEmptyNamespace: true +BreakBeforeBinaryOperators: None +BreakBeforeBraces: Attach +BreakBeforeInheritanceComma: false +BreakInheritanceList: BeforeColon +BreakBeforeTernaryOperators: true +BreakConstructorInitializersBeforeComma: false +BreakConstructorInitializers: BeforeColon +BreakAfterJavaFieldAnnotations: false +BreakStringLiterals: true +ColumnLimit: 80 +CommentPragmas: '^ IWYU pragma:' +CompactNamespaces: false +ConstructorInitializerAllOnOneLineOrOnePerLine: true +ConstructorInitializerIndentWidth: 4 +ContinuationIndentWidth: 4 +Cpp11BracedListStyle: true +DeriveLineEnding: true +DerivePointerAlignment: true +DisableFormat: false +ExperimentalAutoDetectBinPacking: false +FixNamespaceComments: true +ForEachMacros: + - foreach + - Q_FOREACH + - BOOST_FOREACH +IncludeBlocks: Regroup +IncludeCategories: + - Regex: '^' + Priority: 2 + SortPriority: 0 + - Regex: '^<.*\.h>' + Priority: 1 + SortPriority: 0 + - Regex: '^<.*' + Priority: 2 + SortPriority: 0 + - Regex: '.*' + Priority: 3 + SortPriority: 0 +IncludeIsMainRegex: '([-_](test|unittest))?$' +IncludeIsMainSourceRegex: '' +IndentCaseLabels: true +IndentCaseBlocks: false +IndentGotoLabels: true +IndentPPDirectives: None +IndentExternBlock: AfterExternBlock +IndentWidth: 2 +IndentWrappedFunctionNames: false +InsertTrailingCommas: None +JavaScriptQuotes: Leave +JavaScriptWrapImports: true +KeepEmptyLinesAtTheStartOfBlocks: false +MacroBlockBegin: '' +MacroBlockEnd: '' +MaxEmptyLinesToKeep: 1 +NamespaceIndentation: None +ObjCBinPackProtocolList: Never +ObjCBlockIndentWidth: 2 +ObjCBreakBeforeNestedBlockParam: true +ObjCSpaceAfterProperty: false +ObjCSpaceBeforeProtocolList: true +PenaltyBreakAssignment: 2 +PenaltyBreakBeforeFirstCallParameter: 1 +PenaltyBreakComment: 300 +PenaltyBreakFirstLessLess: 120 +PenaltyBreakString: 1000 +PenaltyBreakTemplateDeclaration: 10 +PenaltyExcessCharacter: 1000000 +PenaltyReturnTypeOnItsOwnLine: 200 +PointerAlignment: Left +RawStringFormats: + - Language: Cpp + Delimiters: + - cc + - CC + - cpp + - Cpp + - CPP + - 'c++' + - 'C++' + 
CanonicalDelimiter: '' + BasedOnStyle: google + - Language: TextProto + Delimiters: + - pb + - PB + - proto + - PROTO + EnclosingFunctions: + - EqualsProto + - EquivToProto + - PARSE_PARTIAL_TEXT_PROTO + - PARSE_TEST_PROTO + - PARSE_TEXT_PROTO + - ParseTextOrDie + - ParseTextProtoOrDie + - ParseTestProto + - ParsePartialTestProto + CanonicalDelimiter: '' + BasedOnStyle: google +ReflowComments: true +SortIncludes: true +SortUsingDeclarations: true +SpaceAfterCStyleCast: false +SpaceAfterLogicalNot: false +SpaceAfterTemplateKeyword: true +SpaceBeforeAssignmentOperators: true +SpaceBeforeCpp11BracedList: false +SpaceBeforeCtorInitializerColon: true +SpaceBeforeInheritanceColon: true +SpaceBeforeParens: ControlStatements +SpaceBeforeRangeBasedForLoopColon: true +SpaceInEmptyBlock: false +SpaceInEmptyParentheses: false +SpacesBeforeTrailingComments: 2 +SpacesInAngles: false +SpacesInConditionalStatement: false +SpacesInContainerLiterals: true +SpacesInCStyleCastParentheses: false +SpacesInParentheses: false +SpacesInSquareBrackets: false +SpaceBeforeSquareBrackets: false +Standard: Auto +StatementMacros: + - Q_UNUSED + - QT_REQUIRE_VERSION +TabWidth: 8 +UseCRLF: false +UseTab: Never +WhitespaceSensitiveMacros: + - STRINGIZE + - PP_STRINGIZE + - BOOST_PP_STRINGIZE +... + diff --git a/subprojects/gst-plugins-bad/sys/aja/README.md b/subprojects/gst-plugins-bad/sys/aja/README.md new file mode 100644 index 0000000000..0934a55d06 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/README.md @@ -0,0 +1,202 @@ +# GStreamer AJA source/sink plugin + +[GStreamer](https://gstreamer.freedesktop.org/) plugin for +[AJA](https://www.aja.com) capture and output cards. + +This plugin requires the AJA NTV2 SDK version 16 or newer. + +The location of the SDK can be configured via the `aja-sdk-dir` meson option. +If no location is given then the NTV2 SDK from +[GitHub](https://github.com/aja-video/ntv2.git) is compiled as a meson +subproject as part of the plugin. + +## Example usage + +Capture 1080p30 audio/video and display it locally + +```sh +gst-launch-1.0 ajasrc video-format=1080p-3000 ! ajasrcdemux name=d \ + d.video ! queue max-size-bytes=0 max-size-buffers=0 max-size-time=1000000000 ! videoconvert ! autovideosink \ + d.audio ! queue max-size-bytes=0 max-size-buffers=0 max-size-time=1000000000 ! audioconvert ! audioresample ! autoaudiosink +``` + +Output a 1080p2997 test audio/video stream + +```sh +gst-launch-1.0 videotestsrc pattern=ball ! video/x-raw,format=v210,width=1920,height=1080,framerate=30000/1001,interlace-mode=progressive ! timeoverlay ! timecodestamper ! combiner.video \ + audiotestsrc freq=440 ! audio/x-raw,format=S32LE,rate=48000,channels=16 ! audiobuffersplit output-buffer-duration=1/30 ! combiner.audio \ + ajasinkcombiner name=combiner ! ajasink channel=0 +``` + +Capture 1080p30 audio/video and directly output it again on the same card + +```sh +gst-launch-1.0 ajasrc video-format=1080p-3000 channel=1 input-source=sdi-1 audio-system=2 ! ajasrcdemux name=d \ + d.video ! queue max-size-bytes=0 max-size-buffers=0 max-size-time=1000000000 ! c.video \ + d.audio ! queue max-size-bytes=0 max-size-buffers=0 max-size-time=1000000000 ! c.audio \ + ajasinkcombiner name=c ! 
ajasink channel=0 reference-source=input-1 +``` + +## Configuration + +### Source + +``` + audio-source : Audio source to use + flags: readable, writable + Enum "GstAjaAudioSource" Default: 0, "Embedded" + (0): Embedded - embedded + (1): AES - aes + (2): Analog - analog + (3): HDMI - hdmi + (4): Microphone - mic + audio-system : Audio system to use + flags: readable, writable + Enum "GstAjaAudioSystem" Default: 0, "Auto (based on selected channel)" + (0): Auto (based on selected channel) - auto + (1): Audio system 1 - 1 + (2): Audio system 2 - 2 + (3): Audio system 3 - 3 + (4): Audio system 4 - 4 + (5): Audio system 5 - 5 + (6): Audio system 6 - 6 + (7): Audio system 7 - 7 + (8): Audio system 8 - 8 + capture-cpu-core : Sets the affinity of the capture thread to this CPU core (-1=disabled) + flags: readable, writable + Unsigned Integer. Range: 0 - 4294967295 Default: 4294967295 + channel : Channel to use + flags: readable, writable + Unsigned Integer. Range: 0 - 7 Default: 0 + device-identifier : Input device instance to use + flags: readable, writable + String. Default: "0" + input-source : Input source to use + flags: readable, writable + Enum "GstAjaInputSource" Default: 0, "Auto (based on selected channel)" + (0): Auto (based on selected channel) - auto + (1): Analog Input 1 - analog-1 + (6): SDI Input 1 - sdi-1 + (7): SDI Input 2 - sdi-2 + (8): SDI Input 3 - sdi-3 + (9): SDI Input 4 - sdi-4 + (10): SDI Input 5 - sdi-5 + (11): SDI Input 6 - sdi-6 + (12): SDI Input 7 - sdi-7 + (13): SDI Input 8 - sdi-8 + (2): HDMI Input 1 - hdmi-1 + (3): HDMI Input 2 - hdmi-2 + (4): HDMI Input 3 - hdmi-3 + (5): HDMI Input 4 - hdmi-4 + queue-size : Size of internal queue in number of video frames. Half of this is allocated as device buffers and equal to the latency. + flags: readable, writable + Unsigned Integer. 
Range: 1 - 2147483647 Default: 16 + reference-source : Reference source to use + flags: readable, writable + Enum "GstAjaReferenceSource" Default: 1, "Freerun" + (0): Auto - auto + (1): Freerun - freerun + (2): External - external + (3): SDI Input 1 - input-1 + (4): SDI Input 2 - input-2 + (5): SDI Input 3 - input-3 + (6): SDI Input 4 - input-4 + (7): SDI Input 5 - input-5 + (8): SDI Input 6 - input-6 + (9): SDI Input 7 - input-7 + (10): SDI Input 8 - input-8 + timecode-index : Timecode index to use + flags: readable, writable + Enum "GstAjaTimecodeIndex" Default: 0, "Embedded SDI ATC LTC" + (0): Embedded SDI VITC - vitc + (0): Embedded SDI ATC LTC - atc-ltc + (2): Analog LTC 1 - ltc-1 + (3): Analog LTC 2 - ltc-2 + video-format : Video format to use + flags: readable, writable + Enum "GstAjaVideoFormat" Default: 0, "1080i 5000" + (0): 1080i 5000 - 1080i-5000 + (1): 1080i 5994 - 1080i-5994 + (2): 1080i 6000 - 1080i-6000 + (3): 720p 5994 - 720p-5994 + (4): 720p 6000 - 720p-6000 + (5): 1080p 2997 - 1080p-2997 + (6): 1080p 3000 - 1080p-3000 + (7): 1080p 2500 - 1080p-2500 + (8): 1080p 2398 - 1080p-2398 + (9): 1080p 2400 - 1080p-2400 + (10): 720p 5000 - 720p-5000 + (11): 720p 2398 - 720p-2398 + (12): 720p 2500 - 720p-2500 + (13): 1080p 5000 A - 1080p-5000-a + (14): 1080p 5994 A - 1080p-5994-a + (15): 1080p 6000 A - 1080p-6000-a + (16): 625 5000 - 625-5000 + (17): 525 5994 - 525-5994 + (18): 525 2398 - 525-2398 + (19): 525 2400 - 525-2400 +``` + +### Sink + +``` + audio-system : Audio system to use + flags: readable, writable + Enum "GstAjaAudioSystem" Default: 0, "Auto (based on selected channel)" + (0): Auto (based on selected channel) - auto + (1): Audio system 1 - 1 + (2): Audio system 2 - 2 + (3): Audio system 3 - 3 + (4): Audio system 4 - 4 + (5): Audio system 5 - 5 + (6): Audio system 6 - 6 + (7): Audio system 7 - 7 + (8): Audio system 8 - 8 + channel : Channel to use + flags: readable, writable + Unsigned Integer. Range: 0 - 7 Default: 0 + device-identifier : Input device instance to use + flags: readable, writable + String. Default: "0" + output-cpu-core : Sets the affinity of the output thread to this CPU core (-1=disabled) + flags: readable, writable + Unsigned Integer. Range: 0 - 4294967295 Default: 4294967295 + output-destination : Output destination to use + flags: readable, writable + Enum "GstAjaOutputDestination" Default: 0, "Auto (based on selected channel)" + (0): Auto (based on selected channel) - auto + (1): Analog Output - analog + (2): SDI Output 1 - sdi-1 + (3): SDI Output 2 - sdi-2 + (4): SDI Output 3 - sdi-3 + (5): SDI Output 4 - sdi-4 + (6): SDI Output 5 - sdi-5 + (7): SDI Output 6 - sdi-6 + (8): SDI Output 7 - sdi-7 + (9): SDI Output 8 - sdi-8 + (10): HDMI Output - hdmi + queue-size : Size of internal queue in number of video frames. Half of this is allocated as device buffers and equal to the latency. + flags: readable, writable + Unsigned Integer. 
Range: 1 - 2147483647 Default: 16 + reference-source : Reference source to use + flags: readable, writable + Enum "GstAjaReferenceSource" Default: 0, "Auto" + (0): Auto - auto + (1): Freerun - freerun + (2): External - external + (3): SDI Input 1 - input-1 + (4): SDI Input 2 - input-2 + (5): SDI Input 3 - input-3 + (6): SDI Input 4 - input-4 + (7): SDI Input 5 - input-5 + (8): SDI Input 6 - input-6 + (9): SDI Input 7 - input-7 + (10): SDI Input 8 - input-8 + timecode-index : Timecode index to use + flags: readable, writable + Enum "GstAjaTimecodeIndex" Default: 0, "Embedded SDI ATC LTC" + (0): Embedded SDI VITC - vitc + (0): Embedded SDI ATC LTC - atc-ltc + (2): Analog LTC 1 - ltc-1 + (3): Analog LTC 2 - ltc-2 +``` diff --git a/subprojects/gst-plugins-bad/sys/aja/gstajacommon.cpp b/subprojects/gst-plugins-bad/sys/aja/gstajacommon.cpp new file mode 100644 index 0000000000..1fef9c8f13 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/gstajacommon.cpp @@ -0,0 +1,1181 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. 
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include
+#include
+
+#include "gstajacommon.h"
+
+GST_DEBUG_CATEGORY_STATIC(gst_aja_debug);
+
+#define GST_CAT_DEFAULT gst_aja_debug
+
+typedef struct {
+  GstAjaVideoFormat gst_format;
+  NTV2VideoFormat aja_format;
+  NTV2VideoFormat quad_format;
+} FormatMapEntry;
+
+static const FormatMapEntry format_map[] = {
+    {GST_AJA_VIDEO_FORMAT_1080i_5000, NTV2_FORMAT_1080i_5000,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080i_5994, NTV2_FORMAT_1080i_5994,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080i_6000, NTV2_FORMAT_1080i_6000,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_720p_5994, NTV2_FORMAT_720p_5994,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_720p_6000, NTV2_FORMAT_720p_6000,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080psf_2398, NTV2_FORMAT_1080psf_2398,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080psf_2400, NTV2_FORMAT_1080psf_2400,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_2997, NTV2_FORMAT_1080p_2997,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_3000, NTV2_FORMAT_1080p_3000,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_2500, NTV2_FORMAT_1080p_2500,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_2398, NTV2_FORMAT_1080p_2398,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_2400, NTV2_FORMAT_1080p_2400,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_720p_5000, NTV2_FORMAT_720p_5000,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_5000_A, NTV2_FORMAT_1080p_5000_A,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_5994_A, NTV2_FORMAT_1080p_5994_A,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_6000_A, NTV2_FORMAT_1080p_6000_A,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_720p_2398, NTV2_FORMAT_720p_2398,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_720p_2500, NTV2_FORMAT_720p_2500,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080psf_2500_2, NTV2_FORMAT_1080psf_2500_2,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080psf_2997_2, NTV2_FORMAT_1080psf_2997_2,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080psf_3000_2, NTV2_FORMAT_1080psf_3000_2,
+     NTV2_FORMAT_UNKNOWN},
+
+    {GST_AJA_VIDEO_FORMAT_625_5000, NTV2_FORMAT_625_5000, NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_525_5994, NTV2_FORMAT_525_5994, NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_525_2398, NTV2_FORMAT_525_2398, NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_525_2400, NTV2_FORMAT_525_2400, NTV2_FORMAT_UNKNOWN},
+
+    {GST_AJA_VIDEO_FORMAT_1080p_DCI_2398, NTV2_FORMAT_1080p_2K_2398,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_DCI_2400, NTV2_FORMAT_1080p_2K_2400,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_DCI_2500, NTV2_FORMAT_1080p_2K_2500,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_DCI_2997, NTV2_FORMAT_1080p_2K_2997,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_DCI_3000, NTV2_FORMAT_1080p_2K_3000,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_DCI_5000_A, NTV2_FORMAT_1080p_2K_5000_A,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_DCI_5994_A, NTV2_FORMAT_1080p_2K_5994_A,
+     NTV2_FORMAT_UNKNOWN},
+    {GST_AJA_VIDEO_FORMAT_1080p_DCI_6000_A, NTV2_FORMAT_1080p_2K_6000_A,
+     NTV2_FORMAT_UNKNOWN},
+
+    {GST_AJA_VIDEO_FORMAT_2160p_2398, NTV2_FORMAT_3840x2160p_2398,
+     NTV2_FORMAT_4x1920x1080p_2398},
+    {GST_AJA_VIDEO_FORMAT_2160p_2400, NTV2_FORMAT_3840x2160p_2400,
+     NTV2_FORMAT_4x1920x1080p_2400},
+    {GST_AJA_VIDEO_FORMAT_2160p_2500, NTV2_FORMAT_3840x2160p_2500,
+     NTV2_FORMAT_4x1920x1080p_2500},
{GST_AJA_VIDEO_FORMAT_2160p_2997, NTV2_FORMAT_3840x2160p_2997, + NTV2_FORMAT_4x1920x1080p_2997}, + {GST_AJA_VIDEO_FORMAT_2160p_3000, NTV2_FORMAT_3840x2160p_3000, + NTV2_FORMAT_4x1920x1080p_3000}, + {GST_AJA_VIDEO_FORMAT_2160p_5000, NTV2_FORMAT_3840x2160p_5000, + NTV2_FORMAT_4x1920x1080p_5000}, + {GST_AJA_VIDEO_FORMAT_2160p_5994, NTV2_FORMAT_3840x2160p_5994, + NTV2_FORMAT_4x1920x1080p_5994}, + {GST_AJA_VIDEO_FORMAT_2160p_6000, NTV2_FORMAT_3840x2160p_6000, + NTV2_FORMAT_4x1920x1080p_6000}, + + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2398, NTV2_FORMAT_4096x2160p_2398, + NTV2_FORMAT_4x2048x1080p_2398}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2400, NTV2_FORMAT_4096x2160p_2400, + NTV2_FORMAT_4x2048x1080p_2400}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2500, NTV2_FORMAT_4096x2160p_2500, + NTV2_FORMAT_4x2048x1080p_2500}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2997, NTV2_FORMAT_4096x2160p_2997, + NTV2_FORMAT_4x2048x1080p_2997}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_3000, NTV2_FORMAT_4096x2160p_3000, + NTV2_FORMAT_4x2048x1080p_3000}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_5000, NTV2_FORMAT_4096x2160p_5000, + NTV2_FORMAT_4x2048x1080p_5000}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_5994, NTV2_FORMAT_4096x2160p_5994, + NTV2_FORMAT_4x2048x1080p_5994}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_6000, NTV2_FORMAT_4096x2160p_6000, + NTV2_FORMAT_4x2048x1080p_6000}, + + {GST_AJA_VIDEO_FORMAT_4320p_2398, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_2398}, + {GST_AJA_VIDEO_FORMAT_4320p_2400, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_2400}, + {GST_AJA_VIDEO_FORMAT_4320p_2500, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_2500}, + {GST_AJA_VIDEO_FORMAT_4320p_2997, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_2997}, + {GST_AJA_VIDEO_FORMAT_4320p_3000, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_3000}, + {GST_AJA_VIDEO_FORMAT_4320p_5000, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_5000}, + {GST_AJA_VIDEO_FORMAT_4320p_5994, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_5994}, + {GST_AJA_VIDEO_FORMAT_4320p_6000, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x3840x2160p_6000}, + + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2398, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_2398}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2400, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_2400}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2500, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_2500}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2997, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_2997}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_3000, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_3000}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_5000, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_5000}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_5994, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_5994}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_6000, NTV2_FORMAT_UNKNOWN, + NTV2_FORMAT_4x4096x2160p_6000}, +}; + +GstCaps *gst_ntv2_supported_caps(NTV2DeviceID device_id) { + GstCaps *caps = gst_caps_new_empty(); + + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &format = format_map[i]; + GstCaps *tmp = NULL; + + if (device_id == DEVICE_ID_INVALID) { + tmp = gst_aja_video_format_to_caps(format.gst_format); + } else if ((format.aja_format != NTV2_FORMAT_UNKNOWN && + ::NTV2DeviceCanDoVideoFormat(device_id, format.aja_format)) || + (format.quad_format != NTV2_FORMAT_UNKNOWN && + ::NTV2DeviceCanDoVideoFormat(device_id, format.quad_format))) { + tmp = gst_aja_video_format_to_caps(format.gst_format); + } + + if (tmp) { + // Widescreen PAL/NTSC + if (format.gst_format == GST_AJA_VIDEO_FORMAT_525_2398 || + 
format.gst_format == GST_AJA_VIDEO_FORMAT_525_2400 || + format.gst_format == GST_AJA_VIDEO_FORMAT_525_5994) { + GstCaps *tmp2 = gst_caps_copy(tmp); + gst_caps_set_simple(tmp2, "pixel-aspect-ratio", GST_TYPE_FRACTION, 40, + 33, NULL); + gst_caps_append(tmp, tmp2); + } else if (format.gst_format == GST_AJA_VIDEO_FORMAT_625_5000) { + GstCaps *tmp2 = gst_caps_copy(tmp); + gst_caps_set_simple(tmp2, "pixel-aspect-ratio", GST_TYPE_FRACTION, 16, + 11, NULL); + gst_caps_append(tmp, tmp2); + } + + gst_caps_append(caps, tmp); + } + } + + return caps; +} + +GstCaps *gst_aja_video_format_to_caps(GstAjaVideoFormat format) { + const FormatMapEntry *entry = NULL; + + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &tmp = format_map[i]; + + if (tmp.gst_format == format) { + entry = &tmp; + break; + } + } + g_assert(entry != NULL); + + if (entry->aja_format != NTV2_FORMAT_UNKNOWN) + return gst_ntv2_video_format_to_caps(entry->aja_format); + if (entry->quad_format != NTV2_FORMAT_UNKNOWN) + return gst_ntv2_video_format_to_caps(entry->quad_format); + + g_assert_not_reached(); +} + +bool gst_video_info_from_aja_video_format(GstVideoInfo *info, + GstAjaVideoFormat format) { + const FormatMapEntry *entry = NULL; + + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &tmp = format_map[i]; + + if (tmp.gst_format == format) { + entry = &tmp; + break; + } + } + g_assert(entry != NULL); + + if (entry->aja_format != NTV2_FORMAT_UNKNOWN) + return gst_video_info_from_ntv2_video_format(info, entry->aja_format); + if (entry->quad_format != NTV2_FORMAT_UNKNOWN) + return gst_video_info_from_ntv2_video_format(info, entry->quad_format); + + g_assert_not_reached(); +} + +GstCaps *gst_ntv2_video_format_to_caps(NTV2VideoFormat format) { + GstVideoInfo info; + GstCaps *caps; + + if (!gst_video_info_from_ntv2_video_format(&info, format)) return NULL; + + caps = gst_video_info_to_caps(&info); + if (!caps) return caps; + + guint n = gst_caps_get_size(caps); + for (guint i = 0; i < n; i++) { + GstStructure *s = gst_caps_get_structure(caps, i); + + gst_structure_remove_fields(s, "chroma-site", "colorimetry", NULL); + } + + return caps; +} + +bool gst_video_info_from_ntv2_video_format(GstVideoInfo *info, + NTV2VideoFormat format) { + if (format == NTV2_FORMAT_UNKNOWN) return false; + + NTV2Standard standard = ::GetNTV2StandardFromVideoFormat(format); + guint width = ::GetDisplayWidth(format); + guint height = ::GetDisplayHeight(format); + NTV2FrameRate fps = ::GetNTV2FrameRateFromVideoFormat(format); + guint fps_n, fps_d; + ::GetFramesPerSecond(fps, fps_n, fps_d); + + gst_video_info_set_format(info, GST_VIDEO_FORMAT_v210, width, height); + info->fps_n = fps_n; + info->fps_d = fps_d; + if (NTV2_IS_525_FORMAT(format)) { + info->par_n = 10; + info->par_d = 11; + } else if (NTV2_IS_625_FORMAT(format)) { + info->par_n = 12; + info->par_d = 11; + } + info->interlace_mode = + (!::IsProgressivePicture(format) && !NTV2_IS_PSF_VIDEO_FORMAT(format)) + ? 
GST_VIDEO_INTERLACE_MODE_INTERLEAVED + : GST_VIDEO_INTERLACE_MODE_PROGRESSIVE; + + if (!::IsProgressiveTransport(format) && !NTV2_IS_PSF_VIDEO_FORMAT(format)) { + NTV2SmpteLineNumber line_number = ::GetSmpteLineNumber(standard); + + if (line_number.firstFieldTop) { + GST_VIDEO_INFO_FIELD_ORDER(info) = GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST; + } else { + GST_VIDEO_INFO_FIELD_ORDER(info) = + GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST; + } + } + + return true; +} + +NTV2VideoFormat gst_ntv2_video_format_from_caps(const GstCaps *caps, + bool quad) { + GstVideoInfo info; + + if (!gst_video_info_from_caps(&info, caps)) return NTV2_FORMAT_UNKNOWN; + + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &format = format_map[i]; + NTV2VideoFormat f = !quad ? format.aja_format : format.quad_format; + + if (f == NTV2_FORMAT_UNKNOWN) continue; + + guint width = ::GetDisplayWidth(f); + guint height = ::GetDisplayHeight(f); + NTV2FrameRate fps = ::GetNTV2FrameRateFromVideoFormat(f); + guint fps_n, fps_d; + ::GetFramesPerSecond(fps, fps_n, fps_d); + + if (width == (guint)info.width && height == (guint)info.height && + (guint)info.fps_n == fps_n && (guint)info.fps_d == fps_d && + ((!::IsProgressiveTransport(f) && + info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED) || + (::IsProgressiveTransport(f) && + info.interlace_mode == GST_VIDEO_INTERLACE_MODE_PROGRESSIVE))) + return f; + } + + return NTV2_FORMAT_UNKNOWN; +} + +GstAjaVideoFormat gst_aja_video_format_from_caps(const GstCaps *caps) { + GstVideoInfo info; + + if (!gst_video_info_from_caps(&info, caps)) + return GST_AJA_VIDEO_FORMAT_INVALID; + + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &format = format_map[i]; + NTV2VideoFormat f = (format.aja_format != NTV2_FORMAT_UNKNOWN) + ? 
format.aja_format + : format.quad_format; + + if (f == NTV2_FORMAT_UNKNOWN) continue; + + guint width = ::GetDisplayWidth(f); + guint height = ::GetDisplayHeight(f); + NTV2FrameRate fps = ::GetNTV2FrameRateFromVideoFormat(f); + guint fps_n, fps_d; + ::GetFramesPerSecond(fps, fps_n, fps_d); + + if (width == (guint)info.width && height == (guint)info.height && + (guint)info.fps_n == fps_n && (guint)info.fps_d == fps_d && + ((!::IsProgressiveTransport(f) && + info.interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED) || + (::IsProgressiveTransport(f) && + info.interlace_mode == GST_VIDEO_INTERLACE_MODE_PROGRESSIVE))) + return format.gst_format; + } + + return GST_AJA_VIDEO_FORMAT_INVALID; +} + +GstAjaVideoFormat gst_aja_video_format_from_ntv2_format( + NTV2VideoFormat format) { + if (format == NTV2_FORMAT_UNKNOWN) return GST_AJA_VIDEO_FORMAT_INVALID; + + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &entry = format_map[i]; + if (entry.aja_format == format || entry.quad_format == format) + return entry.gst_format; + } + + return GST_AJA_VIDEO_FORMAT_INVALID; +} + +NTV2VideoFormat gst_ntv2_video_format_from_aja_format(GstAjaVideoFormat format, + bool quad) { + if (format == GST_AJA_VIDEO_FORMAT_INVALID) return NTV2_FORMAT_UNKNOWN; + + for (gsize i = 0; i < G_N_ELEMENTS(format_map); i++) { + const FormatMapEntry &entry = format_map[i]; + if (entry.gst_format == format) { + if (!quad && entry.aja_format != NTV2_FORMAT_UNKNOWN) + return entry.aja_format; + if (quad && entry.quad_format != NTV2_FORMAT_UNKNOWN) + return entry.quad_format; + } + } + + return NTV2_FORMAT_UNKNOWN; +} + +bool gst_ntv2_video_format_is_quad(NTV2VideoFormat format) { + return (format >= NTV2_FORMAT_FIRST_4K_DEF_FORMAT && + format < NTV2_FORMAT_END_4K_DEF_FORMATS) || + (format >= NTV2_FORMAT_FIRST_4K_DEF_FORMAT2 && + format < NTV2_FORMAT_END_4K_DEF_FORMATS2) || + (format >= NTV2_FORMAT_FIRST_UHD2_DEF_FORMAT && + format < NTV2_FORMAT_END_UHD2_DEF_FORMATS) || + (format >= NTV2_FORMAT_FIRST_UHD2_FULL_DEF_FORMAT && + format < NTV2_FORMAT_END_UHD2_FULL_DEF_FORMATS); +} + +GType gst_aja_audio_meta_api_get_type(void) { + static GType type; + + if (g_once_init_enter(&type)) { + static const gchar *tags[] = {NULL}; + GType _type = gst_meta_api_type_register("GstAjaAudioMetaAPI", tags); + GST_INFO("registering"); + g_once_init_leave(&type, _type); + } + return type; +} + +static gboolean gst_aja_audio_meta_transform(GstBuffer *dest, GstMeta *meta, + GstBuffer *buffer, GQuark type, + gpointer data) { + GstAjaAudioMeta *dmeta, *smeta; + + if (GST_META_TRANSFORM_IS_COPY(type)) { + smeta = (GstAjaAudioMeta *)meta; + + GST_TRACE("copy AJA audio metadata"); + dmeta = gst_buffer_add_aja_audio_meta(dest, smeta->buffer); + if (!dmeta) return FALSE; + } else { + /* return FALSE, if transform type is not supported */ + return FALSE; + } + return TRUE; +} + +static gboolean gst_aja_audio_meta_init(GstMeta *meta, gpointer params, + GstBuffer *buffer) { + GstAjaAudioMeta *emeta = (GstAjaAudioMeta *)meta; + + emeta->buffer = NULL; + + return TRUE; +} + +static void gst_aja_audio_meta_free(GstMeta *meta, GstBuffer *buffer) { + GstAjaAudioMeta *emeta = (GstAjaAudioMeta *)meta; + + gst_buffer_replace(&emeta->buffer, NULL); +} + +const GstMetaInfo *gst_aja_audio_meta_get_info(void) { + static const GstMetaInfo *meta_info = NULL; + + if (g_once_init_enter((GstMetaInfo **)&meta_info)) { + const GstMetaInfo *mi = gst_meta_register( + GST_AJA_AUDIO_META_API_TYPE, "GstAjaAudioMeta", sizeof(GstAjaAudioMeta), + 
gst_aja_audio_meta_init, gst_aja_audio_meta_free, + gst_aja_audio_meta_transform); + g_once_init_leave((GstMetaInfo **)&meta_info, (GstMetaInfo *)mi); + } + return meta_info; +} + +GstAjaAudioMeta *gst_buffer_add_aja_audio_meta(GstBuffer *buffer, + GstBuffer *audio_buffer) { + GstAjaAudioMeta *meta; + + g_return_val_if_fail(buffer != NULL, NULL); + g_return_val_if_fail(audio_buffer != NULL, NULL); + + meta = (GstAjaAudioMeta *)gst_buffer_add_meta(buffer, GST_AJA_AUDIO_META_INFO, + NULL); + + meta->buffer = gst_buffer_ref(audio_buffer); + + return meta; +} + +typedef struct { + GstMemory mem; + + guint8 *data; +} GstAjaMemory; + +typedef struct { + guint8 *data; + gsize size; +} FreedMemory; + +G_DEFINE_TYPE(GstAjaAllocator, gst_aja_allocator, GST_TYPE_ALLOCATOR); + +static inline void _aja_memory_init(GstAjaAllocator *alloc, GstAjaMemory *mem, + GstMemoryFlags flags, GstMemory *parent, + gpointer data, gsize maxsize, gsize offset, + gsize size) { + gst_memory_init(GST_MEMORY_CAST(mem), flags, GST_ALLOCATOR(alloc), parent, + maxsize, 4095, offset, size); + + mem->data = (guint8 *)data; +} + +static inline GstAjaMemory *_aja_memory_new(GstAjaAllocator *alloc, + GstMemoryFlags flags, + GstAjaMemory *parent, gpointer data, + gsize maxsize, gsize offset, + gsize size) { + GstAjaMemory *mem; + + mem = (GstAjaMemory *)g_new0(GstAjaMemory, 1); + _aja_memory_init(alloc, mem, flags, (GstMemory *)parent, data, maxsize, + offset, size); + + return mem; +} + +static GstAjaMemory *_aja_memory_new_block(GstAjaAllocator *alloc, + GstMemoryFlags flags, gsize maxsize, + gsize offset, gsize size) { + GstAjaMemory *mem; + guint8 *data = NULL; + + mem = (GstAjaMemory *)g_new0(GstAjaMemory, 1); + + GST_OBJECT_LOCK(alloc); + guint n = gst_queue_array_get_length(alloc->freed_mems); + for (guint i = 0; i < n; i++) { + FreedMemory *fmem = + (FreedMemory *)gst_queue_array_peek_nth_struct(alloc->freed_mems, i); + + if (fmem->size == size) { + data = fmem->data; + GST_TRACE_OBJECT( + alloc, "Using cached freed memory of size %" G_GSIZE_FORMAT " at %p", + fmem->size, fmem->data); + gst_queue_array_drop_struct(alloc->freed_mems, i, NULL); + break; + } + } + GST_OBJECT_UNLOCK(alloc); + + if (!data) { + data = (guint8 *)AJAMemory::AllocateAligned(maxsize, 4096); + GST_TRACE_OBJECT(alloc, + "Allocated memory of size %" G_GSIZE_FORMAT " at %p", + maxsize, data); + if (!alloc->device->device->DMABufferLock((ULWord *)data, maxsize, true)) { + GST_WARNING_OBJECT(alloc, "Failed to pre-lock memory"); + } + } + + _aja_memory_init(alloc, mem, flags, NULL, data, maxsize, offset, size); + + return mem; +} + +static gpointer _aja_memory_map(GstAjaMemory *mem, gsize maxsize, + GstMapFlags flags) { + return mem->data; +} + +static gboolean _aja_memory_unmap(GstAjaMemory *mem) { return TRUE; } + +static GstMemory *_aja_memory_copy(GstAjaMemory *mem, gssize offset, + gsize size) { + GstMemory *copy; + GstMapInfo map; + + if (size == (gsize)-1) + size = mem->mem.size > (gsize)offset ? 
mem->mem.size - offset : 0; + + copy = gst_allocator_alloc(mem->mem.allocator, size, NULL); + gst_memory_map(copy, &map, GST_MAP_READ); + GST_DEBUG("memcpy %" G_GSIZE_FORMAT " memory %p -> %p", size, mem, copy); + memcpy(map.data, mem->data + mem->mem.offset + offset, size); + gst_memory_unmap(copy, &map); + + return copy; +} + +static GstAjaMemory *_aja_memory_share(GstAjaMemory *mem, gssize offset, + gsize size) { + GstAjaMemory *sub; + GstAjaMemory *parent; + + /* find the real parent */ + if ((parent = (GstAjaMemory *)mem->mem.parent) == NULL) + parent = (GstAjaMemory *)mem; + + if (size == (gsize)-1) size = mem->mem.size - offset; + + sub = _aja_memory_new(GST_AJA_ALLOCATOR(parent->mem.allocator), + (GstMemoryFlags)(GST_MINI_OBJECT_FLAGS(parent) | + GST_MINI_OBJECT_FLAG_LOCK_READONLY), + parent, parent->data, mem->mem.maxsize, + mem->mem.offset + offset, size); + + return sub; +} + +static GstMemory *gst_aja_allocator_alloc(GstAllocator *alloc, gsize size, + GstAllocationParams *params) { + g_warn_if_fail(params->prefix == 0); + g_warn_if_fail(params->padding == 0); + + return (GstMemory *)_aja_memory_new_block(GST_AJA_ALLOCATOR(alloc), + params->flags, size, 0, size); +} + +static void gst_aja_allocator_free(GstAllocator *alloc, GstMemory *mem) { + GstAjaMemory *dmem = (GstAjaMemory *)mem; + + if (!mem->parent) { + GstAjaAllocator *aja_alloc = GST_AJA_ALLOCATOR(alloc); + + GST_OBJECT_LOCK(aja_alloc); + while (gst_queue_array_get_length(aja_alloc->freed_mems) > 8) { + FreedMemory *fmem = + (FreedMemory *)gst_queue_array_pop_head_struct(aja_alloc->freed_mems); + + GST_TRACE_OBJECT( + alloc, "Freeing cached memory of size %" G_GSIZE_FORMAT " at %p", + fmem->size, fmem->data); + aja_alloc->device->device->DMABufferUnlock((ULWord *)fmem->data, + fmem->size); + AJAMemory::FreeAligned(fmem->data); + } + + FreedMemory fmem; + GST_TRACE_OBJECT(alloc, + "Caching freed memory of size %" G_GSIZE_FORMAT " at %p", + mem->maxsize, dmem->data); + fmem.data = dmem->data; + fmem.size = mem->size; + gst_queue_array_push_tail_struct(aja_alloc->freed_mems, &fmem); + GST_OBJECT_UNLOCK(aja_alloc); + } + + g_free(dmem); +} + +static void gst_aja_allocator_finalize(GObject *alloc) { + GstAjaAllocator *aja_alloc = GST_AJA_ALLOCATOR(alloc); + + GST_DEBUG_OBJECT(alloc, "Freeing allocator"); + + FreedMemory *mem; + while ((mem = (FreedMemory *)gst_queue_array_pop_head_struct( + aja_alloc->freed_mems))) { + GST_TRACE_OBJECT(alloc, "Freeing cached memory at %p", mem->data); + aja_alloc->device->device->DMABufferUnlock((ULWord *)mem->data, mem->size); + AJAMemory::FreeAligned(mem->data); + } + + gst_aja_ntv2_device_unref(aja_alloc->device); + + G_OBJECT_CLASS(gst_aja_allocator_parent_class)->finalize(alloc); +} + +static void gst_aja_allocator_class_init(GstAjaAllocatorClass *klass) { + GObjectClass *gobject_class; + GstAllocatorClass *allocator_class; + + gobject_class = (GObjectClass *)klass; + allocator_class = (GstAllocatorClass *)klass; + + gobject_class->finalize = gst_aja_allocator_finalize; + + allocator_class->alloc = gst_aja_allocator_alloc; + allocator_class->free = gst_aja_allocator_free; +} + +static void gst_aja_allocator_init(GstAjaAllocator *aja_alloc) { + GstAllocator *alloc = GST_ALLOCATOR_CAST(aja_alloc); + + alloc->mem_type = GST_AJA_ALLOCATOR_MEMTYPE; + alloc->mem_map = (GstMemoryMapFunction)_aja_memory_map; + alloc->mem_unmap = (GstMemoryUnmapFunction)_aja_memory_unmap; + alloc->mem_copy = (GstMemoryCopyFunction)_aja_memory_copy; + alloc->mem_share = (GstMemoryShareFunction)_aja_memory_share; 
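+  // Note: map/unmap above are effectively no-ops because the backing memory
+  // is plain page-aligned host memory that was DMA-locked on the device when
+  // it was allocated; copy allocates a new block from this allocator and
+  // memcpy()s into it, and share hands out sub-memories that point into the
+  // DMA-locked parent block.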
+}
+
+GstAllocator *gst_aja_allocator_new(GstAjaNtv2Device *device) {
+  GstAjaAllocator *alloc =
+      (GstAjaAllocator *)g_object_new(GST_TYPE_AJA_ALLOCATOR, NULL);
+
+  alloc->device = gst_aja_ntv2_device_ref(device);
+  alloc->freed_mems = gst_queue_array_new_for_struct(sizeof(FreedMemory), 16);
+
+  GST_DEBUG_OBJECT(alloc, "Creating allocator for device %d",
+                   device->device->GetIndexNumber());
+
+  return GST_ALLOCATOR(alloc);
+}
+
+GstAjaNtv2Device *gst_aja_ntv2_device_obtain(const gchar *device_identifier) {
+  CNTV2Card *device = new CNTV2Card();
+
+  if (!CNTV2DeviceScanner::GetFirstDeviceFromArgument(device_identifier,
+                                                      *device)) {
+    delete device;
+    return NULL;
+  }
+
+  GstAjaNtv2Device *dev = g_atomic_rc_box_new0(GstAjaNtv2Device);
+  dev->device = device;
+
+  return dev;
+}
+
+GstAjaNtv2Device *gst_aja_ntv2_device_ref(GstAjaNtv2Device *device) {
+  return (GstAjaNtv2Device *)g_atomic_rc_box_acquire(device);
+}
+
+void gst_aja_ntv2_device_unref(GstAjaNtv2Device *device) {
+  g_atomic_rc_box_release_full(device, [](gpointer data) {
+    GstAjaNtv2Device *dev = (GstAjaNtv2Device *)data;
+
+    delete dev->device;
+  });
+}
+
+static gpointer init_setup_mutex(gpointer data) {
+  sem_t *s = SEM_FAILED;
+  s = sem_open("/gstreamer-aja-sem", O_CREAT, S_IRUSR | S_IWUSR, 1);
+  if (s == SEM_FAILED) {
+    g_critical("Failed to create SHM semaphore for GStreamer AJA plugin: %s",
+               g_strerror(errno));
+  }
+  return s;
+}
+
+static sem_t *get_setup_mutex(void) {
+  static GOnce once = G_ONCE_INIT;
+
+  g_once(&once, init_setup_mutex, NULL);
+
+  return (sem_t *)once.retval;
+}
+
+ShmMutexLocker::ShmMutexLocker() {
+  sem_t *s = get_setup_mutex();
+  if (s != SEM_FAILED) sem_wait(s);
+}
+
+ShmMutexLocker::~ShmMutexLocker() {
+  sem_t *s = get_setup_mutex();
+  if (s != SEM_FAILED) sem_post(s);
+}
+
+static guint gst_aja_device_get_frame_multiplier(GstAjaNtv2Device *device,
+                                                 NTV2Channel channel) {
+  // quad formats use 4x as many frames, quad-quad formats 8x
+  bool quad_enabled = false;
+  device->device->GetQuadFrameEnable(quad_enabled, channel);
+  bool quad_quad_enabled = false;
+  device->device->GetQuadQuadFrameEnable(quad_quad_enabled, channel);
+
+  NTV2VideoFormat format = NTV2_FORMAT_UNKNOWN;
+  device->device->GetVideoFormat(format, channel);
+
+  GST_TRACE("Channel %d uses mode %d (quad: %d, quad quad: %d)", (gint)channel,
+            (gint)format, quad_enabled, quad_quad_enabled);
+
+  // Similarly, 2k/UHD use 4x as many frames and 4k/UHD2 use 8x as many
+  // frames
+  if (format != NTV2_FORMAT_UNKNOWN) {
+    guint width = ::GetDisplayWidth(format);
+    guint height = ::GetDisplayHeight(format);
+
+    if (height <= 1080 && width <= 1920) {
+      // SD and HD but not 2k!
+    } else if (height <= 2160 && width <= 3840) {
+      // 2k and UHD but not 4k
+      quad_enabled = true;
+    } else if (height <= 4320 && width <= 7680) {
+      // 4k and UHD2 but not 8k
+      quad_quad_enabled = true;
+    } else {
+      // 8k FIXME
+      quad_quad_enabled = true;
+    }
+  }
+
+  if (quad_enabled) {
+    g_assert(!quad_quad_enabled);
+
+    return 4;
+  } else if (quad_quad_enabled) {
+    g_assert(!quad_enabled);
+
+    return 8;
+  }
+
+  return 1;
+}
+
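+// Example: for a UHD format the multiplier above is 4, so an AutoCirculate
+// allocation reported as frames 2-5 actually occupies HD-sized frame numbers
+// 8-23. The search below therefore works on HD-sized frame numbers and
+// converts back to device frame numbers at the very end.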
+// Returns -1 on failure or otherwise the start_frame.
+// end_frame would be start_frame + frame_count - 1
+gint gst_aja_ntv2_device_find_unallocated_frames(GstAjaNtv2Device *device,
+                                                 NTV2Channel channel,
+                                                 guint frame_count) {
+  g_assert(frame_count != 0);
+  g_assert(device != NULL);
+  g_assert(device->device->IsOpen());
+
+  // Adapted from CNTV2Card::FindUnallocatedFrames() with
+  // quad/quad-quad/UHD/UHD2 support
+  std::set<guint16> used_frames;
+
+  for (NTV2Channel c = ::NTV2_CHANNEL1; c < NTV2_MAX_NUM_CHANNELS;
+       c = (NTV2Channel)(c + 1)) {
+    AUTOCIRCULATE_STATUS ac_status;
+
+    if (device->device->AutoCirculateGetStatus(c, ac_status) &&
+        !ac_status.IsStopped()) {
+      guint16 start_frame = ac_status.GetStartFrame();
+      guint16 end_frame = ac_status.GetEndFrame();
+
+      guint multiplier = gst_aja_device_get_frame_multiplier(device, c);
+
+      GST_TRACE("Channel %d uses frames %u-%u (multiplier: %u)", c, start_frame,
+                end_frame, multiplier);
+
+      start_frame *= multiplier;
+      end_frame *= multiplier;
+      end_frame += (multiplier - 1);
+
+      GST_TRACE("Channel %d uses HD frames %u-%u", c, start_frame, end_frame);
+      for (guint16 i = start_frame; i <= end_frame; i++) {
+        used_frames.insert(i);
+      }
+    }
+  }
+
+  guint multiplier = gst_aja_device_get_frame_multiplier(device, channel);
+  frame_count *= multiplier;
+
+  const guint16 last_frame =
+      ::NTV2DeviceGetNumberFrameBuffers(device->device->GetDeviceID()) - 1;
+  guint16 start_frame = 0;
+  guint16 end_frame = start_frame + frame_count - 1;
+
+  auto iter = used_frames.cbegin();
+  while (iter != used_frames.cend()) {
+    guint16 allocated_start_frame = *iter;
+    guint16 allocated_end_frame = allocated_start_frame;
+
+    // Find end of the allocation
+    while (++iter != used_frames.cend() && *iter == (allocated_end_frame + 1))
+      allocated_end_frame++;
+
+    // Free block before this allocation
+    if (start_frame < allocated_start_frame &&
+        end_frame < allocated_start_frame)
+      break;
+
+    // Move after this allocation and check if there is enough space before
+    // the next allocation
+    start_frame = GST_ROUND_UP_N(allocated_end_frame + 1, multiplier);
+    end_frame = start_frame + frame_count - 1;
+  }
+
+  // If above we moved after the end of the available frames error out
+  if (start_frame > last_frame || end_frame > last_frame) {
+    GST_WARNING("Did not find a contiguous unused range of %u frames",
+                frame_count);
+    return -1;
+  }
+
+  // Otherwise we have enough space after the last allocation
+  GST_INFO("Using HD frames %u-%u", start_frame, end_frame);
+  GST_INFO("Using frames %u-%u", start_frame / multiplier,
+           start_frame / multiplier + frame_count / multiplier - 1);
+
+  return start_frame / multiplier;
+}
+
+GType gst_aja_audio_system_get_type(void) {
+  static gsize id = 0;
+  static const GEnumValue modes[] = {
+      {GST_AJA_AUDIO_SYSTEM_AUTO, "auto", "Auto (based on selected channel)"},
+      {GST_AJA_AUDIO_SYSTEM_1, "1", "Audio system 1"},
+      {GST_AJA_AUDIO_SYSTEM_2, "2", "Audio system 2"},
+      {GST_AJA_AUDIO_SYSTEM_3, "3", "Audio system 3"},
+      {GST_AJA_AUDIO_SYSTEM_4, "4", "Audio system 4"},
+      {GST_AJA_AUDIO_SYSTEM_5, "5", "Audio system 5"},
+      {GST_AJA_AUDIO_SYSTEM_6, "6", "Audio system 6"},
+      {GST_AJA_AUDIO_SYSTEM_7, "7", "Audio system 7"},
+      {GST_AJA_AUDIO_SYSTEM_8, "8", "Audio system 8"},
+      {0, NULL, NULL}};
+
+  if (g_once_init_enter(&id)) {
+    GType tmp = g_enum_register_static("GstAjaAudioSystem", modes);
+    g_once_init_leave(&id, tmp);
+  }
+
+  return (GType)id;
+}
+
+GType gst_aja_output_destination_get_type(void) {
+  static gsize id = 0;
+  static const GEnumValue modes[] = {
+      {GST_AJA_OUTPUT_DESTINATION_AUTO, "auto",
"Auto (based on selected channel)"}, + {GST_AJA_OUTPUT_DESTINATION_ANALOG, "analog", "Analog Output"}, + {GST_AJA_OUTPUT_DESTINATION_SDI1, "sdi-1", "SDI Output 1"}, + {GST_AJA_OUTPUT_DESTINATION_SDI2, "sdi-2", "SDI Output 2"}, + {GST_AJA_OUTPUT_DESTINATION_SDI3, "sdi-3", "SDI Output 3"}, + {GST_AJA_OUTPUT_DESTINATION_SDI4, "sdi-4", "SDI Output 4"}, + {GST_AJA_OUTPUT_DESTINATION_SDI5, "sdi-5", "SDI Output 5"}, + {GST_AJA_OUTPUT_DESTINATION_SDI6, "sdi-6", "SDI Output 6"}, + {GST_AJA_OUTPUT_DESTINATION_SDI7, "sdi-7", "SDI Output 7"}, + {GST_AJA_OUTPUT_DESTINATION_SDI8, "sdi-8", "SDI Output 8"}, + {GST_AJA_OUTPUT_DESTINATION_HDMI, "hdmi", "HDMI Output"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaOutputDestination", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_reference_source_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_REFERENCE_SOURCE_AUTO, "auto", "Auto"}, + {GST_AJA_REFERENCE_SOURCE_FREERUN, "freerun", "Freerun"}, + {GST_AJA_REFERENCE_SOURCE_EXTERNAL, "external", "External"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_1, "input-1", "SDI Input 1"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_2, "input-2", "SDI Input 2"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_3, "input-3", "SDI Input 3"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_4, "input-4", "SDI Input 4"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_5, "input-5", "SDI Input 5"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_6, "input-6", "SDI Input 6"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_7, "input-7", "SDI Input 7"}, + {GST_AJA_REFERENCE_SOURCE_INPUT_8, "input-8", "SDI Input 8"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaReferenceSource", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_input_source_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_INPUT_SOURCE_AUTO, "auto", "Auto (based on selected channel)"}, + {GST_AJA_INPUT_SOURCE_ANALOG1, "analog-1", "Analog Input 1"}, + {GST_AJA_INPUT_SOURCE_SDI1, "sdi-1", "SDI Input 1"}, + {GST_AJA_INPUT_SOURCE_SDI2, "sdi-2", "SDI Input 2"}, + {GST_AJA_INPUT_SOURCE_SDI3, "sdi-3", "SDI Input 3"}, + {GST_AJA_INPUT_SOURCE_SDI4, "sdi-4", "SDI Input 4"}, + {GST_AJA_INPUT_SOURCE_SDI5, "sdi-5", "SDI Input 5"}, + {GST_AJA_INPUT_SOURCE_SDI6, "sdi-6", "SDI Input 6"}, + {GST_AJA_INPUT_SOURCE_SDI7, "sdi-7", "SDI Input 7"}, + {GST_AJA_INPUT_SOURCE_SDI8, "sdi-8", "SDI Input 8"}, + {GST_AJA_INPUT_SOURCE_HDMI1, "hdmi-1", "HDMI Input 1"}, + {GST_AJA_INPUT_SOURCE_HDMI2, "hdmi-2", "HDMI Input 2"}, + {GST_AJA_INPUT_SOURCE_HDMI3, "hdmi-3", "HDMI Input 3"}, + {GST_AJA_INPUT_SOURCE_HDMI4, "hdmi-4", "HDMI Input 4"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaInputSource", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_sdi_mode_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_SDI_MODE_SINGLE_LINK, "single-link", "Single Link"}, + {GST_AJA_SDI_MODE_QUAD_LINK_SQD, "quad-link-sqd", "Quad Link SQD"}, + {GST_AJA_SDI_MODE_QUAD_LINK_TSI, "quad-link-tsi", "Quad Link TSI"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaSdiMode", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_video_format_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_VIDEO_FORMAT_AUTO, 
"auto", "Auto detect format"}, + {GST_AJA_VIDEO_FORMAT_1080i_5000, "1080i-5000", "1080i 5000"}, + {GST_AJA_VIDEO_FORMAT_1080i_5994, "1080i-5994", "1080i 5994"}, + {GST_AJA_VIDEO_FORMAT_1080i_6000, "1080i-6000", "1080i 6000"}, + {GST_AJA_VIDEO_FORMAT_720p_5994, "720p-5994", "720p 5994"}, + {GST_AJA_VIDEO_FORMAT_720p_6000, "720p-6000", "720p 6000"}, + {GST_AJA_VIDEO_FORMAT_1080psf_2398, "1080psf-2398", "1080psf 2398"}, + {GST_AJA_VIDEO_FORMAT_1080psf_2400, "1080psf-2400", "1080psf 2400"}, + {GST_AJA_VIDEO_FORMAT_1080p_2997, "1080p-2997", "1080p 2997"}, + {GST_AJA_VIDEO_FORMAT_1080p_3000, "1080p-3000", "1080p 3000"}, + {GST_AJA_VIDEO_FORMAT_1080p_2500, "1080p-2500", "1080p 2500"}, + {GST_AJA_VIDEO_FORMAT_1080p_2398, "1080p-2398", "1080p 2398"}, + {GST_AJA_VIDEO_FORMAT_1080p_2400, "1080p-2400", "1080p 2400"}, + {GST_AJA_VIDEO_FORMAT_720p_5000, "720p-5000", "720p 5000"}, + {GST_AJA_VIDEO_FORMAT_1080p_5000_A, "1080p-5000-a", "1080p 5000 A"}, + {GST_AJA_VIDEO_FORMAT_1080p_5994_A, "1080p-5994-a", "1080p 5994 A"}, + {GST_AJA_VIDEO_FORMAT_1080p_6000_A, "1080p-6000-a", "1080p 6000 A"}, + {GST_AJA_VIDEO_FORMAT_720p_2398, "720p-2398", "720p 2398"}, + {GST_AJA_VIDEO_FORMAT_720p_2500, "720p-2500", "720p 2500"}, + {GST_AJA_VIDEO_FORMAT_1080psf_2500_2, "1080psf-2500-2", "1080psf 2500 2"}, + {GST_AJA_VIDEO_FORMAT_1080psf_2997_2, "1080psf-2997-2", "1080psf 2997 2"}, + {GST_AJA_VIDEO_FORMAT_1080psf_3000_2, "1080psf-3000-2", "1080psf 3000 2"}, + + {GST_AJA_VIDEO_FORMAT_625_5000, "625-5000", "625 5000"}, + {GST_AJA_VIDEO_FORMAT_525_5994, "525-5994", "525 5994"}, + {GST_AJA_VIDEO_FORMAT_525_2398, "525-2398", "525 2398"}, + {GST_AJA_VIDEO_FORMAT_525_2400, "525-2400", "525 2400"}, + + {GST_AJA_VIDEO_FORMAT_1080p_DCI_2398, "1080p-dci-2398", "1080p DCI 2398"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_2400, "1080p-dci-2400", "1080p DCI 2400"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_2500, "1080p-dci-2500", "1080p DCI 2500"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_2997, "1080p-dci-2997", "1080p DCI 2997"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_3000, "1080p-dci-3000", "1080p DCI 3000"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_5000_A, "1080p-dci-5000-a", + "1080p DCI 5000 A"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_5994_A, "1080p-dci-5994-a", + "1080p DCI 5994 A"}, + {GST_AJA_VIDEO_FORMAT_1080p_DCI_6000_A, "1080p-dci-6000-a", + "1080p DCI 6000 A"}, + + {GST_AJA_VIDEO_FORMAT_2160p_2398, "2160p-2398", "2160p 2398"}, + {GST_AJA_VIDEO_FORMAT_2160p_2400, "2160p-2400", "2160p 2400"}, + {GST_AJA_VIDEO_FORMAT_2160p_2500, "2160p-2500", "2160p 2500"}, + {GST_AJA_VIDEO_FORMAT_2160p_2997, "2160p-2997", "2160p 2997"}, + {GST_AJA_VIDEO_FORMAT_2160p_3000, "2160p-3000", "2160p 3000"}, + {GST_AJA_VIDEO_FORMAT_2160p_5000, "2160p-5000", "2160p 5000"}, + {GST_AJA_VIDEO_FORMAT_2160p_5994, "2160p-5994", "2160p 5994"}, + {GST_AJA_VIDEO_FORMAT_2160p_6000, "2160p-6000", "2160p 6000"}, + + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2398, "2160p-dci-2398", "2160p DCI 2398"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2400, "2160p-dci-2400", "2160p DCI 2400"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2500, "2160p-dci-2500", "2160p DCI 2500"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_2997, "2160p-dci-2997", "2160p DCI 2997"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_3000, "2160p-dci-3000", "2160p DCI 3000"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_5000, "2160p-dci-5000", "2160p DCI 5000"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_5994, "2160p-dci-5994", "2160p DCI 5994"}, + {GST_AJA_VIDEO_FORMAT_2160p_DCI_6000, "2160p-dci-6000", "2160p DCI 6000"}, + + {GST_AJA_VIDEO_FORMAT_4320p_2398, "4320p-2398", "4320p 2398"}, + 
{GST_AJA_VIDEO_FORMAT_4320p_2400, "4320p-2400", "4320p 2400"}, + {GST_AJA_VIDEO_FORMAT_4320p_2500, "4320p-2500", "4320p 2500"}, + {GST_AJA_VIDEO_FORMAT_4320p_2997, "4320p-2997", "4320p 2997"}, + {GST_AJA_VIDEO_FORMAT_4320p_3000, "4320p-3000", "4320p 3000"}, + {GST_AJA_VIDEO_FORMAT_4320p_5000, "4320p-5000", "4320p 5000"}, + {GST_AJA_VIDEO_FORMAT_4320p_5994, "4320p-5994", "4320p 5994"}, + {GST_AJA_VIDEO_FORMAT_4320p_6000, "4320p-6000", "4320p 6000"}, + + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2398, "4320p-dci-2398", "4320p DCI 2398"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2400, "4320p-dci-2400", "4320p DCI 2400"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2500, "4320p-dci-2500", "4320p DCI 2500"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_2997, "4320p-dci-2997", "4320p DCI 2997"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_3000, "4320p-dci-3000", "4320p DCI 3000"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_5000, "4320p-dci-5000", "4320p DCI 5000"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_5994, "4320p-dci-5994", "4320p DCI 5994"}, + {GST_AJA_VIDEO_FORMAT_4320p_DCI_6000, "4320p-dci-6000", "4320p DCI 6000"}, + + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaVideoFormat", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_audio_source_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_AUDIO_SOURCE_EMBEDDED, "embedded", "Embedded"}, + {GST_AJA_AUDIO_SOURCE_AES, "aes", "AES"}, + {GST_AJA_AUDIO_SOURCE_ANALOG, "analog", "Analog"}, + {GST_AJA_AUDIO_SOURCE_HDMI, "hdmi", "HDMI"}, + {GST_AJA_AUDIO_SOURCE_MIC, "mic", "Microphone"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaAudioSource", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_embedded_audio_input_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO, "auto", "auto"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO1, "video-1", "Video 1"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO2, "video-2", "Video 2"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO3, "video-3", "Video 3"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO4, "video-4", "Video 4"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO5, "video-5", "Video 5"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO6, "video-6", "Video 6"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO7, "video-7", "Video 7"}, + {GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO8, "video-8", "Video 8"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaEmbeddedAudioInput", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_timecode_index_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_TIMECODE_INDEX_VITC, "vitc", "Embedded SDI VITC"}, + {GST_AJA_TIMECODE_INDEX_ATC_LTC, "atc-ltc", "Embedded SDI ATC LTC"}, + {GST_AJA_TIMECODE_INDEX_LTC1, "ltc-1", "Analog LTC 1"}, + {GST_AJA_TIMECODE_INDEX_LTC2, "ltc-2", "Analog LTC 2"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaTimecodeIndex", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +GType gst_aja_closed_caption_capture_mode_get_type(void) { + static gsize id = 0; + static const GEnumValue modes[] = { + {GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_AND_CEA608, + "cea708-and-cea608", + "CEA708 S334-2 and CEA608 S334-1 Annex A Closed Captions"}, + {GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_OR_CEA608, "cea708-or-cea608", + "CEA708 
S334-2 or if not existing CEA608 S334-1 Annex A Closed " + "Captions"}, + {GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_OR_CEA708, "cea608-or-cea708", + "CEA608 S334-1 Annex A or if not existing CEA708 S334-2 Closed " + "Captions"}, + {GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_ONLY, "cea708-only", + "CEA708 S334-2 Closed Captions only"}, + {GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_ONLY, "cea608-only", + "CEA608 S334-1 Annex A Closed Captions only"}, + {GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_NONE, "none", + "Don't capture Closed Captions"}, + {0, NULL, NULL}}; + + if (g_once_init_enter(&id)) { + GType tmp = g_enum_register_static("GstAjaClosedCaptionCaptureMode", modes); + g_once_init_leave(&id, tmp); + } + + return (GType)id; +} + +void gst_aja_common_init(void) { + GST_DEBUG_CATEGORY_INIT(gst_aja_debug, "aja", 0, + "Debug category for AJA plugin"); +} diff --git a/subprojects/gst-plugins-bad/sys/aja/gstajacommon.h b/subprojects/gst-plugins-bad/sys/aja/gstajacommon.h new file mode 100644 index 0000000000..1629cf08a2 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/gstajacommon.h @@ -0,0 +1,354 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +G_BEGIN_DECLS + +typedef struct { + GstMeta meta; + + GstBuffer *buffer; +} GstAjaAudioMeta; + +G_GNUC_INTERNAL +GType gst_aja_audio_meta_api_get_type(void); +#define GST_AJA_AUDIO_META_API_TYPE (gst_aja_audio_meta_api_get_type()) + +G_GNUC_INTERNAL +const GstMetaInfo *gst_aja_audio_meta_get_info(void); +#define GST_AJA_AUDIO_META_INFO (gst_aja_audio_meta_get_info()) + +#define gst_buffer_get_aja_audio_meta(b) \ + ((GstAjaAudioMeta *)gst_buffer_get_meta((b), GST_AJA_AUDIO_META_API_TYPE)) + +G_GNUC_INTERNAL +GstAjaAudioMeta *gst_buffer_add_aja_audio_meta(GstBuffer *buffer, + GstBuffer *audio_buffer); + +typedef struct { + CNTV2Card *device; +} GstAjaNtv2Device; + +G_GNUC_INTERNAL +GstAjaNtv2Device *gst_aja_ntv2_device_obtain(const gchar *device_identifier); +G_GNUC_INTERNAL +GstAjaNtv2Device *gst_aja_ntv2_device_ref(GstAjaNtv2Device *device); +G_GNUC_INTERNAL +void gst_aja_ntv2_device_unref(GstAjaNtv2Device *device); + +G_GNUC_INTERNAL +gint gst_aja_ntv2_device_find_unallocated_frames(GstAjaNtv2Device *device, + NTV2Channel channel, + guint frame_count); + +#define GST_AJA_ALLOCATOR_MEMTYPE "aja" + +#define GST_TYPE_AJA_ALLOCATOR (gst_aja_allocator_get_type()) +#define GST_AJA_ALLOCATOR(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_ALLOCATOR, GstAjaAllocator)) +#define GST_AJA_ALLOCATOR_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AJA_ALLOCATOR, \ + GstAjaAllocatorClass)) +#define GST_IS_Aja_ALLOCATOR(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AJA_ALLOCATOR)) +#define GST_IS_Aja_ALLOCATOR_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AJA_ALLOCATOR)) +#define GST_AJA_ALLOCATOR_CAST(obj) ((GstAjaAllocator *)(obj)) + +typedef struct _GstAjaAllocator GstAjaAllocator; +typedef struct _GstAjaAllocatorClass GstAjaAllocatorClass; + +struct _GstAjaAllocator { + GstAllocator allocator; + + GstAjaNtv2Device *device; + GstQueueArray *freed_mems; +}; + +struct _GstAjaAllocatorClass { + GstAllocatorClass parent_class; +}; + +G_GNUC_INTERNAL +GType gst_aja_allocator_get_type(void); +G_GNUC_INTERNAL +GstAllocator *gst_aja_allocator_new(GstAjaNtv2Device *device); + +typedef enum { + GST_AJA_AUDIO_SYSTEM_AUTO, + GST_AJA_AUDIO_SYSTEM_1, + GST_AJA_AUDIO_SYSTEM_2, + GST_AJA_AUDIO_SYSTEM_3, + GST_AJA_AUDIO_SYSTEM_4, + GST_AJA_AUDIO_SYSTEM_5, + GST_AJA_AUDIO_SYSTEM_6, + GST_AJA_AUDIO_SYSTEM_7, + GST_AJA_AUDIO_SYSTEM_8, +} GstAjaAudioSystem; + +#define GST_TYPE_AJA_AUDIO_SYSTEM (gst_aja_audio_system_get_type()) +G_GNUC_INTERNAL +GType gst_aja_audio_system_get_type(void); + +typedef enum { + GST_AJA_OUTPUT_DESTINATION_AUTO, + GST_AJA_OUTPUT_DESTINATION_ANALOG, + GST_AJA_OUTPUT_DESTINATION_SDI1, + GST_AJA_OUTPUT_DESTINATION_SDI2, + GST_AJA_OUTPUT_DESTINATION_SDI3, + GST_AJA_OUTPUT_DESTINATION_SDI4, + GST_AJA_OUTPUT_DESTINATION_SDI5, + GST_AJA_OUTPUT_DESTINATION_SDI6, + GST_AJA_OUTPUT_DESTINATION_SDI7, + GST_AJA_OUTPUT_DESTINATION_SDI8, + GST_AJA_OUTPUT_DESTINATION_HDMI, +} GstAjaOutputDestination; + +#define GST_TYPE_AJA_OUTPUT_DESTINATION (gst_aja_output_destination_get_type()) +G_GNUC_INTERNAL +GType gst_aja_output_destination_get_type(void); + +typedef enum { + GST_AJA_REFERENCE_SOURCE_AUTO, + GST_AJA_REFERENCE_SOURCE_FREERUN, + GST_AJA_REFERENCE_SOURCE_EXTERNAL, + GST_AJA_REFERENCE_SOURCE_INPUT_1, + GST_AJA_REFERENCE_SOURCE_INPUT_2, + GST_AJA_REFERENCE_SOURCE_INPUT_3, + GST_AJA_REFERENCE_SOURCE_INPUT_4, + 
GST_AJA_REFERENCE_SOURCE_INPUT_5, + GST_AJA_REFERENCE_SOURCE_INPUT_6, + GST_AJA_REFERENCE_SOURCE_INPUT_7, + GST_AJA_REFERENCE_SOURCE_INPUT_8, +} GstAjaReferenceSource; + +#define GST_TYPE_AJA_REFERENCE_SOURCE (gst_aja_reference_source_get_type()) +G_GNUC_INTERNAL +GType gst_aja_reference_source_get_type(void); + +typedef enum { + GST_AJA_INPUT_SOURCE_AUTO, + GST_AJA_INPUT_SOURCE_ANALOG1, + GST_AJA_INPUT_SOURCE_HDMI1, + GST_AJA_INPUT_SOURCE_HDMI2, + GST_AJA_INPUT_SOURCE_HDMI3, + GST_AJA_INPUT_SOURCE_HDMI4, + GST_AJA_INPUT_SOURCE_SDI1, + GST_AJA_INPUT_SOURCE_SDI2, + GST_AJA_INPUT_SOURCE_SDI3, + GST_AJA_INPUT_SOURCE_SDI4, + GST_AJA_INPUT_SOURCE_SDI5, + GST_AJA_INPUT_SOURCE_SDI6, + GST_AJA_INPUT_SOURCE_SDI7, + GST_AJA_INPUT_SOURCE_SDI8, +} GstAjaInputSource; + +#define GST_TYPE_AJA_INPUT_SOURCE (gst_aja_input_source_get_type()) +G_GNUC_INTERNAL +GType gst_aja_input_source_get_type(void); + +typedef enum { + GST_AJA_SDI_MODE_SINGLE_LINK, + GST_AJA_SDI_MODE_QUAD_LINK_SQD, + GST_AJA_SDI_MODE_QUAD_LINK_TSI, +} GstAjaSdiMode; + +#define GST_TYPE_AJA_SDI_MODE (gst_aja_sdi_mode_get_type()) +G_GNUC_INTERNAL +GType gst_aja_sdi_mode_get_type(void); + +typedef enum { + GST_AJA_VIDEO_FORMAT_INVALID = -1, + GST_AJA_VIDEO_FORMAT_AUTO, + GST_AJA_VIDEO_FORMAT_1080i_5000, + GST_AJA_VIDEO_FORMAT_1080i_5994, + GST_AJA_VIDEO_FORMAT_1080i_6000, + GST_AJA_VIDEO_FORMAT_720p_5994, + GST_AJA_VIDEO_FORMAT_720p_6000, + GST_AJA_VIDEO_FORMAT_1080psf_2398, + GST_AJA_VIDEO_FORMAT_1080psf_2400, + GST_AJA_VIDEO_FORMAT_1080p_2997, + GST_AJA_VIDEO_FORMAT_1080p_3000, + GST_AJA_VIDEO_FORMAT_1080p_2500, + GST_AJA_VIDEO_FORMAT_1080p_2398, + GST_AJA_VIDEO_FORMAT_1080p_2400, + GST_AJA_VIDEO_FORMAT_720p_5000, + GST_AJA_VIDEO_FORMAT_1080p_5000_A, + GST_AJA_VIDEO_FORMAT_1080p_5994_A, + GST_AJA_VIDEO_FORMAT_1080p_6000_A, + GST_AJA_VIDEO_FORMAT_720p_2398, + GST_AJA_VIDEO_FORMAT_720p_2500, + GST_AJA_VIDEO_FORMAT_1080psf_2500_2, + GST_AJA_VIDEO_FORMAT_1080psf_2997_2, + GST_AJA_VIDEO_FORMAT_1080psf_3000_2, + GST_AJA_VIDEO_FORMAT_625_5000, + GST_AJA_VIDEO_FORMAT_525_5994, + GST_AJA_VIDEO_FORMAT_525_2398, + GST_AJA_VIDEO_FORMAT_525_2400, + GST_AJA_VIDEO_FORMAT_1080p_DCI_2398, + GST_AJA_VIDEO_FORMAT_1080p_DCI_2400, + GST_AJA_VIDEO_FORMAT_1080p_DCI_2500, + GST_AJA_VIDEO_FORMAT_1080p_DCI_2997, + GST_AJA_VIDEO_FORMAT_1080p_DCI_3000, + GST_AJA_VIDEO_FORMAT_1080p_DCI_5000_A, + GST_AJA_VIDEO_FORMAT_1080p_DCI_5994_A, + GST_AJA_VIDEO_FORMAT_1080p_DCI_6000_A, + GST_AJA_VIDEO_FORMAT_2160p_2398, + GST_AJA_VIDEO_FORMAT_2160p_2400, + GST_AJA_VIDEO_FORMAT_2160p_2500, + GST_AJA_VIDEO_FORMAT_2160p_2997, + GST_AJA_VIDEO_FORMAT_2160p_3000, + GST_AJA_VIDEO_FORMAT_2160p_5000, + GST_AJA_VIDEO_FORMAT_2160p_5994, + GST_AJA_VIDEO_FORMAT_2160p_6000, + GST_AJA_VIDEO_FORMAT_2160p_DCI_2398, + GST_AJA_VIDEO_FORMAT_2160p_DCI_2400, + GST_AJA_VIDEO_FORMAT_2160p_DCI_2500, + GST_AJA_VIDEO_FORMAT_2160p_DCI_2997, + GST_AJA_VIDEO_FORMAT_2160p_DCI_3000, + GST_AJA_VIDEO_FORMAT_2160p_DCI_5000, + GST_AJA_VIDEO_FORMAT_2160p_DCI_5994, + GST_AJA_VIDEO_FORMAT_2160p_DCI_6000, + GST_AJA_VIDEO_FORMAT_4320p_2398, + GST_AJA_VIDEO_FORMAT_4320p_2400, + GST_AJA_VIDEO_FORMAT_4320p_2500, + GST_AJA_VIDEO_FORMAT_4320p_2997, + GST_AJA_VIDEO_FORMAT_4320p_3000, + GST_AJA_VIDEO_FORMAT_4320p_5000, + GST_AJA_VIDEO_FORMAT_4320p_5994, + GST_AJA_VIDEO_FORMAT_4320p_6000, + GST_AJA_VIDEO_FORMAT_4320p_DCI_2398, + GST_AJA_VIDEO_FORMAT_4320p_DCI_2400, + GST_AJA_VIDEO_FORMAT_4320p_DCI_2500, + GST_AJA_VIDEO_FORMAT_4320p_DCI_2997, + GST_AJA_VIDEO_FORMAT_4320p_DCI_3000, + GST_AJA_VIDEO_FORMAT_4320p_DCI_5000, + 
GST_AJA_VIDEO_FORMAT_4320p_DCI_5994, + GST_AJA_VIDEO_FORMAT_4320p_DCI_6000, +} GstAjaVideoFormat; + +#define GST_TYPE_AJA_VIDEO_FORMAT (gst_aja_video_format_get_type()) +G_GNUC_INTERNAL +GType gst_aja_video_format_get_type(void); + +typedef enum { + GST_AJA_AUDIO_SOURCE_EMBEDDED, + GST_AJA_AUDIO_SOURCE_AES, + GST_AJA_AUDIO_SOURCE_ANALOG, + GST_AJA_AUDIO_SOURCE_HDMI, + GST_AJA_AUDIO_SOURCE_MIC, +} GstAjaAudioSource; + +#define GST_TYPE_AJA_AUDIO_SOURCE (gst_aja_audio_source_get_type()) +G_GNUC_INTERNAL +GType gst_aja_audio_source_get_type(void); + +typedef enum { + GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO1, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO2, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO3, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO4, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO5, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO6, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO7, + GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO8, +} GstAjaEmbeddedAudioInput; + +#define GST_TYPE_AJA_EMBEDDED_AUDIO_INPUT \ + (gst_aja_embedded_audio_input_get_type()) +G_GNUC_INTERNAL +GType gst_aja_embedded_audio_input_get_type(void); + +typedef enum { + GST_AJA_TIMECODE_INDEX_VITC, + GST_AJA_TIMECODE_INDEX_ATC_LTC, + GST_AJA_TIMECODE_INDEX_LTC1, + GST_AJA_TIMECODE_INDEX_LTC2, +} GstAjaTimecodeIndex; + +#define GST_TYPE_AJA_TIMECODE_INDEX (gst_aja_timecode_index_get_type()) +G_GNUC_INTERNAL +GType gst_aja_timecode_index_get_type(void); + +typedef enum { + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_AND_CEA608, + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_OR_CEA608, + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_OR_CEA708, + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_ONLY, + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_ONLY, + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_NONE, +} GstAjaClosedCaptionCaptureMode; + +#define GST_TYPE_AJA_CLOSED_CAPTION_CAPTURE_MODE \ + (gst_aja_closed_caption_capture_mode_get_type()) +G_GNUC_INTERNAL +GType gst_aja_closed_caption_capture_mode_get_type(void); + +G_GNUC_INTERNAL +void gst_aja_common_init(void); + +G_END_DECLS + +class ShmMutexLocker { + public: + ShmMutexLocker(); + ~ShmMutexLocker(); +}; + +G_GNUC_INTERNAL +GstCaps *gst_ntv2_supported_caps(NTV2DeviceID device_id); + +G_GNUC_INTERNAL +GstCaps *gst_ntv2_video_format_to_caps(NTV2VideoFormat format); +G_GNUC_INTERNAL +bool gst_video_info_from_ntv2_video_format(GstVideoInfo *info, + NTV2VideoFormat format); +G_GNUC_INTERNAL +NTV2VideoFormat gst_ntv2_video_format_from_caps(const GstCaps *caps, bool quad); + +G_GNUC_INTERNAL +GstCaps *gst_aja_video_format_to_caps(GstAjaVideoFormat format); +G_GNUC_INTERNAL +bool gst_video_info_from_aja_video_format(GstVideoInfo *info, + GstAjaVideoFormat format); +G_GNUC_INTERNAL +GstAjaVideoFormat gst_aja_video_format_from_caps(const GstCaps *caps); + +G_GNUC_INTERNAL +GstAjaVideoFormat gst_aja_video_format_from_ntv2_format(NTV2VideoFormat format); +G_GNUC_INTERNAL +NTV2VideoFormat gst_ntv2_video_format_from_aja_format(GstAjaVideoFormat format, + bool quad); + +G_GNUC_INTERNAL +bool gst_ntv2_video_format_is_quad(NTV2VideoFormat format); diff --git a/subprojects/gst-plugins-bad/sys/aja/gstajadeviceprovider.cpp b/subprojects/gst-plugins-bad/sys/aja/gstajadeviceprovider.cpp new file mode 100644 index 0000000000..361aac8cfe --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/gstajadeviceprovider.cpp @@ -0,0 +1,170 @@ +/* + * Copyright (C) 2019 Mathieu Duponchelle + * Copyright (C) 2019,2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library 
General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstajacommon.h" +#include "gstajadeviceprovider.h" + +static GstDevice *gst_aja_device_new(NTV2DeviceInfo &device, gboolean video); + +G_DEFINE_TYPE(GstAjaDeviceProvider, gst_aja_device_provider, + GST_TYPE_DEVICE_PROVIDER); + +static void gst_aja_device_provider_init(GstAjaDeviceProvider *self) {} + +static GList *gst_aja_device_provider_probe(GstDeviceProvider *provider) { + GList *ret = NULL; + + CNTV2DeviceScanner scanner; + + NTV2DeviceInfoList devices = scanner.GetDeviceInfoList(); + for (NTV2DeviceInfoList::iterator it = devices.begin(); it != devices.end(); + it++) { + // Skip non-input / non-output devices + if (it->numVidInputs == 0 && it->numVidOutputs == 0) continue; + + if (it->numVidInputs > 0) + ret = g_list_prepend(ret, gst_aja_device_new(*it, TRUE)); + if (it->numVidOutputs > 0) + ret = g_list_prepend(ret, gst_aja_device_new(*it, FALSE)); + } + + ret = g_list_reverse(ret); + + return ret; +} + +static void gst_aja_device_provider_class_init( + GstAjaDeviceProviderClass *klass) { + GstDeviceProviderClass *dm_class = GST_DEVICE_PROVIDER_CLASS(klass); + + dm_class->probe = GST_DEBUG_FUNCPTR(gst_aja_device_provider_probe); + + gst_device_provider_class_set_static_metadata( + dm_class, "AJA Device Provider", "Source/Audio/Video", + "List and provides AJA capture devices", + "Sebastian Dröge "); +} + +G_DEFINE_TYPE(GstAjaDevice, gst_aja_device, GST_TYPE_DEVICE); + +static void gst_aja_device_init(GstAjaDevice *self) {} + +static GstElement *gst_aja_device_create_element(GstDevice *device, + const gchar *name) { + GstAjaDevice *self = GST_AJA_DEVICE(device); + GstElement *ret = NULL; + + if (self->is_capture) { + ret = gst_element_factory_make("ajasrc", name); + } else { + ret = gst_element_factory_make("ajasink", name); + } + + if (ret) { + gchar *device_identifier = g_strdup_printf("%u", self->device_index); + + g_object_set(ret, "device-identifier", device_identifier, NULL); + g_free(device_identifier); + } + + return ret; +} + +static void gst_aja_device_class_init(GstAjaDeviceClass *klass) { + GstDeviceClass *gst_device_class = GST_DEVICE_CLASS(klass); + + gst_device_class->create_element = + GST_DEBUG_FUNCPTR(gst_aja_device_create_element); +} + +static GstDevice *gst_aja_device_new(NTV2DeviceInfo &device, + gboolean is_capture) { + GstDevice *ret; + gchar *display_name; + const gchar *device_class; + GstCaps *caps = NULL; + GstStructure *properties; + + device_class = is_capture ? "Audio/Video/Source" : "Audio/Video/Sink"; + display_name = g_strdup_printf("AJA %s (%s)", device.deviceIdentifier.c_str(), + is_capture ? 
"Source" : "Sink"); + + caps = gst_ntv2_supported_caps(device.deviceID); + + properties = gst_structure_new_empty("properties"); + + gst_structure_set( + properties, "device-id", G_TYPE_UINT, device.deviceID, "device-index", + G_TYPE_UINT, device.deviceIndex, "pci-slot", G_TYPE_UINT, device.pciSlot, + "serial-number", G_TYPE_UINT64, device.deviceSerialNumber, + "device-identifier", G_TYPE_STRING, device.deviceIdentifier.c_str(), + "num-audio-streams", G_TYPE_UINT, device.numAudioStreams, + "dual-link-support", G_TYPE_BOOLEAN, device.dualLinkSupport, + "sdi-3g-support", G_TYPE_BOOLEAN, device.sdi3GSupport, "sdi-12g-support", + G_TYPE_BOOLEAN, device.sdi12GSupport, "ip-support", G_TYPE_BOOLEAN, + device.ipSupport, "bi-directional-sdi", G_TYPE_BOOLEAN, + device.biDirectionalSDI, "ltc-in-support", G_TYPE_BOOLEAN, + device.ltcInSupport, "ltc-in-on-ref-port", G_TYPE_BOOLEAN, + device.ltcInOnRefPort, "2k-support", G_TYPE_BOOLEAN, device.has2KSupport, + "4k-support", G_TYPE_BOOLEAN, device.has4KSupport, "8k-support", + G_TYPE_BOOLEAN, device.has8KSupport, "multiformat-support", + G_TYPE_BOOLEAN, device.multiFormat, NULL); + + if (is_capture) { + gst_structure_set( + properties, "num-vid-inputs", G_TYPE_UINT, device.numVidInputs, + "num-anlg-vid-inputs", G_TYPE_UINT, device.numAnlgVidInputs, + "num-hdmi-vid-inputs", G_TYPE_UINT, device.numHDMIVidInputs, + "num-analog-audio-input-channels", G_TYPE_UINT, + device.numAnalogAudioInputChannels, "num-aes-audio-input-channels", + G_TYPE_UINT, device.numAESAudioInputChannels, + "num-embedded-audio-input-channels", G_TYPE_UINT, + device.numEmbeddedAudioInputChannels, "num-hdmi-audio-input-channels", + G_TYPE_UINT, device.numHDMIAudioInputChannels, NULL); + } else { + gst_structure_set( + properties, "num-vid-outputs", G_TYPE_UINT, device.numVidOutputs, + "num-anlg-vid-outputs", G_TYPE_UINT, device.numAnlgVidOutputs, + "num-hdmi-vid-outputs", G_TYPE_UINT, device.numHDMIVidOutputs, + "num-analog-audio-output-channels", G_TYPE_UINT, + device.numAnalogAudioOutputChannels, "num-aes-audio-output-channels", + G_TYPE_UINT, device.numAESAudioOutputChannels, + "num-embedded-audio-output-channels", G_TYPE_UINT, + device.numEmbeddedAudioOutputChannels, "num-hdmi-audio-output-channels", + G_TYPE_UINT, device.numHDMIAudioOutputChannels, NULL); + } + + ret = GST_DEVICE(g_object_new(GST_TYPE_AJA_DEVICE, "display-name", + display_name, "device-class", device_class, + "caps", caps, "properties", properties, NULL)); + + g_free(display_name); + gst_caps_unref(caps); + gst_structure_free(properties); + + GST_AJA_DEVICE(ret)->is_capture = is_capture; + GST_AJA_DEVICE(ret)->device_index = device.deviceIndex; + + return ret; +} diff --git a/subprojects/gst-plugins-bad/sys/aja/gstajadeviceprovider.h b/subprojects/gst-plugins-bad/sys/aja/gstajadeviceprovider.h new file mode 100644 index 0000000000..94829bdcfb --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/gstajadeviceprovider.h @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2019 Mathieu Duponchelle + * Copyright (C) 2019,2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the Free + * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA + * 02110-1301, USA. + */ + +#ifndef _GST_AJA_DEVICE_PROVIDER_H_ +#define _GST_AJA_DEVICE_PROVIDER_H_ + +#include +#include + +G_BEGIN_DECLS + +#define GST_TYPE_AJA_DEVICE_PROVIDER gst_aja_device_provider_get_type() +#define GST_AJA_DEVICE_PROVIDER(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_DEVICE_PROVIDER, \ + GstAjaDeviceProvider)) + +typedef struct _GstAjaDeviceProvider GstAjaDeviceProvider; +typedef struct _GstAjaDeviceProviderClass GstAjaDeviceProviderClass; + +struct _GstAjaDeviceProviderClass { + GstDeviceProviderClass parent_class; +}; + +struct _GstAjaDeviceProvider { + GstDeviceProvider parent; +}; + +G_GNUC_INTERNAL +GType gst_aja_device_provider_get_type(void); + +#define GST_TYPE_AJA_DEVICE gst_aja_device_get_type() +#define GST_AJA_DEVICE(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_DEVICE, GstAjaDevice)) + +typedef struct _GstAjaDevice GstAjaDevice; +typedef struct _GstAjaDeviceClass GstAjaDeviceClass; + +struct _GstAjaDeviceClass { + GstDeviceClass parent_class; +}; + +struct _GstAjaDevice { + GstDevice parent; + gboolean is_capture; + guint device_index; +}; + +G_GNUC_INTERNAL +GType gst_aja_device_get_type(void); + +G_END_DECLS + +#endif /* _GST_AJA_DEVICE_PROVIDER_H_ */ diff --git a/subprojects/gst-plugins-bad/sys/aja/gstajasink.cpp b/subprojects/gst-plugins-bad/sys/aja/gstajasink.cpp new file mode 100644 index 0000000000..c729938123 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/gstajasink.cpp @@ -0,0 +1,2201 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. 
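With the device provider in place, applications can discover AJA boards through the regular GstDeviceMonitor API and create a preconfigured ajasrc or ajasink from each GstDevice; the per-device details assembled in gst_aja_device_new() are exposed via gst_device_get_properties(). A rough sketch, assuming gst_init() has already been called and ignoring error handling:

    #include <gst/gst.h>

    static void list_aja_capture_devices(void) {
      GstDeviceMonitor *monitor = gst_device_monitor_new();

      // Capture devices are published with the "Audio/Video/Source" class.
      gst_device_monitor_add_filter(monitor, "Audio/Video/Source", NULL);
      gst_device_monitor_start(monitor);

      GList *devices = gst_device_monitor_get_devices(monitor);
      for (GList *l = devices; l != NULL; l = l->next) {
        GstDevice *device = GST_DEVICE(l->data);
        GstStructure *props = gst_device_get_properties(device);
        gchar *name = gst_device_get_display_name(device);
        guint index = 0;

        gst_structure_get_uint(props, "device-index", &index);
        g_print("%s (device-index %u)\n", name, index);

        // Returns an ajasrc with device-identifier already set, see
        // gst_aja_device_create_element() above.
        GstElement *src = gst_device_create_element(device, NULL);
        if (src) gst_object_unref(src);

        g_free(name);
        gst_structure_free(props);
      }
      g_list_free_full(devices, (GDestroyNotify)gst_object_unref);

      gst_device_monitor_stop(monitor);
      gst_object_unref(monitor);
    }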
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include +#include + +#include "gstajacommon.h" +#include "gstajasink.h" + +GST_DEBUG_CATEGORY_STATIC(gst_aja_sink_debug); +#define GST_CAT_DEFAULT gst_aja_sink_debug + +#define DEFAULT_DEVICE_IDENTIFIER ("0") +#define DEFAULT_CHANNEL (::NTV2_CHANNEL1) +#define DEFAULT_AUDIO_SYSTEM (GST_AJA_AUDIO_SYSTEM_AUTO) +#define DEFAULT_OUTPUT_DESTINATION (GST_AJA_OUTPUT_DESTINATION_AUTO) +#define DEFAULT_SDI_MODE (GST_AJA_SDI_MODE_SINGLE_LINK) +#define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) +#define DEFAULT_RP188 (TRUE) +#define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_AUTO) +#define DEFAULT_CEA608_LINE_NUMBER (12) +#define DEFAULT_CEA708_LINE_NUMBER (12) +#define DEFAULT_QUEUE_SIZE (16) +#define DEFAULT_START_FRAME (0) +#define DEFAULT_END_FRAME (0) +#define DEFAULT_OUTPUT_CPU_CORE (G_MAXUINT) + +enum { + PROP_0, + PROP_DEVICE_IDENTIFIER, + PROP_CHANNEL, + PROP_AUDIO_SYSTEM, + PROP_OUTPUT_DESTINATION, + PROP_SDI_MODE, + PROP_TIMECODE_INDEX, + PROP_RP188, + PROP_REFERENCE_SOURCE, + PROP_CEA608_LINE_NUMBER, + PROP_CEA708_LINE_NUMBER, + PROP_QUEUE_SIZE, + PROP_START_FRAME, + PROP_END_FRAME, + PROP_OUTPUT_CPU_CORE, +}; + +typedef enum { + QUEUE_ITEM_TYPE_FRAME, +} QueueItemType; + +typedef struct { + QueueItemType type; + + // For FRAME + GstBuffer *video_buffer; + GstMapInfo video_map; + GstBuffer *audio_buffer; + GstMapInfo audio_map; + NTV2_RP188 tc; + GstBuffer *anc_buffer; + GstMapInfo anc_map; + GstBuffer *anc_buffer2; + GstMapInfo anc_map2; +} QueueItem; + +static void gst_aja_sink_set_property(GObject *object, guint property_id, + const GValue *value, GParamSpec *pspec); +static void gst_aja_sink_get_property(GObject *object, guint property_id, + GValue *value, GParamSpec *pspec); +static void gst_aja_sink_finalize(GObject *object); + +static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps); +static GstCaps *gst_aja_sink_get_caps(GstBaseSink *bsink, GstCaps *filter); +static gboolean gst_aja_sink_event(GstBaseSink *bsink, GstEvent *event); +static gboolean gst_aja_sink_propose_allocation(GstBaseSink *bsink, + GstQuery *query); +static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink, GstBuffer *buffer); + +static gboolean gst_aja_sink_open(GstAjaSink *sink); +static gboolean gst_aja_sink_close(GstAjaSink *sink); +static gboolean gst_aja_sink_start(GstAjaSink *sink); +static gboolean gst_aja_sink_stop(GstAjaSink *sink); + +static GstStateChangeReturn gst_aja_sink_change_state( + GstElement *element, GstStateChange transition); + +static void output_thread_func(AJAThread *thread, void *data); + +#define parent_class gst_aja_sink_parent_class +G_DEFINE_TYPE(GstAjaSink, gst_aja_sink, GST_TYPE_BASE_SINK); + +static void gst_aja_sink_class_init(GstAjaSinkClass *klass) { + GObjectClass *gobject_class = G_OBJECT_CLASS(klass); + GstElementClass *element_class = GST_ELEMENT_CLASS(klass); + GstBaseSinkClass *basesink_class = GST_BASE_SINK_CLASS(klass); + GstCaps *templ_caps; + + gobject_class->set_property = gst_aja_sink_set_property; + gobject_class->get_property = gst_aja_sink_get_property; + gobject_class->finalize = gst_aja_sink_finalize; + + g_object_class_install_property( + gobject_class, PROP_DEVICE_IDENTIFIER, + g_param_spec_string( + "device-identifier", "Device identifier", + "Input device instance to use", DEFAULT_DEVICE_IDENTIFIER, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, 
PROP_CHANNEL, + g_param_spec_uint( + "channel", "Channel", "Channel to use", 0, NTV2_MAX_NUM_CHANNELS - 1, + DEFAULT_CHANNEL, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_QUEUE_SIZE, + g_param_spec_uint( + "queue-size", "Queue Size", + "Size of internal queue in number of video frames. " + "Half of this is allocated as device buffers and equal to the " + "latency.", + 1, G_MAXINT, DEFAULT_QUEUE_SIZE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_START_FRAME, + g_param_spec_uint( + "start-frame", "Start Frame", + "Start frame buffer to be used for output (auto if same number as " + "end-frame).", + 0, G_MAXINT, DEFAULT_START_FRAME, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_END_FRAME, + g_param_spec_uint( + "end-frame", "End Frame", + "End frame buffer to be used for output (auto if same number as " + "start-frame).", + 0, G_MAXINT, DEFAULT_END_FRAME, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_AUDIO_SYSTEM, + g_param_spec_enum( + "audio-system", "Audio System", "Audio system to use", + GST_TYPE_AJA_AUDIO_SYSTEM, DEFAULT_AUDIO_SYSTEM, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_OUTPUT_DESTINATION, + g_param_spec_enum( + "output-destination", "Output Destination", + "Output destination to use", GST_TYPE_AJA_OUTPUT_DESTINATION, + DEFAULT_OUTPUT_DESTINATION, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_SDI_MODE, + g_param_spec_enum( + "sdi-mode", "SDI Mode", "SDI mode to use", GST_TYPE_AJA_SDI_MODE, + DEFAULT_SDI_MODE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_TIMECODE_INDEX, + g_param_spec_enum( + "timecode-index", "Timecode Index", "Timecode index to use", + GST_TYPE_AJA_TIMECODE_INDEX, DEFAULT_TIMECODE_INDEX, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_RP188, + g_param_spec_boolean( + "rp188", "RP188", "Enable RP188 timecode transmission", DEFAULT_RP188, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_REFERENCE_SOURCE, + g_param_spec_enum( + "reference-source", "Reference Source", "Reference source to use", + GST_TYPE_AJA_REFERENCE_SOURCE, DEFAULT_REFERENCE_SOURCE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_CEA608_LINE_NUMBER, + g_param_spec_uint( + "cea608-line-number", "CEA608 Line Number", + "Sets the line number to use for CEA608 S334-1 Annex A Closed " + "Captions " + "(-1=disabled)", + 0, G_MAXUINT, DEFAULT_CEA608_LINE_NUMBER, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_CEA708_LINE_NUMBER, + g_param_spec_uint( + "cea708-line-number", "CEA708 Line Number", + "Sets the line number to use for CEA708 S334-2 Closed Captions " + "(-1=disabled)", + 0, G_MAXUINT, 
DEFAULT_CEA608_LINE_NUMBER, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_OUTPUT_CPU_CORE, + g_param_spec_uint( + "output-cpu-core", "Output CPU Core", + "Sets the affinity of the output thread to this CPU core " + "(-1=disabled)", + 0, G_MAXUINT, DEFAULT_OUTPUT_CPU_CORE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + element_class->change_state = GST_DEBUG_FUNCPTR(gst_aja_sink_change_state); + + basesink_class->set_caps = GST_DEBUG_FUNCPTR(gst_aja_sink_set_caps); + basesink_class->get_caps = GST_DEBUG_FUNCPTR(gst_aja_sink_get_caps); + basesink_class->event = GST_DEBUG_FUNCPTR(gst_aja_sink_event); + basesink_class->propose_allocation = + GST_DEBUG_FUNCPTR(gst_aja_sink_propose_allocation); + basesink_class->render = GST_DEBUG_FUNCPTR(gst_aja_sink_render); + + templ_caps = gst_ntv2_supported_caps(DEVICE_ID_INVALID); + gst_element_class_add_pad_template( + element_class, + gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS, templ_caps)); + gst_caps_unref(templ_caps); + + gst_element_class_set_static_metadata( + element_class, "AJA audio/video sink", "Audio/Video/Sink", + "Outputs audio/video frames with AJA devices", + "Sebastian Dröge "); + + GST_DEBUG_CATEGORY_INIT(gst_aja_sink_debug, "ajasink", 0, "AJA sink"); +} + +static void gst_aja_sink_init(GstAjaSink *self) { + g_mutex_init(&self->queue_lock); + g_cond_init(&self->queue_cond); + g_cond_init(&self->drain_cond); + + self->device_identifier = g_strdup(DEFAULT_DEVICE_IDENTIFIER); + self->channel = DEFAULT_CHANNEL; + self->queue_size = DEFAULT_QUEUE_SIZE; + self->start_frame = DEFAULT_START_FRAME; + self->end_frame = DEFAULT_END_FRAME; + self->audio_system_setting = DEFAULT_AUDIO_SYSTEM; + self->output_destination = DEFAULT_OUTPUT_DESTINATION; + self->timecode_index = DEFAULT_TIMECODE_INDEX; + self->reference_source = DEFAULT_REFERENCE_SOURCE; + self->output_cpu_core = DEFAULT_OUTPUT_CPU_CORE; + + self->queue = + gst_queue_array_new_for_struct(sizeof(QueueItem), self->queue_size); +} + +void gst_aja_sink_set_property(GObject *object, guint property_id, + const GValue *value, GParamSpec *pspec) { + GstAjaSink *self = GST_AJA_SINK(object); + + switch (property_id) { + case PROP_DEVICE_IDENTIFIER: + g_free(self->device_identifier); + self->device_identifier = g_value_dup_string(value); + break; + case PROP_CHANNEL: + self->channel = (NTV2Channel)g_value_get_uint(value); + break; + case PROP_QUEUE_SIZE: + self->queue_size = g_value_get_uint(value); + break; + case PROP_START_FRAME: + self->start_frame = g_value_get_uint(value); + break; + case PROP_END_FRAME: + self->end_frame = g_value_get_uint(value); + break; + case PROP_AUDIO_SYSTEM: + self->audio_system_setting = (GstAjaAudioSystem)g_value_get_enum(value); + break; + case PROP_OUTPUT_DESTINATION: + self->output_destination = + (GstAjaOutputDestination)g_value_get_enum(value); + break; + case PROP_SDI_MODE: + self->sdi_mode = (GstAjaSdiMode)g_value_get_enum(value); + break; + case PROP_TIMECODE_INDEX: + self->timecode_index = (GstAjaTimecodeIndex)g_value_get_enum(value); + break; + case PROP_RP188: + self->rp188 = g_value_get_boolean(value); + break; + case PROP_REFERENCE_SOURCE: + self->reference_source = (GstAjaReferenceSource)g_value_get_enum(value); + break; + case PROP_CEA608_LINE_NUMBER: + self->cea608_line_number = g_value_get_uint(value); + break; + case PROP_CEA708_LINE_NUMBER: + self->cea708_line_number = g_value_get_uint(value); + 
break; + case PROP_OUTPUT_CPU_CORE: + self->output_cpu_core = g_value_get_uint(value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); + break; + } +} + +void gst_aja_sink_get_property(GObject *object, guint property_id, + GValue *value, GParamSpec *pspec) { + GstAjaSink *self = GST_AJA_SINK(object); + + switch (property_id) { + case PROP_DEVICE_IDENTIFIER: + g_value_set_string(value, self->device_identifier); + break; + case PROP_CHANNEL: + g_value_set_uint(value, self->channel); + break; + case PROP_QUEUE_SIZE: + g_value_set_uint(value, self->queue_size); + break; + case PROP_START_FRAME: + g_value_set_uint(value, self->start_frame); + break; + case PROP_END_FRAME: + g_value_set_uint(value, self->end_frame); + break; + case PROP_AUDIO_SYSTEM: + g_value_set_enum(value, self->audio_system_setting); + break; + case PROP_OUTPUT_DESTINATION: + g_value_set_enum(value, self->output_destination); + break; + case PROP_SDI_MODE: + g_value_set_enum(value, self->sdi_mode); + break; + case PROP_TIMECODE_INDEX: + g_value_set_enum(value, self->timecode_index); + break; + case PROP_RP188: + g_value_set_boolean(value, self->rp188); + break; + case PROP_REFERENCE_SOURCE: + g_value_set_enum(value, self->reference_source); + break; + case PROP_CEA608_LINE_NUMBER: + g_value_set_uint(value, self->cea608_line_number); + break; + case PROP_CEA708_LINE_NUMBER: + g_value_set_uint(value, self->cea708_line_number); + break; + case PROP_OUTPUT_CPU_CORE: + g_value_set_uint(value, self->output_cpu_core); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); + break; + } +} + +void gst_aja_sink_finalize(GObject *object) { + GstAjaSink *self = GST_AJA_SINK(object); + + g_assert(self->device == NULL); + g_assert(gst_queue_array_get_length(self->queue) == 0); + g_clear_pointer(&self->queue, gst_queue_array_free); + + g_mutex_clear(&self->queue_lock); + g_cond_clear(&self->queue_cond); + g_cond_clear(&self->drain_cond); + + G_OBJECT_CLASS(parent_class)->finalize(object); +} + +static gboolean gst_aja_sink_open(GstAjaSink *self) { + GST_DEBUG_OBJECT(self, "Opening device"); + + g_assert(self->device == NULL); + + self->device = gst_aja_ntv2_device_obtain(self->device_identifier); + if (!self->device) { + GST_ERROR_OBJECT(self, "Failed to open device"); + return FALSE; + } + + if (!self->device->device->IsDeviceReady(false)) { + g_clear_pointer(&self->device, gst_aja_ntv2_device_unref); + return FALSE; + } + + self->device->device->SetEveryFrameServices(::NTV2_OEM_TASKS); + self->device_id = self->device->device->GetDeviceID(); + + std::string serial_number; + if (!self->device->device->GetSerialNumberString(serial_number)) + serial_number = "none"; + + GST_DEBUG_OBJECT(self, + "Opened device with ID %d at index %d (%s, version %s, " + "serial number %s, can do VANC %d)", + self->device_id, self->device->device->GetIndexNumber(), + self->device->device->GetDisplayName().c_str(), + self->device->device->GetDeviceVersionString().c_str(), + serial_number.c_str(), + ::NTV2DeviceCanDoCustomAnc(self->device_id)); + + GST_DEBUG_OBJECT(self, + "Using SDK version %d.%d.%d.%d (%s) and driver version %s", + AJA_NTV2_SDK_VERSION_MAJOR, AJA_NTV2_SDK_VERSION_MINOR, + AJA_NTV2_SDK_VERSION_POINT, AJA_NTV2_SDK_BUILD_NUMBER, + AJA_NTV2_SDK_BUILD_DATETIME, + self->device->device->GetDriverVersionString().c_str()); + + self->device->device->SetMultiFormatMode(true); + + self->allocator = gst_aja_allocator_new(self->device); + + GST_DEBUG_OBJECT(self, "Opened device"); + + 
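The properties installed in gst_aja_sink_class_init() map directly onto g_object_set(). The sketch below wires a test source to an ajasink and sets a few of them; the surrounding pipeline and values are illustrative, and a capsfilter may be needed to pick a video mode the device actually supports:

    #include <gst/gst.h>

    int main(int argc, char **argv) {
      gst_init(&argc, &argv);

      GstElement *pipeline = gst_pipeline_new(NULL);
      GstElement *src = gst_element_factory_make("videotestsrc", NULL);
      GstElement *sink = gst_element_factory_make("ajasink", NULL);

      // Property names as installed in gst_aja_sink_class_init() above.
      g_object_set(sink,
                   "device-identifier", "0",  // first board
                   "channel", 0,              // NTV2_CHANNEL1
                   "queue-size", 16,
                   "rp188", TRUE,
                   NULL);

      gst_bin_add_many(GST_BIN(pipeline), src, sink, NULL);
      gst_element_link(src, sink);

      gst_element_set_state(pipeline, GST_STATE_PLAYING);
      // ... run a GMainLoop or wait on the bus here ...
      gst_element_set_state(pipeline, GST_STATE_NULL);
      gst_object_unref(pipeline);

      return 0;
    }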
return TRUE; +} + +static gboolean gst_aja_sink_close(GstAjaSink *self) { + gst_clear_object(&self->allocator); + g_clear_pointer(&self->device, gst_aja_ntv2_device_unref); + self->device_id = DEVICE_ID_INVALID; + + GST_DEBUG_OBJECT(self, "Closed device"); + + return TRUE; +} + +static gboolean gst_aja_sink_start(GstAjaSink *self) { + GST_DEBUG_OBJECT(self, "Starting"); + self->output_thread = new AJAThread(); + self->output_thread->Attach(output_thread_func, self); + self->output_thread->SetPriority(AJA_ThreadPriority_High); + self->output_thread->Start(); + g_mutex_lock(&self->queue_lock); + self->shutdown = FALSE; + self->playing = FALSE; + self->eos = FALSE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + return TRUE; +} + +static gboolean gst_aja_sink_stop(GstAjaSink *self) { + QueueItem *item; + + GST_DEBUG_OBJECT(self, "Stopping"); + + g_mutex_lock(&self->queue_lock); + self->shutdown = TRUE; + self->playing = FALSE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + if (self->output_thread) { + self->output_thread->Stop(); + delete self->output_thread; + self->output_thread = NULL; + } + + GST_OBJECT_LOCK(self); + gst_clear_caps(&self->configured_caps); + self->configured_audio_channels = 0; + GST_OBJECT_UNLOCK(self); + + while ((item = (QueueItem *)gst_queue_array_pop_head_struct(self->queue))) { + if (item->type == QUEUE_ITEM_TYPE_FRAME) { + gst_buffer_unmap(item->video_buffer, &item->video_map); + gst_buffer_unref(item->video_buffer); + + if (item->audio_buffer) { + gst_buffer_unmap(item->audio_buffer, &item->audio_map); + gst_buffer_unref(item->audio_buffer); + } + if (item->anc_buffer) { + gst_buffer_unmap(item->anc_buffer, &item->anc_map); + gst_buffer_unref(item->anc_buffer); + } + if (item->anc_buffer2) { + gst_buffer_unmap(item->anc_buffer2, &item->anc_map2); + gst_buffer_unref(item->anc_buffer2); + } + } + } + + if (self->buffer_pool) { + gst_buffer_pool_set_active(self->buffer_pool, FALSE); + gst_clear_object(&self->buffer_pool); + } + + if (self->audio_buffer_pool) { + gst_buffer_pool_set_active(self->audio_buffer_pool, FALSE); + gst_clear_object(&self->audio_buffer_pool); + } + + if (self->anc_buffer_pool) { + gst_buffer_pool_set_active(self->anc_buffer_pool, FALSE); + gst_clear_object(&self->anc_buffer_pool); + } + + if (self->tc_indexes) { + delete self->tc_indexes; + self->tc_indexes = NULL; + } + + GST_DEBUG_OBJECT(self, "Stopped"); + + return TRUE; +} + +static GstStateChangeReturn gst_aja_sink_change_state( + GstElement *element, GstStateChange transition) { + GstAjaSink *self = GST_AJA_SINK(element); + GstStateChangeReturn ret; + + switch (transition) { + case GST_STATE_CHANGE_NULL_TO_READY: + if (!gst_aja_sink_open(self)) return GST_STATE_CHANGE_FAILURE; + break; + case GST_STATE_CHANGE_READY_TO_PAUSED: + if (!gst_aja_sink_start(self)) return GST_STATE_CHANGE_FAILURE; + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + break; + default: + break; + } + + ret = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition); + if (ret == GST_STATE_CHANGE_FAILURE) return ret; + + switch (transition) { + case GST_STATE_CHANGE_PLAYING_TO_PAUSED: + g_mutex_lock(&self->queue_lock); + self->playing = FALSE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + g_mutex_lock(&self->queue_lock); + self->playing = TRUE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + 
if (!gst_aja_sink_stop(self)) return GST_STATE_CHANGE_FAILURE; + break; + case GST_STATE_CHANGE_READY_TO_NULL: + if (!gst_aja_sink_close(self)) return GST_STATE_CHANGE_FAILURE; + break; + default: + break; + } + + return ret; +} + +static gboolean gst_aja_sink_set_caps(GstBaseSink *bsink, GstCaps *caps) { + GstAjaSink *self = GST_AJA_SINK(bsink); + const GstStructure *s; + NTV2VideoFormat video_format = ::NTV2_FORMAT_UNKNOWN; + + GST_DEBUG_OBJECT(self, "Configuring caps %" GST_PTR_FORMAT, caps); + + GST_OBJECT_LOCK(self); + if (self->configured_caps) { + if (!gst_caps_can_intersect(self->configured_caps, caps)) { + GST_DEBUG_OBJECT(self, "Need to reconfigure, waiting for draining"); + GST_OBJECT_UNLOCK(self); + g_mutex_lock(&self->queue_lock); + self->draining = TRUE; + g_cond_signal(&self->queue_cond); + while (self->draining && !self->flushing && !self->shutdown) { + g_cond_wait(&self->drain_cond, &self->queue_lock); + } + + if (self->flushing || self->shutdown) { + g_mutex_unlock(&self->queue_lock); + GST_DEBUG_OBJECT(self, "Flushing"); + return FALSE; + } + g_mutex_unlock(&self->queue_lock); + GST_OBJECT_LOCK(self); + } else { + GST_OBJECT_UNLOCK(self); + GST_DEBUG_OBJECT(self, + "Compatible caps with previous caps, not reconfiguring"); + return TRUE; + } + } + + if (!gst_video_info_from_caps(&self->configured_info, caps)) { + GST_OBJECT_UNLOCK(self); + GST_FIXME_OBJECT(self, "Failed to parse caps"); + return FALSE; + } + + self->configured_audio_channels = 0; + s = gst_caps_get_structure(caps, 0); + gst_structure_get_int(s, "audio-channels", &self->configured_audio_channels); + + gst_caps_replace(&self->configured_caps, caps); + GST_OBJECT_UNLOCK(self); + + bool quad_mode = (self->sdi_mode != GST_AJA_SDI_MODE_SINGLE_LINK); + video_format = gst_ntv2_video_format_from_caps(caps, quad_mode); + if (video_format == NTV2_FORMAT_UNKNOWN) { + GST_ERROR_OBJECT(self, "Unsupported caps %" GST_PTR_FORMAT, caps); + return FALSE; + } + + if (quad_mode) { + if (self->output_destination != GST_AJA_OUTPUT_DESTINATION_AUTO) { + GST_ERROR_OBJECT(self, + "Quad modes require usage of the channel's default " + "output destination"); + return FALSE; + } + + if (self->channel != ::NTV2_CHANNEL1 && self->channel != ::NTV2_CHANNEL5) { + GST_ERROR_OBJECT(self, "Quad modes require channels 1 or 5"); + return FALSE; + } + } + + self->quad_mode = quad_mode; + self->video_format = video_format; + + g_assert(self->device != NULL); + + // Make sure to globally lock here as the routing settings and others are + // global shared state + ShmMutexLocker locker; + + if (!::NTV2DeviceCanDoVideoFormat(self->device_id, video_format)) { + GST_ERROR_OBJECT(self, "Device does not support mode %d", + (int)video_format); + return FALSE; + } + + self->device->device->SetMode(self->channel, NTV2_MODE_DISPLAY, false); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetMode((NTV2Channel)(self->channel + i), + NTV2_MODE_DISPLAY, false); + } + + GST_DEBUG_OBJECT(self, "Configuring video format %d on channel %d", + (int)video_format, (int)self->channel); + self->device->device->SetVideoFormat(video_format, false, false, + self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetVideoFormat(video_format, false, false, + (NTV2Channel)(self->channel + i)); + } + + if (!::NTV2DeviceCanDoFrameBufferFormat(self->device_id, + ::NTV2_FBF_10BIT_YCBCR)) { + GST_ERROR_OBJECT(self, "Device does not support frame buffer format %d", + (int)::NTV2_FBF_10BIT_YCBCR); + return FALSE; 
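As the checks above enforce, quad-link SDI output is only accepted on channel 1 or channel 5 (property values 0 and 4) and only with the channel's default output destination. A short sketch of selecting two-sample-interleave quad-link output on an existing ajasink instance (illustrative, not part of this patch):

    #include <gst/gst.h>
    #include "gstajacommon.h"

    static void configure_quad_link_tsi(GstElement *sink /* an "ajasink" */) {
      g_object_set(sink,
                   // Quad modes require channel 0 (NTV2_CHANNEL1) or 4 (NTV2_CHANNEL5)
                   "channel", 0,
                   "sdi-mode", GST_AJA_SDI_MODE_QUAD_LINK_TSI,
                   NULL);
      // "output-destination" is left at its "auto" default, as required by
      // gst_aja_sink_set_caps() for quad modes.
    }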
+ } + self->device->device->SetFrameBufferFormat(self->channel, + ::NTV2_FBF_10BIT_YCBCR); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetFrameBufferFormat( + (NTV2Channel)(self->channel + i), ::NTV2_FBF_10BIT_YCBCR); + } + + NTV2ReferenceSource reference_source; + switch (self->reference_source) { + case GST_AJA_REFERENCE_SOURCE_EXTERNAL: + reference_source = ::NTV2_REFERENCE_EXTERNAL; + break; + case GST_AJA_REFERENCE_SOURCE_FREERUN: + case GST_AJA_REFERENCE_SOURCE_AUTO: + reference_source = ::NTV2_REFERENCE_FREERUN; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_1: + reference_source = ::NTV2_REFERENCE_INPUT1; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_2: + reference_source = ::NTV2_REFERENCE_INPUT2; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_3: + reference_source = ::NTV2_REFERENCE_INPUT3; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_4: + reference_source = ::NTV2_REFERENCE_INPUT4; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_5: + reference_source = ::NTV2_REFERENCE_INPUT5; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_6: + reference_source = ::NTV2_REFERENCE_INPUT6; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_7: + reference_source = ::NTV2_REFERENCE_INPUT7; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_8: + reference_source = ::NTV2_REFERENCE_INPUT8; + break; + default: + g_assert_not_reached(); + break; + } + GST_DEBUG_OBJECT(self, "Configuring reference source %d", + (int)reference_source); + self->device->device->SetFramePulseReference(reference_source); + + self->device->device->DMABufferAutoLock(false, true, 0); + + if (::NTV2DeviceHasBiDirectionalSDI(self->device_id)) + self->device->device->SetSDITransmitEnable(self->channel, true); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetSDITransmitEnable( + (NTV2Channel)(self->channel + i), true); + } + + if (self->configured_audio_channels) { + switch (self->audio_system_setting) { + case GST_AJA_AUDIO_SYSTEM_1: + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + break; + case GST_AJA_AUDIO_SYSTEM_2: + self->audio_system = ::NTV2_AUDIOSYSTEM_2; + break; + case GST_AJA_AUDIO_SYSTEM_3: + self->audio_system = ::NTV2_AUDIOSYSTEM_3; + break; + case GST_AJA_AUDIO_SYSTEM_4: + self->audio_system = ::NTV2_AUDIOSYSTEM_4; + break; + case GST_AJA_AUDIO_SYSTEM_5: + self->audio_system = ::NTV2_AUDIOSYSTEM_5; + break; + case GST_AJA_AUDIO_SYSTEM_6: + self->audio_system = ::NTV2_AUDIOSYSTEM_6; + break; + case GST_AJA_AUDIO_SYSTEM_7: + self->audio_system = ::NTV2_AUDIOSYSTEM_7; + break; + case GST_AJA_AUDIO_SYSTEM_8: + self->audio_system = ::NTV2_AUDIOSYSTEM_8; + break; + case GST_AJA_AUDIO_SYSTEM_AUTO: + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + if (::NTV2DeviceGetNumAudioSystems(self->device_id) > 1) + self->audio_system = ::NTV2ChannelToAudioSystem(self->channel); + if (!::NTV2DeviceCanDoFrameStore1Display(self->device_id)) + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + break; + default: + g_assert_not_reached(); + break; + } + + GST_DEBUG_OBJECT(self, "Using audio system %d", self->audio_system); + + self->device->device->SetNumberAudioChannels( + self->configured_audio_channels, self->audio_system); + self->device->device->SetAudioRate(::NTV2_AUDIO_48K, self->audio_system); + self->device->device->SetAudioBufferSize(::NTV2_AUDIO_BUFFER_BIG, + self->audio_system); + self->device->device->SetSDIOutputAudioSystem(self->channel, + self->audio_system); + self->device->device->SetSDIOutputDS2AudioSystem(self->channel, + self->audio_system); + if (self->quad_mode) { + 
for (int i = 1; i < 4; i++) { + self->device->device->SetSDIOutputAudioSystem( + (NTV2Channel)(self->channel + i), self->audio_system); + self->device->device->SetSDIOutputDS2AudioSystem( + (NTV2Channel)(self->channel + i), self->audio_system); + } + } + self->device->device->SetAudioLoopBack(::NTV2_AUDIO_LOOPBACK_OFF, + self->audio_system); + } else { + self->audio_system = ::NTV2_AUDIOSYSTEM_INVALID; + } + + // Always use the framebuffer associated with the channel + NTV2OutputCrosspointID framebuffer_id = + ::GetFrameBufferOutputXptFromChannel(self->channel, false, false); + + NTV2VANCMode vanc_mode; + NTV2TCIndex tc_indexes_vitc[2] = {::NTV2_TCINDEX_INVALID, + ::NTV2_TCINDEX_INVALID}; + NTV2TCIndex tc_index_atc_ltc = ::NTV2_TCINDEX_INVALID; + NTV2InputCrosspointID output_destination_id; + switch (self->output_destination) { + case GST_AJA_OUTPUT_DESTINATION_AUTO: + tc_indexes_vitc[0] = + ::NTV2ChannelToTimecodeIndex(self->channel, false, false); + tc_indexes_vitc[1] = + ::NTV2ChannelToTimecodeIndex(self->channel, false, true); + tc_index_atc_ltc = + ::NTV2ChannelToTimecodeIndex(self->channel, false, true); + output_destination_id = ::GetSDIOutputInputXpt(self->channel, false); + vanc_mode = ::NTV2DeviceCanDoCustomAnc(self->device_id) + ? ::NTV2_VANCMODE_OFF + : ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI1: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI1; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI1_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI1_LTC; + output_destination_id = ::NTV2_XptSDIOut1Input; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI2: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI2; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI2_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI2_LTC; + output_destination_id = ::NTV2_XptSDIOut2Input; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI3: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI3; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI3_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI3_LTC; + output_destination_id = ::NTV2_XptSDIOut3Input; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI4: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI4; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI4_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI4_LTC; + output_destination_id = ::NTV2_XptSDIOut4Input; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI5: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI5; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI5_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI5_LTC; + output_destination_id = ::NTV2_XptSDIOut5Input; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI6: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI6; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI6_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI6_LTC; + output_destination_id = ::NTV2_XptSDIOut6Input; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI7: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI7; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI7_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI7_LTC; + output_destination_id = ::NTV2_XptSDIOut7Input; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_OUTPUT_DESTINATION_SDI8: + tc_indexes_vitc[0] = ::NTV2_TCINDEX_SDI8; + tc_indexes_vitc[1] = ::NTV2_TCINDEX_SDI8_2; + tc_index_atc_ltc = ::NTV2_TCINDEX_SDI8_LTC; + output_destination_id = ::NTV2_XptSDIOut8Input; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_OUTPUT_DESTINATION_ANALOG: + output_destination_id = 
::NTV2_XptAnalogOutInput; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_OUTPUT_DESTINATION_HDMI: + output_destination_id = ::NTV2_XptHDMIOutInput; + vanc_mode = ::NTV2_VANCMODE_OFF; + break; + default: + g_assert_not_reached(); + break; + } + + if (!self->tc_indexes) self->tc_indexes = new NTV2TCIndexes; + + switch (self->timecode_index) { + case GST_AJA_TIMECODE_INDEX_VITC: + self->tc_indexes->insert(tc_indexes_vitc[0]); + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) + self->tc_indexes->insert(tc_indexes_vitc[1]); + break; + case GST_AJA_TIMECODE_INDEX_ATC_LTC: + self->tc_indexes->insert(tc_index_atc_ltc); + break; + case GST_AJA_TIMECODE_INDEX_LTC1: + self->tc_indexes->insert(::NTV2_TCINDEX_LTC1); + break; + case GST_AJA_TIMECODE_INDEX_LTC2: + self->tc_indexes->insert(::NTV2_TCINDEX_LTC2); + break; + default: + g_assert_not_reached(); + break; + } + + const NTV2Standard standard(::GetNTV2StandardFromVideoFormat(video_format)); + self->device->device->SetSDIOutputStandard(self->channel, standard); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetSDIOutputStandard( + (NTV2Channel)(self->channel + i), standard); + } + const NTV2FrameGeometry geometry = + ::GetNTV2FrameGeometryFromVideoFormat(video_format); + + self->vanc_mode = + ::HasVANCGeometries(geometry) ? vanc_mode : ::NTV2_VANCMODE_OFF; + if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { + self->device->device->SetFrameGeometry(geometry, false, self->channel); + self->device->device->SetVANCMode(self->vanc_mode, self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->SetFrameGeometry( + geometry, false, (NTV2Channel)(self->channel + i)); + self->device->device->SetVANCMode(self->vanc_mode, + (NTV2Channel)(self->channel + i)); + } + } + } else { + const NTV2FrameGeometry vanc_geometry = + ::GetVANCFrameGeometry(geometry, self->vanc_mode); + + self->device->device->SetFrameGeometry(vanc_geometry, false, self->channel); + self->device->device->SetVANCMode(self->vanc_mode, self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->SetFrameGeometry( + vanc_geometry, false, (NTV2Channel)(self->channel + i)); + self->device->device->SetVANCMode(self->vanc_mode, + (NTV2Channel)(self->channel + i)); + } + } + } + + bool had_quad_enabled = false, had_quad_quad_enabled = false; + + if (self->channel < ::NTV2_CHANNEL5) { + self->device->device->GetQuadFrameEnable(had_quad_enabled, ::NTV2_CHANNEL1); + self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, + ::NTV2_CHANNEL1); + } else { + self->device->device->GetQuadFrameEnable(had_quad_enabled, ::NTV2_CHANNEL5); + self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, + ::NTV2_CHANNEL5); + } + + if (self->quad_mode) { + switch (self->sdi_mode) { + case GST_AJA_SDI_MODE_SINGLE_LINK: + g_assert_not_reached(); + break; + case GST_AJA_SDI_MODE_QUAD_LINK_SQD: + if (self->configured_info.height > 2160) { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(true, self->channel); + self->device->device->SetQuadQuadSquaresEnable(true, self->channel); + } else { + self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + self->device->device->Set4kSquaresEnable(true, self->channel); + 
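The timecode indexes collected above essentially select where the timecode is embedded on the output (VITC, ATC LTC or the LTC outputs); the timecode value itself comes from the GstVideoTimeCodeMeta that upstream attaches to each video buffer, which gst_aja_sink_render() below fetches with gst_buffer_get_video_time_code_meta(). A sketch of tagging buffers with a running 25 fps timecode before they reach the sink (the frame rate and start value are illustrative):

    #include <gst/video/video.h>

    static void attach_timecode(GstBuffer *buffer, gint64 frame_index) {
      GstVideoTimeCode tc;

      // 25/1 fps, no daily jam, non-drop-frame, starting at 00:00:00:00
      gst_video_time_code_init(&tc, 25, 1, NULL, GST_VIDEO_TIME_CODE_FLAGS_NONE,
                               0, 0, 0, 0, 0);
      gst_video_time_code_add_frames(&tc, frame_index);
      gst_buffer_add_video_time_code_meta(buffer, &tc);
      gst_video_time_code_clear(&tc);
    }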
self->device->device->SetTsiFrameEnable(false, self->channel); + } + break; + case GST_AJA_SDI_MODE_QUAD_LINK_TSI: + if (self->configured_info.height > 2160) { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(true, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + } else { + self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(true, self->channel); + } + break; + } + } else { + NTV2Channel quad_channel; + + if (self->channel < ::NTV2_CHANNEL5) + quad_channel = ::NTV2_CHANNEL1; + else + quad_channel = ::NTV2_CHANNEL5; + + self->device->device->Set4kSquaresEnable(false, quad_channel); + self->device->device->SetTsiFrameEnable(false, quad_channel); + self->device->device->SetQuadQuadFrameEnable(false, quad_channel); + self->device->device->SetQuadQuadSquaresEnable(false, quad_channel); + } + + NTV2SmpteLineNumber smpte_line_num_info = ::GetSmpteLineNumber(standard); + self->f2_start_line = + (smpte_line_num_info.GetLastLine( + smpte_line_num_info.firstFieldTop ? NTV2_FIELD0 : NTV2_FIELD1) + + 1); + + CNTV2SignalRouter router; + + // If any channels are currently running, initialize the router with the + // existing routing setup. Otherwise overwrite the whole routing table. + { + bool have_channels_running = false; + + for (NTV2Channel c = ::NTV2_CHANNEL1; c < NTV2_MAX_NUM_CHANNELS; + c = (NTV2Channel)(c + 1)) { + AUTOCIRCULATE_STATUS ac_status; + + if (c == self->channel) continue; + + if (self->device->device->AutoCirculateGetStatus(c, ac_status) && + !ac_status.IsStopped()) { + have_channels_running = true; + break; + } + } + + if (have_channels_running) self->device->device->GetRouting(router); + } + + // Need to remove old routes for the output and framebuffer we're going to use + NTV2ActualConnections connections = router.GetConnections(); + + if (self->quad_mode) { + if (self->channel == NTV2_CHANNEL1) { + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == NTV2_XptSDIOut1Input || + iter->first == NTV2_XptSDIOut1InputDS2 || + iter->first == NTV2_XptSDIOut2Input || + iter->first == NTV2_XptSDIOut2InputDS2 || + iter->first == NTV2_XptSDIOut3Input || + iter->first == NTV2_XptSDIOut4Input || + iter->second == NTV2_Xpt425Mux1AYUV || + iter->second == NTV2_Xpt425Mux1BYUV || + iter->second == NTV2_Xpt425Mux2AYUV || + iter->second == NTV2_Xpt425Mux2BYUV || + iter->first == NTV2_Xpt425Mux1AInput || + iter->first == NTV2_Xpt425Mux1BInput || + iter->first == NTV2_Xpt425Mux2AInput || + iter->first == NTV2_Xpt425Mux2BInput || + iter->second == NTV2_XptFrameBuffer1YUV || + iter->second == NTV2_XptFrameBuffer2YUV || + iter->second == NTV2_XptFrameBuffer3YUV || + iter->second == NTV2_XptFrameBuffer4YUV || + iter->second == NTV2_XptFrameBuffer1_DS2YUV || + iter->second == NTV2_XptFrameBuffer2_DS2YUV || + iter->first == NTV2_XptSDIOut1Input || + iter->first == NTV2_XptSDIOut2Input || + iter->first == NTV2_XptSDIOut3Input || + iter->first == NTV2_XptSDIOut4Input) + router.RemoveConnection(iter->first, iter->second); + } + } else if (self->channel == NTV2_CHANNEL5) { + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == NTV2_XptSDIOut5Input || + iter->first == 
NTV2_XptSDIOut5InputDS2 || + iter->first == NTV2_XptSDIOut6Input || + iter->first == NTV2_XptSDIOut6InputDS2 || + iter->first == NTV2_XptSDIOut7Input || + iter->first == NTV2_XptSDIOut8Input || + iter->second == NTV2_Xpt425Mux3AYUV || + iter->second == NTV2_Xpt425Mux3BYUV || + iter->second == NTV2_Xpt425Mux4AYUV || + iter->second == NTV2_Xpt425Mux4BYUV || + iter->first == NTV2_Xpt425Mux3AInput || + iter->first == NTV2_Xpt425Mux3BInput || + iter->first == NTV2_Xpt425Mux4AInput || + iter->first == NTV2_Xpt425Mux4BInput || + iter->second == NTV2_XptFrameBuffer5YUV || + iter->second == NTV2_XptFrameBuffer6YUV || + iter->second == NTV2_XptFrameBuffer7YUV || + iter->second == NTV2_XptFrameBuffer8YUV || + iter->second == NTV2_XptFrameBuffer3_DS2YUV || + iter->second == NTV2_XptFrameBuffer4_DS2YUV || + iter->second == NTV2_XptFrameBuffer5_DS2YUV || + iter->second == NTV2_XptFrameBuffer6_DS2YUV || + iter->first == NTV2_XptSDIOut5Input || + iter->first == NTV2_XptSDIOut6Input || + iter->first == NTV2_XptSDIOut7Input || + iter->first == NTV2_XptSDIOut8Input) + router.RemoveConnection(iter->first, iter->second); + } + } else { + g_assert_not_reached(); + } + } else { + // This also removes all connections for any previous quad mode on the + // corresponding channels. + + NTV2InputCrosspointID quad_output_source_ids[10]; + + if (output_destination_id == NTV2_XptSDIOut1Input || + output_destination_id == NTV2_XptSDIOut2Input || + output_destination_id == NTV2_XptSDIOut3Input || + output_destination_id == NTV2_XptSDIOut4Input) { + quad_output_source_ids[0] = NTV2_XptSDIOut1Input; + quad_output_source_ids[1] = NTV2_XptSDIOut2Input; + quad_output_source_ids[2] = NTV2_XptSDIOut3Input; + quad_output_source_ids[3] = NTV2_XptSDIOut4Input; + quad_output_source_ids[4] = NTV2_XptSDIOut1InputDS2; + quad_output_source_ids[5] = NTV2_XptSDIOut2InputDS2; + quad_output_source_ids[6] = NTV2_Xpt425Mux1AInput; + quad_output_source_ids[7] = NTV2_Xpt425Mux1BInput; + quad_output_source_ids[8] = NTV2_Xpt425Mux2AInput; + quad_output_source_ids[9] = NTV2_Xpt425Mux2BInput; + } else if (output_destination_id == NTV2_XptSDIOut5Input || + output_destination_id == NTV2_XptSDIOut6Input || + output_destination_id == NTV2_XptSDIOut7Input || + output_destination_id == NTV2_XptSDIOut8Input) { + quad_output_source_ids[0] = NTV2_XptSDIOut5Input; + quad_output_source_ids[1] = NTV2_XptSDIOut6Input; + quad_output_source_ids[2] = NTV2_XptSDIOut7Input; + quad_output_source_ids[3] = NTV2_XptSDIOut8Input; + quad_output_source_ids[4] = NTV2_XptSDIOut5InputDS2; + quad_output_source_ids[5] = NTV2_XptSDIOut6InputDS2; + quad_output_source_ids[6] = NTV2_Xpt425Mux3AInput; + quad_output_source_ids[7] = NTV2_Xpt425Mux3BInput; + quad_output_source_ids[8] = NTV2_Xpt425Mux4AInput; + quad_output_source_ids[9] = NTV2_Xpt425Mux4BInput; + } + + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (had_quad_enabled || had_quad_quad_enabled) { + for (auto quad_output_source_id : quad_output_source_ids) { + if (iter->first == quad_output_source_id) + router.RemoveConnection(iter->first, iter->second); + } + } else { + if (iter->first == output_destination_id || + iter->second == framebuffer_id) + router.RemoveConnection(iter->first, iter->second); + } + } + } + + if (self->quad_mode) { + if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI && + !NTV2_IS_QUAD_QUAD_HFR_VIDEO_FORMAT(self->video_format) && + !NTV2_IS_QUAD_QUAD_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) + framebuffer_id = NTV2_Xpt425Mux1AYUV; + else if 
(self->channel == NTV2_CHANNEL5) + framebuffer_id = NTV2_Xpt425Mux3AYUV; + else + g_assert_not_reached(); + } + } + + GST_DEBUG_OBJECT(self, "Creating connection %d - %d", output_destination_id, + framebuffer_id); + router.AddConnection(output_destination_id, framebuffer_id); + + if (self->quad_mode) { + if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI) { + if (NTV2_IS_QUAD_QUAD_HFR_VIDEO_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptSDIOut2Input, + NTV2_XptFrameBuffer1_DS2YUV); + router.AddConnection(NTV2_XptSDIOut3Input, NTV2_XptFrameBuffer2YUV); + router.AddConnection(NTV2_XptSDIOut4Input, + NTV2_XptFrameBuffer2_DS2YUV); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptSDIOut6Input, + NTV2_XptFrameBuffer3_DS2YUV); + router.AddConnection(NTV2_XptSDIOut7Input, NTV2_XptFrameBuffer4YUV); + router.AddConnection(NTV2_XptSDIOut8Input, + NTV2_XptFrameBuffer4_DS2YUV); + } else { + g_assert_not_reached(); + } + } else if (NTV2_IS_QUAD_QUAD_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptSDIOut1InputDS2, + NTV2_XptFrameBuffer1_DS2YUV); + router.AddConnection(NTV2_XptSDIOut2Input, NTV2_XptFrameBuffer2YUV); + router.AddConnection(NTV2_XptSDIOut2InputDS2, + NTV2_XptFrameBuffer2_DS2YUV); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptSDIOut5InputDS2, + NTV2_XptFrameBuffer3_DS2YUV); + router.AddConnection(NTV2_XptSDIOut6Input, NTV2_XptFrameBuffer4YUV); + router.AddConnection(NTV2_XptSDIOut6InputDS2, + NTV2_XptFrameBuffer4_DS2YUV); + } else { + g_assert_not_reached(); + } + } else if (NTV2_IS_4K_HFR_VIDEO_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptSDIOut2Input, NTV2_Xpt425Mux1BYUV); + router.AddConnection(NTV2_XptSDIOut3Input, NTV2_Xpt425Mux2AYUV); + router.AddConnection(NTV2_XptSDIOut4Input, NTV2_Xpt425Mux2BYUV); + + router.AddConnection(NTV2_Xpt425Mux1AInput, NTV2_XptFrameBuffer1YUV); + router.AddConnection(NTV2_Xpt425Mux1BInput, + NTV2_XptFrameBuffer1_DS2YUV); + router.AddConnection(NTV2_Xpt425Mux2AInput, NTV2_XptFrameBuffer2YUV); + router.AddConnection(NTV2_Xpt425Mux2BInput, + NTV2_XptFrameBuffer2_DS2YUV); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptSDIOut6Input, NTV2_Xpt425Mux3BYUV); + router.AddConnection(NTV2_XptSDIOut7Input, NTV2_Xpt425Mux4AYUV); + router.AddConnection(NTV2_XptSDIOut8Input, NTV2_Xpt425Mux4BYUV); + + router.AddConnection(NTV2_Xpt425Mux3AInput, NTV2_XptFrameBuffer5YUV); + router.AddConnection(NTV2_Xpt425Mux3BInput, + NTV2_XptFrameBuffer5_DS2YUV); + router.AddConnection(NTV2_Xpt425Mux4AInput, NTV2_XptFrameBuffer6YUV); + router.AddConnection(NTV2_Xpt425Mux4BInput, + NTV2_XptFrameBuffer6_DS2YUV); + } else { + g_assert_not_reached(); + } + } else { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptSDIOut1InputDS2, NTV2_Xpt425Mux1BYUV); + router.AddConnection(NTV2_XptSDIOut2Input, NTV2_Xpt425Mux2AYUV); + router.AddConnection(NTV2_XptSDIOut2InputDS2, NTV2_Xpt425Mux2BYUV); + + router.AddConnection(NTV2_Xpt425Mux1AInput, NTV2_XptFrameBuffer1YUV); + router.AddConnection(NTV2_Xpt425Mux1BInput, + NTV2_XptFrameBuffer1_DS2YUV); + router.AddConnection(NTV2_Xpt425Mux2AInput, NTV2_XptFrameBuffer2YUV); + router.AddConnection(NTV2_Xpt425Mux2BInput, + NTV2_XptFrameBuffer2_DS2YUV); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptSDIOut5InputDS2, NTV2_Xpt425Mux3BYUV); + 
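All of the routing above boils down to CNTV2SignalRouter connections from frame-buffer (and TSI mux) outputs to SDI output inputs; the single-link case is just one such connection. A stand-alone sketch using the same SDK calls, assuming the AJA NTV2 headers this file already includes, and merging into the existing routing rather than replacing it:

    static bool route_single_link_sdi1(CNTV2Card &device) {
      CNTV2SignalRouter router;

      // Frame store 1 video output -> SDI output 1 input, the same
      // crosspoints used in the single-link path above.
      router.AddConnection(::NTV2_XptSDIOut1Input, ::NTV2_XptFrameBuffer1YUV);

      // false = merge with the current routing; gst_aja_sink_set_caps()
      // passes true here because it has already pruned stale connections.
      return device.ApplySignalRoute(router, false);
    }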
router.AddConnection(NTV2_XptSDIOut6Input, NTV2_Xpt425Mux4AYUV); + router.AddConnection(NTV2_XptSDIOut6InputDS2, NTV2_Xpt425Mux4BYUV); + + router.AddConnection(NTV2_Xpt425Mux3AInput, NTV2_XptFrameBuffer5YUV); + router.AddConnection(NTV2_Xpt425Mux3BInput, + NTV2_XptFrameBuffer5_DS2YUV); + router.AddConnection(NTV2_Xpt425Mux4AInput, NTV2_XptFrameBuffer6YUV); + router.AddConnection(NTV2_Xpt425Mux4BInput, + NTV2_XptFrameBuffer6_DS2YUV); + } else { + g_assert_not_reached(); + } + } + } else if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_SQD) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptSDIOut2Input, NTV2_XptFrameBuffer2YUV); + router.AddConnection(NTV2_XptSDIOut3Input, NTV2_XptFrameBuffer3YUV); + router.AddConnection(NTV2_XptSDIOut4Input, NTV2_XptFrameBuffer4YUV); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptSDIOut6Input, NTV2_XptFrameBuffer6YUV); + router.AddConnection(NTV2_XptSDIOut7Input, NTV2_XptFrameBuffer7YUV); + router.AddConnection(NTV2_XptSDIOut8Input, NTV2_XptFrameBuffer8YUV); + } else { + g_assert_not_reached(); + } + } + } + + { + std::stringstream os; + CNTV2SignalRouter oldRouter; + self->device->device->GetRouting(oldRouter); + oldRouter.Print(os); + GST_DEBUG_OBJECT(self, "Previous routing:\n%s", os.str().c_str()); + } + self->device->device->ApplySignalRoute(router, true); + { + std::stringstream os; + CNTV2SignalRouter currentRouter; + self->device->device->GetRouting(currentRouter); + currentRouter.Print(os); + GST_DEBUG_OBJECT(self, "New routing:\n%s", os.str().c_str()); + } + + return TRUE; +} + +static GstCaps *gst_aja_sink_get_caps(GstBaseSink *bsink, GstCaps *filter) { + GstAjaSink *self = GST_AJA_SINK(bsink); + GstCaps *caps; + + if (self->device) { + caps = gst_ntv2_supported_caps(self->device_id); + } else { + caps = gst_pad_get_pad_template_caps(GST_BASE_SINK_PAD(self)); + } + + if (filter) { + GstCaps *tmp = + gst_caps_intersect_full(filter, caps, GST_CAPS_INTERSECT_FIRST); + gst_caps_unref(caps); + caps = tmp; + } + + return caps; +} + +static gboolean gst_aja_sink_event(GstBaseSink *bsink, GstEvent *event) { + GstAjaSink *self = GST_AJA_SINK(bsink); + + switch (GST_EVENT_TYPE(event)) { + case GST_EVENT_EOS: { + GST_DEBUG_OBJECT(self, "Signalling EOS"); + + g_mutex_lock(&self->queue_lock); + self->eos = TRUE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + break; + } + case GST_EVENT_FLUSH_START: { + g_mutex_lock(&self->queue_lock); + self->flushing = TRUE; + self->draining = FALSE; + g_cond_signal(&self->drain_cond); + g_mutex_unlock(&self->queue_lock); + break; + } + case GST_EVENT_FLUSH_STOP: { + QueueItem *item; + + g_mutex_lock(&self->queue_lock); + while ( + (item = (QueueItem *)gst_queue_array_pop_head_struct(self->queue))) { + if (item->type == QUEUE_ITEM_TYPE_FRAME) { + gst_buffer_unmap(item->video_buffer, &item->video_map); + gst_buffer_unref(item->video_buffer); + + if (item->audio_buffer) { + gst_buffer_unmap(item->audio_buffer, &item->audio_map); + gst_buffer_unref(item->audio_buffer); + } + + if (item->anc_buffer) { + gst_buffer_unmap(item->anc_buffer, &item->anc_map); + gst_buffer_unref(item->anc_buffer); + } + if (item->anc_buffer2) { + gst_buffer_unmap(item->anc_buffer2, &item->anc_map2); + gst_buffer_unref(item->anc_buffer2); + } + } + } + g_cond_signal(&self->queue_cond); + + self->flushing = FALSE; + g_cond_signal(&self->drain_cond); + g_mutex_unlock(&self->queue_lock); + break; + } + default: + break; + } + + return 
GST_BASE_SINK_CLASS(parent_class)->event(bsink, event);
+}
+
+static gboolean gst_aja_sink_propose_allocation(GstBaseSink *bsink,
+                                                GstQuery *query) {
+  GstAjaSink *self = GST_AJA_SINK(bsink);
+
+  if (self->allocator && self->vanc_mode == ::NTV2_VANCMODE_OFF) {
+    GstAllocationParams params;
+
+    gst_allocation_params_init(&params);
+    params.prefix = 0;
+    params.padding = 0;
+    params.align = 4095;
+
+    gst_query_add_allocation_param(query, self->allocator, &params);
+  }
+
+  return TRUE;
+}
+
+static GstFlowReturn gst_aja_sink_render(GstBaseSink *bsink,
+                                         GstBuffer *buffer) {
+  GstAjaSink *self = GST_AJA_SINK(bsink);
+  GstFlowReturn flow_ret = GST_FLOW_OK;
+  GstAjaAudioMeta *meta;
+  GstBuffer *item_buffer = NULL, *item_audio_buffer = NULL;
+  GstVideoTimeCodeMeta *tc_meta;
+  QueueItem item = {
+      .type = QUEUE_ITEM_TYPE_FRAME,
+      .video_buffer = NULL,
+      .video_map = GST_MAP_INFO_INIT,
+      .audio_buffer = NULL,
+      .audio_map = GST_MAP_INFO_INIT,
+      .tc = NTV2_RP188(),
+      .anc_buffer = NULL,
+      .anc_map = GST_MAP_INFO_INIT,
+      .anc_buffer2 = NULL,
+      .anc_map2 = GST_MAP_INFO_INIT,
+  };
+
+  guint video_buffer_size = ::GetVideoActiveSize(
+      self->video_format, ::NTV2_FBF_10BIT_YCBCR, self->vanc_mode);
+  NTV2FormatDescriptor format_desc(self->video_format, ::NTV2_FBF_10BIT_YCBCR,
+                                   self->vanc_mode);
+
+  meta = gst_buffer_get_aja_audio_meta(buffer);
+  tc_meta = gst_buffer_get_video_time_code_meta(buffer);
+
+  if (self->vanc_mode == ::NTV2_VANCMODE_OFF &&
+      gst_buffer_n_memory(buffer) == 1) {
+    GstMemory *mem = gst_buffer_peek_memory(buffer, 0);
+    gsize offset;
+
+    if (gst_memory_get_sizes(mem, &offset, NULL) == video_buffer_size &&
+        offset == 0 &&
+        strcmp(mem->allocator->mem_type, GST_AJA_ALLOCATOR_MEMTYPE) == 0 &&
+        GST_AJA_ALLOCATOR(mem->allocator)->device->device->GetIndexNumber() ==
+            self->device->device->GetIndexNumber()) {
+      item_buffer = gst_buffer_ref(buffer);
+    }
+  }
+
+  if (!item_buffer) {
+    GstVideoFrame in_frame;
+
+    GST_DEBUG_OBJECT(self, "Allocating new video buffer");
+
+    if (!self->buffer_pool) {
+      self->buffer_pool = gst_buffer_pool_new();
+      GstStructure *config = gst_buffer_pool_get_config(self->buffer_pool);
+      gst_buffer_pool_config_set_params(config, NULL, video_buffer_size,
+                                        self->queue_size, 0);
+      gst_buffer_pool_config_set_allocator(config, self->allocator, NULL);
+      gst_buffer_pool_set_config(self->buffer_pool, config);
+      gst_buffer_pool_set_active(self->buffer_pool, TRUE);
+    }
+
+    if (!gst_video_frame_map(&in_frame, &self->configured_info, buffer,
+                             GST_MAP_READ)) {
+      GST_ERROR_OBJECT(self, "Failed to map buffer");
+      return GST_FLOW_ERROR;
+    }
+
+    flow_ret =
+        gst_buffer_pool_acquire_buffer(self->buffer_pool, &item_buffer, NULL);
+    if (flow_ret != GST_FLOW_OK) {
+      gst_video_frame_unmap(&in_frame);
+      return flow_ret;
+    }
+
+    item.type = QUEUE_ITEM_TYPE_FRAME;
+
+    item.video_buffer = item_buffer;
+    gst_buffer_map(item.video_buffer, &item.video_map, GST_MAP_WRITE);
+
+    guint offset =
+        format_desc.RasterLineToByteOffset(format_desc.GetFirstActiveLine());
+    guint size = format_desc.GetVisibleRasterBytes();
+
+    if (offset != 0)
+      ::SetRasterLinesBlack(::NTV2_FBF_10BIT_YCBCR, item.video_map.data,
+                            format_desc.GetBytesPerRow(),
+                            format_desc.GetFirstActiveLine());
+    memcpy(item.video_map.data + offset,
+           GST_VIDEO_FRAME_PLANE_DATA(&in_frame, 0), size);
+
+    gst_video_frame_unmap(&in_frame);
+  } else {
+    item.type = QUEUE_ITEM_TYPE_FRAME;
+
+    item.video_buffer = item_buffer;
+    gst_buffer_map(item.video_buffer, &item.video_map, GST_MAP_READ);
+  }
+
+  if (meta) {
+    if
(gst_buffer_n_memory(meta->buffer) == 1) { + GstMemory *mem = gst_buffer_peek_memory(meta->buffer, 0); + + if (strcmp(mem->allocator->mem_type, GST_AJA_ALLOCATOR_MEMTYPE) == 0 && + GST_AJA_ALLOCATOR(mem->allocator)->device->device->GetIndexNumber() == + self->device->device->GetIndexNumber()) { + item_audio_buffer = gst_buffer_ref(meta->buffer); + } + } + + if (!item_audio_buffer) { + GstMapInfo audio_map; + + GST_DEBUG_OBJECT(self, "Allocating new audio buffer"); + + if (!self->audio_buffer_pool) { + guint audio_buffer_size = 1UL * 1024UL * 1024UL; + + self->audio_buffer_pool = gst_buffer_pool_new(); + GstStructure *config = + gst_buffer_pool_get_config(self->audio_buffer_pool); + gst_buffer_pool_config_set_params(config, NULL, audio_buffer_size, + self->queue_size, 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->audio_buffer_pool, config); + gst_buffer_pool_set_active(self->audio_buffer_pool, TRUE); + } + + flow_ret = gst_buffer_pool_acquire_buffer(self->audio_buffer_pool, + &item_audio_buffer, NULL); + if (flow_ret != GST_FLOW_OK) { + gst_buffer_unmap(item.video_buffer, &item.video_map); + gst_buffer_unref(item.video_buffer); + return flow_ret; + } + + gst_buffer_set_size(item_audio_buffer, gst_buffer_get_size(meta->buffer)); + + gst_buffer_map(meta->buffer, &audio_map, GST_MAP_READ); + gst_buffer_map(item_audio_buffer, &item.audio_map, GST_MAP_READWRITE); + memcpy(item.audio_map.data, audio_map.data, audio_map.size); + gst_buffer_unmap(meta->buffer, &audio_map); + item.audio_buffer = item_audio_buffer; + } else { + gst_buffer_map(item_audio_buffer, &item.audio_map, GST_MAP_READ); + item.audio_buffer = item_audio_buffer; + } + } else { + item.audio_buffer = NULL; + } + + if (tc_meta) { + TimecodeFormat tc_format = ::kTCFormatUnknown; + + if (tc_meta->tc.config.fps_n == 24 && tc_meta->tc.config.fps_d == 1) { + tc_format = kTCFormat24fps; + } else if (tc_meta->tc.config.fps_n == 25 && + tc_meta->tc.config.fps_d == 1) { + tc_format = kTCFormat25fps; + } else if (tc_meta->tc.config.fps_n == 30 && + tc_meta->tc.config.fps_d == 1) { + tc_format = kTCFormat30fps; + } else if (tc_meta->tc.config.fps_n == 30000 && + tc_meta->tc.config.fps_d == 1001) { + tc_format = kTCFormat30fpsDF; + } else if (tc_meta->tc.config.fps_n == 48 && + tc_meta->tc.config.fps_d == 1) { + tc_format = kTCFormat48fps; + } else if (tc_meta->tc.config.fps_n == 50 && + tc_meta->tc.config.fps_d == 1) { + tc_format = kTCFormat50fps; + } else if (tc_meta->tc.config.fps_n == 60 && + tc_meta->tc.config.fps_d == 1) { + tc_format = kTCFormat60fps; + } else if (tc_meta->tc.config.fps_n == 60000 && + tc_meta->tc.config.fps_d == 1001) { + tc_format = kTCFormat60fpsDF; + } + + const CRP188 rp188(tc_meta->tc.frames, tc_meta->tc.seconds, + tc_meta->tc.minutes, tc_meta->tc.hours, tc_format); + rp188.GetRP188Reg(item.tc); + } else { + item.tc.fDBB = 0xffffffff; + } + + AJAAncillaryList anc_packet_list; + + // TODO: Handle AFD/Bar meta +#if 0 + if (bar_meta || afd_meta) { + const uint16_t kF1PktLineNumAFDBAR(11); + const AJAAncillaryDataLocation kAFDBARLocF1( + AJAAncillaryDataLink_A, AJAAncillaryDataVideoStream_Y, + AJAAncillaryDataSpace_VANC, kF1PktLineNumAFDBAR, + AJAAncDataHorizOffset_AnyVanc); + const uint16_t kF2PktLineNumAFDBAR(self->f2_start_line + 11); + const AJAAncillaryDataLocation kAFDBARLocF2( + AJAAncillaryDataLink_A, AJAAncillaryDataVideoStream_Y, + AJAAncillaryDataSpace_VANC, kF2PktLineNumAFDBAR, + AJAAncDataHorizOffset_AnyVanc); + + AJAAncillaryData pkt; + 
pkt.SetFromSMPTE334(NULL, 0, kAFDBARLocF1); + anc_packet_list.AddAncillaryData(pkt); + + if (self->configured_info.interlace_mode != GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { + AJAAncillaryData pkt2; + pkt.SetFromSMPTE334(NULL, 0, kAFDBARLocF2); + anc_packet_list.AddAncillaryData(pkt); + } + } +#endif + + GstVideoCaptionMeta *caption_meta; + gpointer iter = NULL; + while ( + (caption_meta = (GstVideoCaptionMeta *)gst_buffer_iterate_meta_filtered( + buffer, &iter, GST_VIDEO_CAPTION_META_API_TYPE))) { + if (caption_meta->caption_type == GST_VIDEO_CAPTION_TYPE_CEA708_CDP) { + if (self->cea708_line_number != -1) { + const AJAAncillaryDataLocation kCEA708LocF1( + AJAAncillaryDataLink_A, AJAAncillaryDataVideoStream_Y, + AJAAncillaryDataSpace_VANC, self->cea708_line_number, + AJAAncDataHorizOffset_AnyVanc); + + AJAAncillaryData_Cea708 pkt; + + pkt.SetDID(AJAAncillaryData_CEA708_DID); + pkt.SetSID(AJAAncillaryData_CEA708_SID); + pkt.SetDataLocation(kCEA708LocF1); + pkt.SetDataCoding(AJAAncillaryDataCoding_Digital); + pkt.SetPayloadData(caption_meta->data, caption_meta->size); + + GST_TRACE_OBJECT( + self, + "Adding CEA708 CDP VANC of %" G_GSIZE_FORMAT " bytes at line %u", + pkt.GetPayloadByteCount(), pkt.GetLocationLineNumber()); + + anc_packet_list.AddAncillaryData(pkt); + } + } else if (caption_meta->caption_type == + GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A) { + if (self->cea608_line_number != -1) { + const AJAAncillaryDataLocation kCEA608LocF1( + AJAAncillaryDataLink_A, AJAAncillaryDataVideoStream_Y, + AJAAncillaryDataSpace_VANC, self->cea608_line_number, + AJAAncDataHorizOffset_AnyVanc); + + AJAAncillaryData_Cea608_Vanc pkt; + + pkt.SetDID(AJAAncillaryData_Cea608_Vanc_DID); + pkt.SetSID(AJAAncillaryData_Cea608_Vanc_SID); + pkt.SetDataLocation(kCEA608LocF1); + pkt.SetDataCoding(AJAAncillaryDataCoding_Digital); + pkt.SetPayloadData(caption_meta->data, caption_meta->size); + pkt.ParsePayloadData(); + + GST_TRACE_OBJECT( + self, "Adding CEA608 VANC of %" G_GSIZE_FORMAT " bytes at line %u", + pkt.GetPayloadByteCount(), pkt.GetLocationLineNumber()); + + anc_packet_list.AddAncillaryData(pkt); + } + } else { + GST_WARNING_OBJECT(self, "Unhandled caption type %d", + caption_meta->caption_type); + } + } + + if (!anc_packet_list.IsEmpty()) { + if (self->vanc_mode == ::NTV2_VANCMODE_OFF && + ::NTV2DeviceCanDoCustomAnc(self->device_id)) { + if (!self->anc_buffer_pool) { + self->anc_buffer_pool = gst_buffer_pool_new(); + GstStructure *config = + gst_buffer_pool_get_config(self->anc_buffer_pool); + gst_buffer_pool_config_set_params( + config, NULL, 8 * 1024, + (self->configured_info.interlace_mode == + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE + ? 
1 + : 2) * + self->queue_size, + 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->anc_buffer_pool, config); + gst_buffer_pool_set_active(self->anc_buffer_pool, TRUE); + } + + flow_ret = gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, + &item.anc_buffer, NULL); + if (flow_ret != GST_FLOW_OK) { + gst_buffer_unmap(item.video_buffer, &item.video_map); + gst_buffer_unref(item.video_buffer); + + if (item.audio_buffer) { + gst_buffer_unmap(item.audio_buffer, &item.audio_map); + gst_buffer_unref(item.audio_buffer); + } + + return flow_ret; + } + gst_buffer_map(item.anc_buffer, &item.anc_map, GST_MAP_READWRITE); + + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { + flow_ret = gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, + &item.anc_buffer2, NULL); + if (flow_ret != GST_FLOW_OK) { + gst_buffer_unmap(item.video_buffer, &item.video_map); + gst_buffer_unref(item.video_buffer); + + if (item.audio_buffer) { + gst_buffer_unmap(item.audio_buffer, &item.audio_map); + gst_buffer_unref(item.audio_buffer); + } + + if (item.anc_buffer) { + gst_buffer_unmap(item.anc_buffer, &item.anc_map); + gst_buffer_unref(item.anc_buffer); + } + + return flow_ret; + } + gst_buffer_map(item.anc_buffer2, &item.anc_map2, GST_MAP_READWRITE); + } + + NTV2_POINTER anc_ptr1(item.anc_map.data, item.anc_map.size); + NTV2_POINTER anc_ptr2(item.anc_map2.data, item.anc_map2.size); + + anc_ptr1.Fill(ULWord(0)); + anc_ptr2.Fill(ULWord(0)); + anc_packet_list.GetTransmitData(anc_ptr1, anc_ptr2, + self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE, + self->f2_start_line); + } else { + NTV2_POINTER ptr(item.video_map.data, item.video_map.size); + + // Work around bug in GetVANCTransmitData() for SD formats that + // truncates ADF packets that are not a multiple of 12 words long. + // + // See AJA SDK support ticket #4845. 
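+      //
+      // Editorial sketch (derived from the padding loop below, example
+      // sizes assumed): a packet that serializes to 8 words is padded with
+      // 0x040 fill words up to 12, and one that serializes to 13 words is
+      // padded up to 24, so every packed line ends up a non-zero multiple
+      // of 12 words before being written with
+      // ::YUVComponentsTo10BitYUVPackedBuffer().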
+ if (format_desc.IsSDFormat()) { + guint32 n_vanc_packets = anc_packet_list.CountAncillaryData(); + for (guint32 i = 0; i < n_vanc_packets; i++) { + AJAAncillaryData *packet = anc_packet_list.GetAncillaryDataAtIndex(i); + + ULWord line_offset = 0; + if (!format_desc.GetLineOffsetFromSMPTELine( + packet->GetLocationLineNumber(), line_offset)) + continue; + + UWordSequence data; + if (packet->GenerateTransmitData(data) != AJA_STATUS_SUCCESS) + continue; + + // Pad to a multiple of 12 words + while (data.size() < 12 || data.size() % 12 != 0) + data.push_back(0x040); + ::YUVComponentsTo10BitYUVPackedBuffer(data, ptr, format_desc, + line_offset); + } + } else { + anc_packet_list.GetVANCTransmitData(ptr, format_desc); + } + } + } + + g_mutex_lock(&self->queue_lock); + while (gst_queue_array_get_length(self->queue) >= self->queue_size) { + QueueItem *tmp = (QueueItem *)gst_queue_array_pop_head_struct(self->queue); + + if (tmp->type == QUEUE_ITEM_TYPE_FRAME) { + GST_WARNING_OBJECT(self, "Element queue overrun, dropping old frame"); + + GstMessage *msg = gst_message_new_qos( + GST_OBJECT_CAST(self), TRUE, GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, + GST_BUFFER_PTS(tmp->video_buffer), + gst_util_uint64_scale(GST_SECOND, self->configured_info.fps_d, + self->configured_info.fps_n)); + gst_element_post_message(GST_ELEMENT_CAST(self), msg); + + gst_buffer_unmap(tmp->video_buffer, &tmp->video_map); + gst_buffer_unref(tmp->video_buffer); + if (tmp->audio_buffer) { + gst_buffer_unmap(tmp->audio_buffer, &tmp->audio_map); + gst_buffer_unref(tmp->audio_buffer); + } + if (tmp->anc_buffer) { + gst_buffer_unmap(tmp->anc_buffer, &tmp->anc_map); + gst_buffer_unref(tmp->anc_buffer); + } + if (tmp->anc_buffer2) { + gst_buffer_unmap(tmp->anc_buffer2, &tmp->anc_map2); + gst_buffer_unref(tmp->anc_buffer2); + } + } + } + + GST_TRACE_OBJECT(self, "Queuing frame video %p audio %p", item.video_map.data, + item.audio_buffer ? 
item.audio_map.data : NULL); + gst_queue_array_push_tail_struct(self->queue, &item); + GST_TRACE_OBJECT(self, "%u frames queued", + gst_queue_array_get_length(self->queue)); + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + return flow_ret; +} + +static void output_thread_func(AJAThread *thread, void *data) { + GstAjaSink *self = GST_AJA_SINK(data); + GstClock *clock = NULL; + guint64 frames_rendered_start = G_MAXUINT64; + GstClockTime frames_rendered_start_time = GST_CLOCK_TIME_NONE; + guint64 frames_dropped_last = G_MAXUINT64; + AUTOCIRCULATE_TRANSFER transfer; + + if (self->output_cpu_core != G_MAXUINT) { + cpu_set_t mask; + pthread_t current_thread = pthread_self(); + + CPU_ZERO(&mask); + CPU_SET(self->output_cpu_core, &mask); + + if (pthread_setaffinity_np(current_thread, sizeof(mask), &mask) != 0) { + GST_ERROR_OBJECT(self, + "Failed to set affinity for current thread to core %u", + self->output_cpu_core); + } + } + + g_mutex_lock(&self->queue_lock); +restart: + if (self->draining && gst_queue_array_get_length(self->queue) == 0) { + GST_DEBUG_OBJECT(self, "Drained"); + self->draining = FALSE; + g_cond_signal(&self->drain_cond); + } + + GST_DEBUG_OBJECT(self, "Waiting for playing or shutdown"); + while ((!self->playing && !self->shutdown) || + (self->playing && + gst_queue_array_get_length(self->queue) < self->queue_size / 2 && + !self->eos)) + g_cond_wait(&self->queue_cond, &self->queue_lock); + if (self->shutdown) { + GST_DEBUG_OBJECT(self, "Shutting down"); + g_mutex_unlock(&self->queue_lock); + return; + } + + GST_DEBUG_OBJECT(self, "Starting playing"); + g_mutex_unlock(&self->queue_lock); + + { + // Make sure to globally lock here as the routing settings and others are + // global shared state + ShmMutexLocker locker; + + self->device->device->AutoCirculateStop(self->channel); + + if (!self->device->device->EnableChannel(self->channel)) { + GST_ERROR_OBJECT(self, "Failed to enable channel"); + g_mutex_lock(&self->queue_lock); + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to configure device")); + goto out; + } + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + if (!self->device->device->EnableChannel( + (NTV2Channel)(self->channel + i))) { + GST_ERROR_OBJECT(self, "Failed to enable channel"); + g_mutex_lock(&self->queue_lock); + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to configure device")); + goto out; + } + } + } + + self->device->device->EnableOutputInterrupt(self->channel); + self->device->device->SubscribeOutputVerticalEvent(self->channel); + + guint16 start_frame = self->start_frame; + guint16 end_frame = self->end_frame; + + // If both are the same, try to find queue_size/2 unallocated frames and + // use those. 
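+    //
+    // Worked example (editorial, numbers assumed): with queue-size=16 and an
+    // unallocated run found at device frame 8, num_frames = 16 / 2 = 8, so
+    // start_frame = 8 and end_frame = 8 + 8 - 1 = 15. At 25 fps the render
+    // delay configured below is then (15 - 8 + 1) * GST_SECOND * 1 / 25,
+    // i.e. 320 ms.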
+ if (start_frame == end_frame) { + guint16 num_frames = self->queue_size / 2; + + gint assigned_start_frame = gst_aja_ntv2_device_find_unallocated_frames( + self->device, self->channel, num_frames); + + if (assigned_start_frame == -1) { + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to allocate %u frames", num_frames)); + goto out; + } + + start_frame = assigned_start_frame; + end_frame = start_frame + num_frames - 1; + } + + GST_DEBUG_OBJECT( + self, "Configuring channel %u with start frame %u and end frame %u", + self->channel, start_frame, end_frame); + + // Configure render delay based on the framerate and queue size + gst_base_sink_set_render_delay( + GST_BASE_SINK(self), + gst_util_uint64_scale(end_frame - start_frame + 1, + self->configured_info.fps_d * GST_SECOND, + self->configured_info.fps_n)); + + if (!self->device->device->AutoCirculateInitForOutput( + self->channel, 0, self->audio_system, + (self->rp188 ? AUTOCIRCULATE_WITH_RP188 : 0) | + (self->vanc_mode == ::NTV2_VANCMODE_OFF ? AUTOCIRCULATE_WITH_ANC + : 0), + 1, start_frame, end_frame)) { + GST_ELEMENT_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to initialize autocirculate")); + goto out; + } + self->device->device->AutoCirculateStart(self->channel); + } + + gst_clear_object(&clock); + clock = gst_element_get_clock(GST_ELEMENT_CAST(self)); + frames_rendered_start = G_MAXUINT64; + frames_rendered_start_time = GST_CLOCK_TIME_NONE; + frames_dropped_last = G_MAXUINT64; + + g_mutex_lock(&self->queue_lock); + while (self->playing && !self->shutdown && + !(self->draining && gst_queue_array_get_length(self->queue) == 0)) { + AUTOCIRCULATE_STATUS status; + + self->device->device->AutoCirculateGetStatus(self->channel, status); + + GST_TRACE_OBJECT(self, + "Start frame %d " + "end frame %d " + "active frame %d " + "start time %" G_GUINT64_FORMAT + " " + "current time %" G_GUINT64_FORMAT + " " + "frames processed %u " + "frames dropped %u " + "buffer level %u", + status.acStartFrame, status.acEndFrame, + status.acActiveFrame, status.acRDTSCStartTime, + status.acRDTSCCurrentTime, status.acFramesProcessed, + status.acFramesDropped, status.acBufferLevel); + + // Detect if we were too slow with providing frames and report if that was + // the case together with the amount of frames dropped + if (frames_dropped_last == G_MAXUINT64) { + frames_dropped_last = status.acFramesDropped; + } else if (frames_dropped_last < status.acFramesDropped) { + GST_WARNING_OBJECT(self, "Dropped %" G_GUINT64_FORMAT " frames", + status.acFramesDropped - frames_dropped_last); + + GstClockTime timestamp = + gst_util_uint64_scale(status.acFramesProcessed + frames_dropped_last, + self->configured_info.fps_n, + self->configured_info.fps_d * GST_SECOND); + GstClockTime timestamp_end = gst_util_uint64_scale( + status.acFramesProcessed + status.acFramesDropped, + self->configured_info.fps_n, + self->configured_info.fps_d * GST_SECOND); + GstMessage *msg = gst_message_new_qos( + GST_OBJECT_CAST(self), TRUE, GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, + timestamp, timestamp_end - timestamp); + gst_element_post_message(GST_ELEMENT_CAST(self), msg); + + frames_dropped_last = status.acFramesDropped; + } + + if (status.GetNumAvailableOutputFrames() > 1) { + QueueItem item, *item_p; + + while ((item_p = (QueueItem *)gst_queue_array_pop_head_struct( + self->queue)) == NULL && + self->playing && !self->shutdown && !self->draining) { + GST_DEBUG_OBJECT( + self, + "Element queue underrun, waiting for more frames or shutdown"); + g_cond_wait(&self->queue_cond, 
&self->queue_lock); + } + + if (!self->playing || self->shutdown || (!item_p && self->draining)) { + if (item_p && item_p->type == QUEUE_ITEM_TYPE_FRAME) { + gst_buffer_unmap(item_p->video_buffer, &item_p->video_map); + gst_buffer_unref(item_p->video_buffer); + + if (item_p->audio_buffer) { + gst_buffer_unmap(item_p->audio_buffer, &item_p->audio_map); + gst_buffer_unref(item_p->audio_buffer); + } + + if (item_p->anc_buffer) { + gst_buffer_unmap(item_p->anc_buffer, &item_p->anc_map); + gst_buffer_unref(item_p->anc_buffer); + } + if (item_p->anc_buffer2) { + gst_buffer_unmap(item_p->anc_buffer2, &item_p->anc_map2); + gst_buffer_unref(item_p->anc_buffer2); + } + } + break; + } + + if (item_p && item_p->type != QUEUE_ITEM_TYPE_FRAME) { + continue; + } + + GST_TRACE_OBJECT(self, "%u frames queued", + gst_queue_array_get_length(self->queue)); + + item = *item_p; + g_mutex_unlock(&self->queue_lock); + + GST_TRACE_OBJECT(self, + "Transferring frame: " + "Video %p %" G_GSIZE_FORMAT + " " + "Audio %p %" G_GSIZE_FORMAT, + item.video_map.data, item.video_map.size, + item.audio_buffer ? item.audio_map.data : NULL, + item.audio_buffer ? item.audio_map.size : 0); + + // Set timecodes if provided by upstream + if (item.tc.IsValid() && item.tc.fDBB != 0xffffffff && self->tc_indexes) { + NTV2TimeCodes timecodes; + + for (const auto &tc_index : *self->tc_indexes) { + timecodes[tc_index] = item.tc; + } + transfer.SetOutputTimeCodes(timecodes); + } + + transfer.SetVideoBuffer((ULWord *)item.video_map.data, + item.video_map.size); + transfer.SetAudioBuffer((ULWord *)item.audio_map.data, + item.audio_map.size); + transfer.SetAncBuffers((ULWord *)item.anc_map.data, item.anc_map.size, + (ULWord *)item.anc_map2.data, item.anc_map2.size); + + if (!self->device->device->AutoCirculateTransfer(self->channel, + transfer)) { + GST_WARNING_OBJECT(self, "Failed to transfer frame"); + } + + gst_buffer_unmap(item.video_buffer, &item.video_map); + gst_buffer_unref(item.video_buffer); + + if (item.audio_buffer) { + gst_buffer_unmap(item.audio_buffer, &item.audio_map); + gst_buffer_unref(item.audio_buffer); + } + + if (item.anc_buffer) { + gst_buffer_unmap(item.anc_buffer, &item.anc_map); + gst_buffer_unref(item.anc_buffer); + } + + if (item.anc_buffer2) { + gst_buffer_unmap(item.anc_buffer2, &item.anc_map2); + gst_buffer_unref(item.anc_buffer2); + } + + GST_TRACE_OBJECT( + self, + "Transferred frame. 
" + "frame time %" GST_TIME_FORMAT + " " + "current frame %u " + "current frame time %" GST_TIME_FORMAT + " " + "frames processed %u " + "frames dropped %u " + "buffer level %u", + GST_TIME_ARGS(transfer.acTransferStatus.acFrameStamp.acFrameTime * + 100), + transfer.acTransferStatus.acFrameStamp.acCurrentFrame, + GST_TIME_ARGS( + transfer.acTransferStatus.acFrameStamp.acCurrentFrameTime * 100), + transfer.acTransferStatus.acFramesProcessed, + transfer.acTransferStatus.acFramesDropped, + transfer.acTransferStatus.acBufferLevel); + + // Trivial drift calculation + // + // TODO: Should probably take averages over a timespan (say 1 minute) + // into a ringbuffer and calculate a linear regression over them + // FIXME: Add some compensation by dropping/duplicating frames as needed + // but make this configurable + if (frames_rendered_start_time == GST_CLOCK_TIME_NONE && + transfer.acTransferStatus.acFrameStamp.acCurrentFrameTime != 0 && + transfer.acTransferStatus.acFramesProcessed + + transfer.acTransferStatus.acFramesDropped > + self->queue_size && + clock) { + frames_rendered_start = transfer.acTransferStatus.acFramesProcessed + + transfer.acTransferStatus.acFramesDropped; + + GstClockTime now_gst = gst_clock_get_time(clock); + GstClockTime now_sys = g_get_real_time() * 1000; + GstClockTime render_time = + transfer.acTransferStatus.acFrameStamp.acCurrentFrameTime * 100; + + if (render_time < now_sys) { + frames_rendered_start_time = now_gst - (now_sys - render_time); + } + } + + if (clock && frames_rendered_start_time != GST_CLOCK_TIME_NONE) { + GstClockTime now_gst = gst_clock_get_time(clock); + GstClockTime now_sys = g_get_real_time() * 1000; + GstClockTime render_time = + transfer.acTransferStatus.acFrameStamp.acCurrentFrameTime * 100; + + GstClockTime sys_diff; + if (now_sys > render_time) { + sys_diff = now_sys - render_time; + } else { + sys_diff = 0; + } + + GstClockTime diff = now_gst - frames_rendered_start_time; + if (sys_diff < diff) diff -= sys_diff; + + guint64 frames_rendered = (transfer.acTransferStatus.acFramesProcessed + + transfer.acTransferStatus.acFramesDropped) - + frames_rendered_start; + guint64 frames_produced = + gst_util_uint64_scale(diff, self->configured_info.fps_n, + self->configured_info.fps_d * GST_SECOND); + gdouble fps_rendered = ((gdouble)frames_rendered * GST_SECOND) / diff; + + GST_TRACE_OBJECT(self, + "Frames rendered %" G_GUINT64_FORMAT + ", frames produced %" G_GUINT64_FORMAT + ", FPS rendered %lf", + frames_rendered, frames_produced, fps_rendered); + } + + g_mutex_lock(&self->queue_lock); + } else { + g_mutex_unlock(&self->queue_lock); + + self->device->device->WaitForOutputVerticalInterrupt(self->channel); + + g_mutex_lock(&self->queue_lock); + } + } + +out : { + // Make sure to globally lock here as the routing settings and others are + // global shared state + ShmMutexLocker locker; + + self->device->device->AutoCirculateStop(self->channel); + self->device->device->UnsubscribeOutputVerticalEvent(self->channel); + self->device->device->DisableOutputInterrupt(self->channel); + + self->device->device->DisableChannel(self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->DisableChannel((NTV2Channel)(self->channel + i)); + } + } +} + + if ((!self->playing || self->draining) && !self->shutdown) goto restart; + g_mutex_unlock(&self->queue_lock); + + gst_clear_object(&clock); + + GST_DEBUG_OBJECT(self, "Stopped"); +} diff --git a/subprojects/gst-plugins-bad/sys/aja/gstajasink.h 
b/subprojects/gst-plugins-bad/sys/aja/gstajasink.h new file mode 100644 index 0000000000..0e2aafc37a --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/gstajasink.h @@ -0,0 +1,106 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#pragma once + +#include +#include +#include + +#include "gstajacommon.h" + +G_BEGIN_DECLS + +#define GST_TYPE_AJA_SINK (gst_aja_sink_get_type()) +#define GST_AJA_SINK(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_SINK, GstAjaSink)) +#define GST_AJA_SINK_CAST(obj) ((GstAjaSink *)obj) +#define GST_AJA_SINK_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AJA_SINK, GstAjaSinkClass)) +#define GST_IS_AJA_SINK(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AJA_SINK)) +#define GST_IS_AJA_SINK_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AJA_SINK)) + +typedef struct _GstAjaSink GstAjaSink; +typedef struct _GstAjaSinkClass GstAjaSinkClass; + +struct _GstAjaSink { + GstBaseSink parent; + + // Everything below protected by queue lock + GMutex queue_lock; + GCond queue_cond; + GstQueueArray *queue; + gboolean eos; + gboolean playing; + gboolean shutdown; + gboolean draining; + // Hold by set_caps() to wait until drained + GCond drain_cond; + gboolean flushing; + + GstAjaNtv2Device *device; + NTV2DeviceID device_id; + GstAllocator *allocator; + + // Only allocated on demand + GstBufferPool *buffer_pool; + GstBufferPool *audio_buffer_pool; + GstBufferPool *anc_buffer_pool; + + // Properties + gchar *device_identifier; + NTV2Channel channel; + guint queue_size; + guint start_frame, end_frame; + guint output_cpu_core; + + GstAjaAudioSystem audio_system_setting; + GstAjaOutputDestination output_destination; + GstAjaSdiMode sdi_mode; + GstAjaTimecodeIndex timecode_index; + gboolean rp188; + GstAjaReferenceSource reference_source; + + gint cea608_line_number; + gint cea708_line_number; + + NTV2AudioSystem audio_system; + NTV2VideoFormat video_format; + bool quad_mode; + NTV2VANCMode vanc_mode; + guint32 f2_start_line; + NTV2TCIndexes *tc_indexes; + + GstCaps *configured_caps; + GstVideoInfo configured_info; + gint configured_audio_channels; + + AJAThread *output_thread; +}; + +struct _GstAjaSinkClass { + GstBaseSinkClass parent_class; +}; + +G_GNUC_INTERNAL +GType gst_aja_sink_get_type(void); + +G_END_DECLS diff --git a/subprojects/gst-plugins-bad/sys/aja/gstajasinkcombiner.cpp b/subprojects/gst-plugins-bad/sys/aja/gstajasinkcombiner.cpp new file mode 100644 index 0000000000..122f895824 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/gstajasinkcombiner.cpp @@ -0,0 +1,294 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as 
published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstajacommon.h" +#include "gstajasinkcombiner.h" + +GST_DEBUG_CATEGORY_STATIC(gst_aja_sink_combiner_debug); +#define GST_CAT_DEFAULT gst_aja_sink_combiner_debug + +static GstStaticPadTemplate video_sink_template = GST_STATIC_PAD_TEMPLATE( + "video", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS("video/x-raw")); + +static GstStaticPadTemplate audio_sink_template = + GST_STATIC_PAD_TEMPLATE("audio", GST_PAD_SINK, GST_PAD_REQUEST, + GST_STATIC_CAPS("audio/x-raw, " + "format = (string) S32LE, " + "rate = (int) 48000, " + "channels = (int) [ 1, 16 ], " + "layout = (string) interleaved")); + +static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE( + "src", GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS("video/x-raw")); + +G_DEFINE_TYPE(GstAjaSinkCombiner, gst_aja_sink_combiner, GST_TYPE_AGGREGATOR); +#define parent_class gst_aja_sink_combiner_parent_class + +static void gst_aja_sink_combiner_finalize(GObject *object) { + GstAjaSinkCombiner *self = GST_AJA_SINK_COMBINER(object); + + GST_OBJECT_LOCK(self); + gst_caps_replace(&self->audio_caps, NULL); + gst_caps_replace(&self->video_caps, NULL); + GST_OBJECT_UNLOCK(self); + + G_OBJECT_CLASS(parent_class)->finalize(object); +} + +static GstFlowReturn gst_aja_sink_combiner_aggregate(GstAggregator *aggregator, + gboolean timeout) { + GstAjaSinkCombiner *self = GST_AJA_SINK_COMBINER(aggregator); + GstBuffer *video_buffer, *audio_buffer; + + if (gst_aggregator_pad_is_eos(GST_AGGREGATOR_PAD_CAST(self->audio_sinkpad)) && + gst_aggregator_pad_is_eos(GST_AGGREGATOR_PAD_CAST(self->video_sinkpad))) { + GST_DEBUG_OBJECT(self, "All pads EOS"); + return GST_FLOW_EOS; + } + + // FIXME: We currently assume that upstream provides + // - properly chunked buffers (1 buffer = 1 video frame) + // - properly synchronized buffers (audio/video starting at the same time) + // - no gaps + // + // This can be achieved externally with elements like audiobuffersplit and + // videorate. 
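+  //
+  // Editorial example (illustrative only; element names, caps and properties
+  // assumed, not verified against this plugin's registration): an upstream
+  // arrangement along these lines would satisfy those assumptions:
+  //
+  //   gst-launch-1.0 ajasinkcombiner name=c ! ajasink \
+  //     videotestsrc ! videorate ! video/x-raw,framerate=25/1 ! c.video \
+  //     audiotestsrc ! audioconvert ! audioresample ! \
+  //       audio/x-raw,format=S32LE,rate=48000,layout=interleaved ! \
+  //       audiobuffersplit output-buffer-duration=1/25 ! c.audio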
+ + video_buffer = gst_aggregator_pad_peek_buffer( + GST_AGGREGATOR_PAD_CAST(self->video_sinkpad)); + if (!video_buffer) return GST_AGGREGATOR_FLOW_NEED_DATA; + + audio_buffer = gst_aggregator_pad_peek_buffer( + GST_AGGREGATOR_PAD_CAST(self->audio_sinkpad)); + if (!audio_buffer && !gst_aggregator_pad_is_eos( + GST_AGGREGATOR_PAD_CAST(self->audio_sinkpad))) { + gst_buffer_unref(video_buffer); + GST_TRACE_OBJECT(self, "Audio not ready yet, waiting"); + return GST_AGGREGATOR_FLOW_NEED_DATA; + } + + gst_aggregator_pad_drop_buffer(GST_AGGREGATOR_PAD_CAST(self->video_sinkpad)); + video_buffer = gst_buffer_make_writable(video_buffer); + GST_TRACE_OBJECT(self, + "Outputting buffer with video %" GST_PTR_FORMAT + " and audio %" GST_PTR_FORMAT, + video_buffer, audio_buffer); + if (audio_buffer) { + gst_buffer_add_aja_audio_meta(video_buffer, audio_buffer); + gst_buffer_unref(audio_buffer); + gst_aggregator_pad_drop_buffer( + GST_AGGREGATOR_PAD_CAST(self->audio_sinkpad)); + } + + if (!gst_pad_has_current_caps(GST_AGGREGATOR_SRC_PAD(self)) || + self->caps_changed) { + GstCaps *caps = gst_caps_copy(self->video_caps); + GstStructure *s; + + s = gst_caps_get_structure(caps, 0); + if (self->audio_caps) { + const GstStructure *s2; + gint audio_channels; + + s2 = gst_caps_get_structure(self->audio_caps, 0); + + gst_structure_get_int(s2, "channels", &audio_channels); + gst_structure_set(s, "audio-channels", G_TYPE_INT, audio_channels, NULL); + } else { + gst_structure_set(s, "audio-channels", G_TYPE_INT, 0, NULL); + } + + GST_DEBUG_OBJECT(self, "Configuring caps %" GST_PTR_FORMAT, caps); + + gst_aggregator_set_src_caps(GST_AGGREGATOR(self), caps); + gst_caps_unref(caps); + self->caps_changed = FALSE; + } + + // Update the position for synchronization purposes + GST_AGGREGATOR_PAD_CAST(GST_AGGREGATOR_SRC_PAD(self))->segment.position = + GST_BUFFER_PTS(video_buffer); + if (GST_BUFFER_DURATION_IS_VALID(video_buffer)) + GST_AGGREGATOR_PAD_CAST(GST_AGGREGATOR_SRC_PAD(self))->segment.position += + GST_BUFFER_DURATION(video_buffer); + + return gst_aggregator_finish_buffer(GST_AGGREGATOR_CAST(self), video_buffer); +} + +static gboolean gst_aja_sink_combiner_sink_event(GstAggregator *aggregator, + GstAggregatorPad *agg_pad, + GstEvent *event) { + GstAjaSinkCombiner *self = GST_AJA_SINK_COMBINER(aggregator); + + switch (GST_EVENT_TYPE(event)) { + case GST_EVENT_SEGMENT: { + const GstSegment *segment; + + gst_event_parse_segment(event, &segment); + gst_aggregator_update_segment(GST_AGGREGATOR(self), segment); + break; + } + case GST_EVENT_CAPS: { + GstCaps *caps; + + gst_event_parse_caps(event, &caps); + + if (agg_pad == GST_AGGREGATOR_PAD_CAST(self->audio_sinkpad)) { + GST_OBJECT_LOCK(self); + gst_caps_replace(&self->audio_caps, caps); + self->caps_changed = TRUE; + GST_OBJECT_UNLOCK(self); + } else if (agg_pad == GST_AGGREGATOR_PAD_CAST(self->video_sinkpad)) { + GST_OBJECT_LOCK(self); + gst_caps_replace(&self->video_caps, caps); + self->caps_changed = TRUE; + GST_OBJECT_UNLOCK(self); + } + + break; + } + default: + break; + } + + return GST_AGGREGATOR_CLASS(parent_class) + ->sink_event(aggregator, agg_pad, event); +} + +static gboolean gst_aja_sink_combiner_sink_query(GstAggregator *aggregator, + GstAggregatorPad *agg_pad, + GstQuery *query) { + GstAjaSinkCombiner *self = GST_AJA_SINK_COMBINER(aggregator); + + switch (GST_QUERY_TYPE(query)) { + case GST_QUERY_CAPS: { + GstCaps *filter, *caps; + + gst_query_parse_caps(query, &filter); + + if (agg_pad == GST_AGGREGATOR_PAD_CAST(self->audio_sinkpad)) { + caps = 
gst_pad_get_pad_template_caps(GST_PAD(agg_pad)); + } else if (agg_pad == GST_AGGREGATOR_PAD_CAST(self->video_sinkpad)) { + caps = gst_pad_peer_query_caps(GST_AGGREGATOR_SRC_PAD(self), NULL); + caps = gst_caps_make_writable(caps); + guint caps_size = gst_caps_get_size(caps); + for (guint i = 0; i < caps_size; i++) { + GstStructure *s = gst_caps_get_structure(caps, i); + gst_structure_remove_field(s, "audio-channels"); + } + } else { + g_assert_not_reached(); + } + + if (filter) { + GstCaps *tmp = gst_caps_intersect(filter, caps); + gst_caps_unref(caps); + caps = tmp; + } + + gst_query_set_caps_result(query, caps); + + return TRUE; + } + case GST_QUERY_ALLOCATION: { + // Proxy to the sink for both pads so that the AJA allocator can be + // used upstream as needed. + return gst_pad_peer_query(GST_AGGREGATOR_SRC_PAD(self), query); + } + default: + break; + } + + return GST_AGGREGATOR_CLASS(parent_class) + ->sink_query(aggregator, agg_pad, query); +} + +static gboolean gst_aja_sink_combiner_negotiate(GstAggregator *aggregator) { + return TRUE; +} + +static gboolean gst_aja_sink_combiner_stop(GstAggregator *aggregator) { + GstAjaSinkCombiner *self = GST_AJA_SINK_COMBINER(aggregator); + + GST_OBJECT_LOCK(self); + gst_caps_replace(&self->audio_caps, NULL); + gst_caps_replace(&self->video_caps, NULL); + GST_OBJECT_UNLOCK(self); + + return TRUE; +} + +static void gst_aja_sink_combiner_class_init(GstAjaSinkCombinerClass *klass) { + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + GstAggregatorClass *aggregator_class; + + gobject_class = (GObjectClass *)klass; + gstelement_class = (GstElementClass *)klass; + aggregator_class = (GstAggregatorClass *)klass; + + gobject_class->finalize = gst_aja_sink_combiner_finalize; + + gst_element_class_set_static_metadata( + gstelement_class, "AJA sink audio/video combiner", "Audio/Video/Combiner", + "Combines corresponding audio/video frames", + "Sebastian Dröge "); + + gst_element_class_add_static_pad_template_with_gtype( + gstelement_class, &video_sink_template, GST_TYPE_AGGREGATOR_PAD); + gst_element_class_add_static_pad_template_with_gtype( + gstelement_class, &audio_sink_template, GST_TYPE_AGGREGATOR_PAD); + gst_element_class_add_static_pad_template_with_gtype( + gstelement_class, &src_template, GST_TYPE_AGGREGATOR_PAD); + + aggregator_class->aggregate = gst_aja_sink_combiner_aggregate; + aggregator_class->stop = gst_aja_sink_combiner_stop; + aggregator_class->sink_event = gst_aja_sink_combiner_sink_event; + aggregator_class->sink_query = gst_aja_sink_combiner_sink_query; + aggregator_class->negotiate = gst_aja_sink_combiner_negotiate; + aggregator_class->get_next_time = gst_aggregator_simple_get_next_time; + + // We don't support requesting new pads + gstelement_class->request_new_pad = NULL; + + GST_DEBUG_CATEGORY_INIT(gst_aja_sink_combiner_debug, "ajasinkcombiner", 0, + "AJA sink combiner"); +} + +static void gst_aja_sink_combiner_init(GstAjaSinkCombiner *self) { + GstPadTemplate *templ; + + templ = gst_static_pad_template_get(&video_sink_template); + self->video_sinkpad = + GST_PAD(g_object_new(GST_TYPE_AGGREGATOR_PAD, "name", "video", + "direction", GST_PAD_SINK, "template", templ, NULL)); + gst_object_unref(templ); + gst_element_add_pad(GST_ELEMENT_CAST(self), self->video_sinkpad); + + templ = gst_static_pad_template_get(&audio_sink_template); + self->audio_sinkpad = + GST_PAD(g_object_new(GST_TYPE_AGGREGATOR_PAD, "name", "audio", + "direction", GST_PAD_SINK, "template", templ, NULL)); + gst_object_unref(templ); + 
gst_element_add_pad(GST_ELEMENT_CAST(self), self->audio_sinkpad); +} diff --git a/subprojects/gst-plugins-bad/sys/aja/gstajasinkcombiner.h b/subprojects/gst-plugins-bad/sys/aja/gstajasinkcombiner.h new file mode 100644 index 0000000000..32d3a89e9f --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/gstajasinkcombiner.h @@ -0,0 +1,60 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#pragma once + +#include +#include +#include + +#include "gstajacommon.h" + +G_BEGIN_DECLS + +#define GST_TYPE_AJA_SINK_COMBINER (gst_aja_sink_combiner_get_type()) +#define GST_AJA_SINK_COMBINER(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_SINK_COMBINER, \ + GstAjaSinkCombiner)) +#define GST_AJA_SINK_COMBINER_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AJA_SINK_COMBINER, \ + GstAjaSinkCombinerClass)) +#define IS_GST_AJA_SINK_COMBINER(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AJA_SINK_COMBINER)) +#define IS_GST_AJA_SINK_COMBINER_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AJA_SINK_COMBINER)) + +typedef struct _GstAjaSinkCombiner GstAjaSinkCombiner; +typedef struct _GstAjaSinkCombinerClass GstAjaSinkCombinerClass; + +struct _GstAjaSinkCombiner { + GstAggregator parent; + + GstPad *audio_sinkpad, *video_sinkpad; + GstCaps *audio_caps, *video_caps; + gboolean caps_changed; +}; + +struct _GstAjaSinkCombinerClass { + GstAggregatorClass parent_class; +}; + +G_GNUC_INTERNAL +GType gst_aja_sink_combiner_get_type(void); + +G_END_DECLS diff --git a/subprojects/gst-plugins-bad/sys/aja/gstajasrc.cpp b/subprojects/gst-plugins-bad/sys/aja/gstajasrc.cpp new file mode 100644 index 0000000000..2b3434560c --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/gstajasrc.cpp @@ -0,0 +1,2714 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include +#include +#include + +#include "gstajacommon.h" +#include "gstajasrc.h" + +GST_DEBUG_CATEGORY_STATIC(gst_aja_src_debug); +#define GST_CAT_DEFAULT gst_aja_src_debug + +#define DEFAULT_DEVICE_IDENTIFIER ("0") +#define DEFAULT_CHANNEL (::NTV2_CHANNEL1) +#define DEFAULT_VIDEO_FORMAT (GST_AJA_VIDEO_FORMAT_AUTO) +#define DEFAULT_AUDIO_SYSTEM (GST_AJA_AUDIO_SYSTEM_AUTO) +#define DEFAULT_INPUT_SOURCE (GST_AJA_INPUT_SOURCE_AUTO) +#define DEFAULT_SDI_MODE (GST_AJA_SDI_MODE_SINGLE_LINK) +#define DEFAULT_AUDIO_SOURCE (GST_AJA_AUDIO_SOURCE_EMBEDDED) +#define DEFAULT_EMBEDDED_AUDIO_INPUT (GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO) +#define DEFAULT_TIMECODE_INDEX (GST_AJA_TIMECODE_INDEX_VITC) +#define DEFAULT_RP188 (TRUE) +#define DEFAULT_REFERENCE_SOURCE (GST_AJA_REFERENCE_SOURCE_FREERUN) +#define DEFAULT_CLOSED_CAPTION_CAPTURE_MODE \ + (GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_AND_CEA608) +#define DEFAULT_QUEUE_SIZE (16) +#define DEFAULT_START_FRAME (8) +#define DEFAULT_END_FRAME (8) +#define DEFAULT_CAPTURE_CPU_CORE (G_MAXUINT) + +enum { + PROP_0, + PROP_DEVICE_IDENTIFIER, + PROP_CHANNEL, + PROP_VIDEO_FORMAT, + PROP_AUDIO_SYSTEM, + PROP_INPUT_SOURCE, + PROP_SDI_MODE, + PROP_AUDIO_SOURCE, + PROP_EMBEDDED_AUDIO_INPUT, + PROP_TIMECODE_INDEX, + PROP_RP188, + PROP_REFERENCE_SOURCE, + PROP_CLOSED_CAPTION_CAPTURE_MODE, + PROP_START_FRAME, + PROP_END_FRAME, + PROP_QUEUE_SIZE, + PROP_CAPTURE_CPU_CORE, + PROP_SIGNAL, +}; + +// Make these plain C structs for usage in GstQueueArray +G_BEGIN_DECLS + +typedef enum { + QUEUE_ITEM_TYPE_DUMMY, + QUEUE_ITEM_TYPE_FRAME, + QUEUE_ITEM_TYPE_SIGNAL_CHANGE, + QUEUE_ITEM_TYPE_ERROR, + QUEUE_ITEM_TYPE_FRAMES_DROPPED, +} QueueItemType; + +typedef struct { + QueueItemType type; + + union { + // For DUMMY + struct { + gchar dummy; + } dummy; + // For FRAME + struct { + GstClockTime capture_time; + GstBuffer *video_buffer; + GstBuffer *audio_buffer; + GstBuffer *anc_buffer, *anc_buffer2; + NTV2_RP188 tc; + + NTV2VideoFormat detected_format; + guint32 vpid; + } frame; + // For SIGNAL_CHANGE + struct { + gboolean have_signal; + NTV2VideoFormat detected_format; + guint32 vpid; + } signal_change; + // For ERROR + struct { + GstMessage *msg; + } error; + // For FRAMES_DROPPED + struct { + gboolean driver_side; + GstClockTime timestamp_start, timestamp_end; + } frames_dropped; + }; +} QueueItem; + +G_END_DECLS + +static void queue_item_clear(QueueItem *item) { + switch (item->type) { + case QUEUE_ITEM_TYPE_DUMMY: + break; + case QUEUE_ITEM_TYPE_FRAME: + gst_clear_buffer(&item->frame.video_buffer); + gst_clear_buffer(&item->frame.audio_buffer); + gst_clear_buffer(&item->frame.anc_buffer); + gst_clear_buffer(&item->frame.anc_buffer2); + item->frame.tc.~NTV2_RP188(); + break; + case QUEUE_ITEM_TYPE_SIGNAL_CHANGE: + break; + case QUEUE_ITEM_TYPE_ERROR: + gst_clear_message(&item->error.msg); + break; + case QUEUE_ITEM_TYPE_FRAMES_DROPPED: + break; + } + + item->type = QUEUE_ITEM_TYPE_DUMMY; +} + +static void gst_aja_src_set_property(GObject *object, guint property_id, + const GValue *value, GParamSpec *pspec); +static void gst_aja_src_get_property(GObject *object, guint property_id, + GValue *value, GParamSpec *pspec); +static void gst_aja_src_finalize(GObject *object); + +static GstCaps *gst_aja_src_get_caps(GstBaseSrc *bsrc, GstCaps *filter); +static gboolean gst_aja_src_query(GstBaseSrc *bsrc, GstQuery *query); +static gboolean gst_aja_src_unlock(GstBaseSrc *bsrc); +static gboolean 
gst_aja_src_unlock_stop(GstBaseSrc *bsrc); + +static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer); + +static gboolean gst_aja_src_open(GstAjaSrc *src); +static gboolean gst_aja_src_close(GstAjaSrc *src); +static gboolean gst_aja_src_stop(GstAjaSrc *src); + +static GstStateChangeReturn gst_aja_src_change_state(GstElement *element, + GstStateChange transition); + +static void capture_thread_func(AJAThread *thread, void *data); + +#define parent_class gst_aja_src_parent_class +G_DEFINE_TYPE(GstAjaSrc, gst_aja_src, GST_TYPE_PUSH_SRC); + +static void gst_aja_src_class_init(GstAjaSrcClass *klass) { + GObjectClass *gobject_class = G_OBJECT_CLASS(klass); + GstElementClass *element_class = GST_ELEMENT_CLASS(klass); + GstBaseSrcClass *basesrc_class = GST_BASE_SRC_CLASS(klass); + GstPushSrcClass *pushsrc_class = GST_PUSH_SRC_CLASS(klass); + GstCaps *templ_caps; + + gobject_class->set_property = gst_aja_src_set_property; + gobject_class->get_property = gst_aja_src_get_property; + gobject_class->finalize = gst_aja_src_finalize; + + g_object_class_install_property( + gobject_class, PROP_DEVICE_IDENTIFIER, + g_param_spec_string( + "device-identifier", "Device identifier", + "Input device instance to use", DEFAULT_DEVICE_IDENTIFIER, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_CHANNEL, + g_param_spec_uint( + "channel", "Channel", "Channel to use", 0, NTV2_MAX_NUM_CHANNELS - 1, + DEFAULT_CHANNEL, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_VIDEO_FORMAT, + g_param_spec_enum( + "video-format", "Video Format", "Video format to use", + GST_TYPE_AJA_VIDEO_FORMAT, DEFAULT_VIDEO_FORMAT, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_QUEUE_SIZE, + g_param_spec_uint( + "queue-size", "Queue Size", + "Size of internal queue in number of video frames. 
" + "Half of this is allocated as device buffers and equal to the " + "latency.", + 1, G_MAXINT, DEFAULT_QUEUE_SIZE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_START_FRAME, + g_param_spec_uint( + "start-frame", "Start Frame", + "Start frame buffer to be used for capturing (automatically assign " + "that many frames if same number as end-frame).", + 0, G_MAXINT, DEFAULT_START_FRAME, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_END_FRAME, + g_param_spec_uint( + "end-frame", "End Frame", + "End frame buffer to be used for capturing (automatically assign " + "that many frames if same number as start-frame).", + 0, G_MAXINT, DEFAULT_END_FRAME, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); + + g_object_class_install_property( + gobject_class, PROP_AUDIO_SYSTEM, + g_param_spec_enum( + "audio-system", "Audio System", "Audio system to use", + GST_TYPE_AJA_AUDIO_SYSTEM, DEFAULT_AUDIO_SYSTEM, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_INPUT_SOURCE, + g_param_spec_enum( + "input-source", "Input Source", "Input source to use", + GST_TYPE_AJA_INPUT_SOURCE, DEFAULT_INPUT_SOURCE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_SDI_MODE, + g_param_spec_enum( + "sdi-input-mode", "SDI Input Mode", "SDI input mode to use", + GST_TYPE_AJA_SDI_MODE, DEFAULT_SDI_MODE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_AUDIO_SOURCE, + g_param_spec_enum( + "audio-source", "Audio Source", "Audio source to use", + GST_TYPE_AJA_AUDIO_SOURCE, DEFAULT_AUDIO_SOURCE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_EMBEDDED_AUDIO_INPUT, + g_param_spec_enum( + "embedded-audio-input", "Embedded Audio Input", + "Embedded Audio Input to use", GST_TYPE_AJA_EMBEDDED_AUDIO_INPUT, + DEFAULT_EMBEDDED_AUDIO_INPUT, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_TIMECODE_INDEX, + g_param_spec_enum( + "timecode-index", "Timecode Index", "Timecode index to use", + GST_TYPE_AJA_TIMECODE_INDEX, DEFAULT_TIMECODE_INDEX, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_RP188, + g_param_spec_boolean( + "rp188", "RP188", "Enable RP188 timecode retrieval", DEFAULT_RP188, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_REFERENCE_SOURCE, + g_param_spec_enum( + "reference-source", "Reference Source", "Reference source to use", + GST_TYPE_AJA_REFERENCE_SOURCE, DEFAULT_REFERENCE_SOURCE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_CLOSED_CAPTION_CAPTURE_MODE, + g_param_spec_enum( + "closed-caption-capture-mode", "Closed Caption Capture Mode", + "Closed Caption Capture Mode", + GST_TYPE_AJA_CLOSED_CAPTION_CAPTURE_MODE, + DEFAULT_CLOSED_CAPTION_CAPTURE_MODE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + 
G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_CAPTURE_CPU_CORE, + g_param_spec_uint( + "capture-cpu-core", "Capture CPU Core", + "Sets the affinity of the capture thread to this CPU core " + "(-1=disabled)", + 0, G_MAXUINT, DEFAULT_CAPTURE_CPU_CORE, + (GParamFlags)(G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property( + gobject_class, PROP_SIGNAL, + g_param_spec_boolean( + "signal", "Input signal available", + "True if there is a valid input signal available", FALSE, + (GParamFlags)(G_PARAM_READABLE | G_PARAM_STATIC_STRINGS))); + + element_class->change_state = GST_DEBUG_FUNCPTR(gst_aja_src_change_state); + + basesrc_class->get_caps = GST_DEBUG_FUNCPTR(gst_aja_src_get_caps); + basesrc_class->negotiate = NULL; + basesrc_class->query = GST_DEBUG_FUNCPTR(gst_aja_src_query); + basesrc_class->unlock = GST_DEBUG_FUNCPTR(gst_aja_src_unlock); + basesrc_class->unlock_stop = GST_DEBUG_FUNCPTR(gst_aja_src_unlock_stop); + + pushsrc_class->create = GST_DEBUG_FUNCPTR(gst_aja_src_create); + + templ_caps = gst_ntv2_supported_caps(DEVICE_ID_INVALID); + gst_element_class_add_pad_template( + element_class, + gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS, templ_caps)); + gst_caps_unref(templ_caps); + + gst_element_class_set_static_metadata( + element_class, "AJA audio/video src", "Audio/Video/Source", + "Captures audio/video frames with AJA devices", + "Sebastian Dröge "); + + GST_DEBUG_CATEGORY_INIT(gst_aja_src_debug, "ajasrc", 0, "AJA src"); +} + +static void gst_aja_src_init(GstAjaSrc *self) { + g_mutex_init(&self->queue_lock); + g_cond_init(&self->queue_cond); + + self->device_identifier = g_strdup(DEFAULT_DEVICE_IDENTIFIER); + self->channel = DEFAULT_CHANNEL; + self->queue_size = DEFAULT_QUEUE_SIZE; + self->start_frame = DEFAULT_START_FRAME; + self->end_frame = DEFAULT_END_FRAME; + self->video_format_setting = DEFAULT_VIDEO_FORMAT; + self->audio_system_setting = DEFAULT_AUDIO_SYSTEM; + self->input_source = DEFAULT_INPUT_SOURCE; + self->audio_source = DEFAULT_AUDIO_SOURCE; + self->embedded_audio_input = DEFAULT_EMBEDDED_AUDIO_INPUT; + self->timecode_index = DEFAULT_TIMECODE_INDEX; + self->reference_source = DEFAULT_REFERENCE_SOURCE; + self->closed_caption_capture_mode = DEFAULT_CLOSED_CAPTION_CAPTURE_MODE; + self->capture_cpu_core = DEFAULT_CAPTURE_CPU_CORE; + + self->queue = + gst_queue_array_new_for_struct(sizeof(QueueItem), self->queue_size); + gst_base_src_set_live(GST_BASE_SRC_CAST(self), TRUE); + gst_base_src_set_format(GST_BASE_SRC_CAST(self), GST_FORMAT_TIME); + + self->video_format = NTV2_FORMAT_UNKNOWN; +} + +void gst_aja_src_set_property(GObject *object, guint property_id, + const GValue *value, GParamSpec *pspec) { + GstAjaSrc *self = GST_AJA_SRC(object); + + switch (property_id) { + case PROP_DEVICE_IDENTIFIER: + g_free(self->device_identifier); + self->device_identifier = g_value_dup_string(value); + break; + case PROP_CHANNEL: + self->channel = (NTV2Channel)g_value_get_uint(value); + break; + case PROP_QUEUE_SIZE: + self->queue_size = g_value_get_uint(value); + break; + case PROP_START_FRAME: + self->start_frame = g_value_get_uint(value); + break; + case PROP_END_FRAME: + self->end_frame = g_value_get_uint(value); + break; + case PROP_VIDEO_FORMAT: + self->video_format_setting = (GstAjaVideoFormat)g_value_get_enum(value); + break; + case PROP_AUDIO_SYSTEM: + self->audio_system_setting = (GstAjaAudioSystem)g_value_get_enum(value); + break; + case PROP_INPUT_SOURCE: + self->input_source = 
(GstAjaInputSource)g_value_get_enum(value); + break; + case PROP_SDI_MODE: + self->sdi_mode = (GstAjaSdiMode)g_value_get_enum(value); + break; + case PROP_AUDIO_SOURCE: + self->audio_source = (GstAjaAudioSource)g_value_get_enum(value); + break; + case PROP_EMBEDDED_AUDIO_INPUT: + self->embedded_audio_input = + (GstAjaEmbeddedAudioInput)g_value_get_enum(value); + break; + case PROP_TIMECODE_INDEX: + self->timecode_index = (GstAjaTimecodeIndex)g_value_get_enum(value); + break; + case PROP_RP188: + self->rp188 = g_value_get_boolean(value); + break; + case PROP_REFERENCE_SOURCE: + self->reference_source = (GstAjaReferenceSource)g_value_get_enum(value); + break; + case PROP_CLOSED_CAPTION_CAPTURE_MODE: + self->closed_caption_capture_mode = + (GstAjaClosedCaptionCaptureMode)g_value_get_enum(value); + break; + case PROP_CAPTURE_CPU_CORE: + self->capture_cpu_core = g_value_get_uint(value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); + break; + } +} + +void gst_aja_src_get_property(GObject *object, guint property_id, GValue *value, + GParamSpec *pspec) { + GstAjaSrc *self = GST_AJA_SRC(object); + + switch (property_id) { + case PROP_DEVICE_IDENTIFIER: + g_value_set_string(value, self->device_identifier); + break; + case PROP_CHANNEL: + g_value_set_uint(value, self->channel); + break; + case PROP_QUEUE_SIZE: + g_value_set_uint(value, self->queue_size); + break; + case PROP_START_FRAME: + g_value_set_uint(value, self->start_frame); + break; + case PROP_END_FRAME: + g_value_set_uint(value, self->end_frame); + break; + case PROP_VIDEO_FORMAT: + g_value_set_enum(value, self->video_format_setting); + break; + case PROP_AUDIO_SYSTEM: + g_value_set_enum(value, self->audio_system_setting); + break; + case PROP_INPUT_SOURCE: + g_value_set_enum(value, self->input_source); + break; + case PROP_SDI_MODE: + g_value_set_enum(value, self->sdi_mode); + break; + case PROP_AUDIO_SOURCE: + g_value_set_enum(value, self->audio_source); + break; + case PROP_EMBEDDED_AUDIO_INPUT: + g_value_set_enum(value, self->embedded_audio_input); + break; + case PROP_TIMECODE_INDEX: + g_value_set_enum(value, self->timecode_index); + break; + case PROP_RP188: + g_value_set_boolean(value, self->rp188); + break; + case PROP_REFERENCE_SOURCE: + g_value_set_enum(value, self->reference_source); + break; + case PROP_CLOSED_CAPTION_CAPTURE_MODE: + g_value_set_enum(value, self->closed_caption_capture_mode); + break; + case PROP_CAPTURE_CPU_CORE: + g_value_set_uint(value, self->capture_cpu_core); + break; + case PROP_SIGNAL: + g_value_set_boolean(value, self->signal); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, property_id, pspec); + break; + } +} + +void gst_aja_src_finalize(GObject *object) { + GstAjaSrc *self = GST_AJA_SRC(object); + + g_assert(self->device == NULL); + g_assert(gst_queue_array_get_length(self->queue) == 0); + g_clear_pointer(&self->queue, gst_queue_array_free); + + g_mutex_clear(&self->queue_lock); + g_cond_clear(&self->queue_cond); + + G_OBJECT_CLASS(parent_class)->finalize(object); +} + +static gboolean gst_aja_src_open(GstAjaSrc *self) { + GST_DEBUG_OBJECT(self, "Opening device"); + + g_assert(self->device == NULL); + + self->device = gst_aja_ntv2_device_obtain(self->device_identifier); + if (!self->device) { + GST_ERROR_OBJECT(self, "Failed to open device"); + return FALSE; + } + + if (!self->device->device->IsDeviceReady(false)) { + g_clear_pointer(&self->device, gst_aja_ntv2_device_unref); + return FALSE; + } + + 
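+  // Take exclusive control of the device: NTV2_OEM_TASKS keeps the AJA agent / retail services from reconfiguring the device, so the routing and settings applied below stay in effect.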
self->device->device->SetEveryFrameServices(::NTV2_OEM_TASKS); + self->device_id = self->device->device->GetDeviceID(); + + std::string serial_number; + if (!self->device->device->GetSerialNumberString(serial_number)) + serial_number = "none"; + + GST_DEBUG_OBJECT(self, + "Opened device with ID %d at index %d (%s, version %s, " + "serial number %s, can do VANC %d)", + self->device_id, self->device->device->GetIndexNumber(), + self->device->device->GetDisplayName().c_str(), + self->device->device->GetDeviceVersionString().c_str(), + serial_number.c_str(), + ::NTV2DeviceCanDoCustomAnc(self->device_id)); + + GST_DEBUG_OBJECT(self, + "Using SDK version %d.%d.%d.%d (%s) and driver version %s", + AJA_NTV2_SDK_VERSION_MAJOR, AJA_NTV2_SDK_VERSION_MINOR, + AJA_NTV2_SDK_VERSION_POINT, AJA_NTV2_SDK_BUILD_NUMBER, + AJA_NTV2_SDK_BUILD_DATETIME, + self->device->device->GetDriverVersionString().c_str()); + + self->device->device->SetMultiFormatMode(true); + + self->allocator = gst_aja_allocator_new(self->device); + + GST_DEBUG_OBJECT(self, "Opened device"); + + return TRUE; +} + +static gboolean gst_aja_src_close(GstAjaSrc *self) { + gst_clear_object(&self->allocator); + g_clear_pointer(&self->device, gst_aja_ntv2_device_unref); + self->device_id = DEVICE_ID_INVALID; + + GST_DEBUG_OBJECT(self, "Closed device"); + + return TRUE; +} + +// Must be called with ShmMutexLocker +static gboolean gst_aja_src_configure(GstAjaSrc *self) { + GST_DEBUG_OBJECT(self, "Starting"); + +#define NEEDS_QUAD_MODE(self) \ + (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_SQD || \ + self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI || \ + (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && \ + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4)) + + self->quad_mode = NEEDS_QUAD_MODE(self); + +#undef NEEDS_QUAD_MODE + + if (self->quad_mode) { + if (self->input_source != GST_AJA_INPUT_SOURCE_AUTO && + !(self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4)) { + GST_ERROR_OBJECT( + self, + "Quad modes require usage of the channel's default input source"); + return FALSE; + } + + if (self->channel != ::NTV2_CHANNEL1 && self->channel != ::NTV2_CHANNEL5) { + GST_ERROR_OBJECT(self, "Quad modes require channels 1 or 5"); + return FALSE; + } + } + + bool had_quad_enabled = false, had_quad_quad_enabled = false; + + // HDMI can also be internally quad mode but it runs on a single channel. + if (!(self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4)) { + if (self->channel < ::NTV2_CHANNEL5) { + self->device->device->GetQuadFrameEnable(had_quad_enabled, + ::NTV2_CHANNEL1); + + // 12G UHD is also internally considered quad modes but they run on a + // single channel. + if (had_quad_enabled && ::NTV2DeviceCanDo12gRouting(self->device_id)) { + NTV2VideoFormat fmt = + self->device->device->GetInputVideoFormat(::NTV2_INPUTSOURCE_SDI1); + if (fmt >= NTV2_FORMAT_FIRST_UHD_TSI_DEF_FORMAT && + fmt < NTV2_FORMAT_END_4K_TSI_DEF_FORMATS) + had_quad_enabled = false; + } + + self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, + ::NTV2_CHANNEL1); + } else { + self->device->device->GetQuadFrameEnable(had_quad_enabled, + ::NTV2_CHANNEL5); + + // 12G UHD is also internally considered quad modes but they run on a + // single channel. 
+ if (had_quad_enabled && ::NTV2DeviceCanDo12gRouting(self->device_id)) { + NTV2VideoFormat fmt = + self->device->device->GetInputVideoFormat(::NTV2_INPUTSOURCE_SDI5); + if (fmt >= NTV2_FORMAT_FIRST_UHD_TSI_DEF_FORMAT && + fmt < NTV2_FORMAT_END_4K_TSI_DEF_FORMATS) + had_quad_enabled = false; + } + + self->device->device->GetQuadQuadFrameEnable(had_quad_quad_enabled, + ::NTV2_CHANNEL5); + } + } + + // Stop any previously running quad mode, or other configurations on the + // quad channels + self->device->device->AutoCirculateStop(self->channel); + if (self->quad_mode || had_quad_enabled || had_quad_quad_enabled) { + NTV2Channel quad_channel; + + if (self->channel < ::NTV2_CHANNEL5) + quad_channel = ::NTV2_CHANNEL1; + else + quad_channel = ::NTV2_CHANNEL5; + + for (int i = 0; i < 4; i++) { + self->device->device->AutoCirculateStop((NTV2Channel)(quad_channel + i)); + } + } + + if (self->buffer_pool) { + gst_buffer_pool_set_active(self->buffer_pool, FALSE); + gst_clear_object(&self->buffer_pool); + } + + if (self->audio_buffer_pool) { + gst_buffer_pool_set_active(self->audio_buffer_pool, FALSE); + gst_clear_object(&self->audio_buffer_pool); + } + + if (self->anc_buffer_pool) { + gst_buffer_pool_set_active(self->anc_buffer_pool, FALSE); + gst_clear_object(&self->anc_buffer_pool); + } + + NTV2VANCMode vanc_mode; + NTV2InputSource input_source; + NTV2OutputCrosspointID input_source_id; + switch (self->input_source) { + case GST_AJA_INPUT_SOURCE_AUTO: + input_source = ::NTV2ChannelToInputSource(self->channel); + input_source_id = ::GetSDIInputOutputXptFromChannel(self->channel, false); + vanc_mode = ::NTV2DeviceCanDoCustomAnc(self->device_id) + ? ::NTV2_VANCMODE_OFF + : ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_ANALOG1: + input_source = ::NTV2_INPUTSOURCE_ANALOG1; + input_source_id = ::NTV2_XptAnalogIn; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_HDMI1: + input_source = ::NTV2_INPUTSOURCE_HDMI1; + input_source_id = ::NTV2_XptHDMIIn1; + vanc_mode = ::NTV2_VANCMODE_OFF; + break; + case GST_AJA_INPUT_SOURCE_HDMI2: + input_source = ::NTV2_INPUTSOURCE_HDMI2; + input_source_id = ::NTV2_XptHDMIIn2; + vanc_mode = ::NTV2_VANCMODE_OFF; + break; + case GST_AJA_INPUT_SOURCE_HDMI3: + input_source = ::NTV2_INPUTSOURCE_HDMI3; + input_source_id = ::NTV2_XptHDMIIn3; + vanc_mode = ::NTV2_VANCMODE_OFF; + break; + case GST_AJA_INPUT_SOURCE_HDMI4: + input_source = ::NTV2_INPUTSOURCE_HDMI4; + input_source_id = ::NTV2_XptHDMIIn4; + vanc_mode = ::NTV2_VANCMODE_OFF; + break; + case GST_AJA_INPUT_SOURCE_SDI1: + input_source = ::NTV2_INPUTSOURCE_SDI1; + input_source_id = ::NTV2_XptSDIIn1; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI2: + input_source = ::NTV2_INPUTSOURCE_SDI2; + input_source_id = ::NTV2_XptSDIIn2; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI3: + input_source = ::NTV2_INPUTSOURCE_SDI3; + input_source_id = ::NTV2_XptSDIIn3; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI4: + input_source = ::NTV2_INPUTSOURCE_SDI4; + input_source_id = ::NTV2_XptSDIIn4; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI5: + input_source = ::NTV2_INPUTSOURCE_SDI5; + input_source_id = ::NTV2_XptSDIIn5; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI6: + input_source = ::NTV2_INPUTSOURCE_SDI6; + input_source_id = ::NTV2_XptSDIIn6; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI7: + input_source = 
::NTV2_INPUTSOURCE_SDI7; + input_source_id = ::NTV2_XptSDIIn7; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + case GST_AJA_INPUT_SOURCE_SDI8: + input_source = ::NTV2_INPUTSOURCE_SDI8; + input_source_id = ::NTV2_XptSDIIn8; + vanc_mode = ::NTV2_VANCMODE_TALL; + break; + default: + g_assert_not_reached(); + break; + } + + self->configured_input_source = input_source; + + self->vanc_mode = vanc_mode; + + if (!self->device->device->EnableChannel(self->channel)) { + GST_ERROR_OBJECT(self, "Failed to enable channel"); + return FALSE; + } + + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + if (!self->device->device->EnableChannel( + (NTV2Channel)(self->channel + i))) { + GST_ERROR_OBJECT(self, "Failed to enable channel"); + return FALSE; + } + } + } + + self->device->device->EnableInputInterrupt(self->channel); + self->device->device->SubscribeInputVerticalEvent(self->channel); + + if (self->video_format_setting == GST_AJA_VIDEO_FORMAT_AUTO) { + self->device->device->WaitForInputVerticalInterrupt(self->channel, 10); + self->video_format = self->device->device->GetInputVideoFormat( + self->configured_input_source); + if (self->video_format == NTV2_FORMAT_UNKNOWN) { + GST_ERROR_OBJECT(self, "Input video format not detected"); + return TRUE; + } + std::string configured_string = NTV2VideoFormatToString(self->video_format); + GST_DEBUG_OBJECT(self, "Detected input video format %s (%d)", + configured_string.c_str(), (int)self->video_format); + } else { + self->video_format = gst_ntv2_video_format_from_aja_format( + self->video_format_setting, self->quad_mode); + } + + if (self->video_format == NTV2_FORMAT_UNKNOWN) { + GST_ERROR_OBJECT(self, "Unsupported mode"); + return FALSE; + } + + if (!::NTV2DeviceCanDoVideoFormat(self->device_id, self->video_format)) { + GST_ERROR_OBJECT(self, "Device does not support mode %d", + (int)self->video_format); + return FALSE; + } + + gst_video_info_from_ntv2_video_format(&self->configured_info, + self->video_format); + + if (self->quad_mode) { + if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { + self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, self->channel); + self->device->device->Set4kSquaresEnable(true, self->channel); + self->device->device->SetTsiFrameEnable(true, self->channel); + } else { + switch (self->sdi_mode) { + case GST_AJA_SDI_MODE_SINGLE_LINK: + g_assert_not_reached(); + break; + case GST_AJA_SDI_MODE_QUAD_LINK_SQD: + if (self->configured_info.height > 2160) { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(true, self->channel); + self->device->device->SetQuadQuadSquaresEnable(true, self->channel); + } else { + self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, + self->channel); + self->device->device->Set4kSquaresEnable(true, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + } + break; + case GST_AJA_SDI_MODE_QUAD_LINK_TSI: + if (self->configured_info.height > 2160) { + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(false, self->channel); + self->device->device->SetQuadQuadFrameEnable(true, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, + self->channel); + } else { + 
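+            // Regular UHD/4K quad-link TSI: disable square-division and quad-quad modes and route the four links as two-sample interleave instead.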
self->device->device->SetQuadQuadFrameEnable(false, self->channel); + self->device->device->SetQuadQuadSquaresEnable(false, + self->channel); + self->device->device->Set4kSquaresEnable(false, self->channel); + self->device->device->SetTsiFrameEnable(true, self->channel); + } + break; + } + } + } else if (had_quad_enabled || had_quad_quad_enabled) { + NTV2Channel quad_channel; + + if (self->channel < ::NTV2_CHANNEL5) + quad_channel = ::NTV2_CHANNEL1; + else + quad_channel = ::NTV2_CHANNEL5; + + self->device->device->Set4kSquaresEnable(false, quad_channel); + self->device->device->SetTsiFrameEnable(false, quad_channel); + self->device->device->SetQuadQuadFrameEnable(false, quad_channel); + self->device->device->SetQuadQuadSquaresEnable(false, quad_channel); + } + + self->device->device->SetMode(self->channel, NTV2_MODE_CAPTURE, false); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetMode((NTV2Channel)(self->channel + i), + NTV2_MODE_CAPTURE, false); + } + + std::string configured_string = NTV2VideoFormatToString(self->video_format); + GST_DEBUG_OBJECT(self, "Configuring video format %s (%d) on channel %d", + configured_string.c_str(), (int)self->video_format, + (int)self->channel); + if (!self->device->device->SetVideoFormat(self->video_format, false, false, + self->channel)) { + GST_DEBUG_OBJECT( + self, "Failed configuring video format %s (%d) on channel %d", + configured_string.c_str(), (int)self->video_format, (int)self->channel); + return FALSE; + } + + if (!::NTV2DeviceCanDoFrameBufferFormat(self->device_id, + ::NTV2_FBF_10BIT_YCBCR)) { + GST_ERROR_OBJECT(self, "Device does not support frame buffer format %d", + (int)::NTV2_FBF_10BIT_YCBCR); + return FALSE; + } + + if (!self->device->device->SetFrameBufferFormat(self->channel, + ::NTV2_FBF_10BIT_YCBCR)) { + GST_ERROR_OBJECT(self, "Failed configuring frame buffer format %d", + (int)::NTV2_FBF_10BIT_YCBCR); + return FALSE; + } + + // FIXME: Workaround for sometimes setting the video format not actually + // changing the register values. Let's just try again. 
+ { + NTV2VideoFormat fmt; + self->device->device->GetVideoFormat(fmt, self->channel); + + if (fmt != self->video_format) { + std::string actual_string = NTV2VideoFormatToString(fmt); + + GST_ERROR_OBJECT(self, + "Configured video format %s (%d) on channel %d but %s " + "(%d) is configured instead, trying again", + configured_string.c_str(), (int)self->video_format, + (int)self->channel, actual_string.c_str(), (int)fmt); + self->video_format = ::NTV2_FORMAT_UNKNOWN; + return TRUE; + } + } + + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetFrameBufferFormat( + (NTV2Channel)(self->channel + i), ::NTV2_FBF_10BIT_YCBCR); + } + + self->device->device->DMABufferAutoLock(false, true, 0); + + if (::NTV2DeviceHasBiDirectionalSDI(self->device_id)) { + self->device->device->SetSDITransmitEnable(self->channel, false); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetSDITransmitEnable( + (NTV2Channel)(self->channel + i), false); + } + } + + // Always use the framebuffer associated with the channel + NTV2InputCrosspointID framebuffer_id = + ::GetFrameBufferInputXptFromChannel(self->channel, false); + + const NTV2Standard standard( + ::GetNTV2StandardFromVideoFormat(self->video_format)); + self->device->device->SetStandard(standard, self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) + self->device->device->SetStandard(standard, + (NTV2Channel)(self->channel + i)); + } + + const NTV2FrameGeometry geometry = + ::GetNTV2FrameGeometryFromVideoFormat(self->video_format); + + self->vanc_mode = + ::HasVANCGeometries(geometry) ? vanc_mode : ::NTV2_VANCMODE_OFF; + if (self->vanc_mode == ::NTV2_VANCMODE_OFF) { + self->device->device->SetFrameGeometry(geometry, false, self->channel); + + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->SetFrameGeometry( + geometry, false, (NTV2Channel)(self->channel + i)); + } + } + } else { + const NTV2FrameGeometry vanc_geometry = + ::GetVANCFrameGeometry(geometry, self->vanc_mode); + + self->device->device->SetFrameGeometry(vanc_geometry, false, self->channel); + + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->SetFrameGeometry( + vanc_geometry, false, (NTV2Channel)(self->channel + i)); + } + } + } + + CNTV2SignalRouter router; + + // If any channels are currently running, initialize the router with the + // existing routing setup. Otherwise overwrite the whole routing table. 
+ { + bool have_channels_running = false; + + for (NTV2Channel c = ::NTV2_CHANNEL1; c < NTV2_MAX_NUM_CHANNELS; + c = (NTV2Channel)(c + 1)) { + AUTOCIRCULATE_STATUS ac_status; + + if (c == self->channel) continue; + + if (self->device->device->AutoCirculateGetStatus(c, ac_status) && + !ac_status.IsStopped()) { + have_channels_running = true; + break; + } + } + + if (have_channels_running) self->device->device->GetRouting(router); + } + + // Need to remove old routes for the output and framebuffer we're going to + // use + NTV2ActualConnections connections = router.GetConnections(); + + if (self->quad_mode) { + if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { + // Need to disconnect the 4 inputs corresponding to this channel from + // their framebuffers/muxers, and muxers from their framebuffers + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == NTV2_XptFrameBuffer1Input || + iter->first == NTV2_XptFrameBuffer1BInput || + iter->first == NTV2_XptFrameBuffer2Input || + iter->first == NTV2_XptFrameBuffer2BInput || + iter->second == NTV2_Xpt425Mux1AYUV || + iter->second == NTV2_Xpt425Mux1BYUV || + iter->second == NTV2_Xpt425Mux2AYUV || + iter->second == NTV2_Xpt425Mux2BYUV || + iter->first == NTV2_Xpt425Mux1AInput || + iter->first == NTV2_Xpt425Mux1BInput || + iter->first == NTV2_Xpt425Mux2AInput || + iter->first == NTV2_Xpt425Mux2BInput || + iter->second == NTV2_XptHDMIIn1 || + iter->second == NTV2_XptHDMIIn1Q2 || + iter->second == NTV2_XptHDMIIn1Q3 || + iter->second == NTV2_XptHDMIIn1Q4) + router.RemoveConnection(iter->first, iter->second); + } + } else if (self->channel == NTV2_CHANNEL1) { + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == NTV2_XptFrameBuffer1Input || + iter->first == NTV2_XptFrameBuffer1BInput || + iter->first == NTV2_XptFrameBuffer1DS2Input || + iter->first == NTV2_XptFrameBuffer2Input || + iter->first == NTV2_XptFrameBuffer2BInput || + iter->first == NTV2_XptFrameBuffer2DS2Input || + iter->second == NTV2_Xpt425Mux1AYUV || + iter->second == NTV2_Xpt425Mux1BYUV || + iter->second == NTV2_Xpt425Mux2AYUV || + iter->second == NTV2_Xpt425Mux2BYUV || + iter->first == NTV2_Xpt425Mux1AInput || + iter->first == NTV2_Xpt425Mux1BInput || + iter->first == NTV2_Xpt425Mux2AInput || + iter->first == NTV2_Xpt425Mux2BInput || + iter->second == NTV2_XptSDIIn1 || iter->second == NTV2_XptSDIIn2 || + iter->second == NTV2_XptSDIIn3 || iter->second == NTV2_XptSDIIn4 || + iter->second == NTV2_XptSDIIn1DS2 || + iter->second == NTV2_XptSDIIn2DS2 || + iter->first == NTV2_XptFrameBuffer1Input || + iter->first == NTV2_XptFrameBuffer2Input || + iter->first == NTV2_XptFrameBuffer3Input || + iter->first == NTV2_XptFrameBuffer4Input) + router.RemoveConnection(iter->first, iter->second); + } + } else if (self->channel == NTV2_CHANNEL5) { + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (iter->first == NTV2_XptFrameBuffer5Input || + iter->first == NTV2_XptFrameBuffer5BInput || + iter->first == NTV2_XptFrameBuffer5DS2Input || + iter->first == NTV2_XptFrameBuffer6Input || + iter->first == NTV2_XptFrameBuffer6BInput || + iter->first == NTV2_XptFrameBuffer6DS2Input || + iter->second == NTV2_Xpt425Mux3AYUV || + iter->second == NTV2_Xpt425Mux3BYUV || + iter->second == NTV2_Xpt425Mux4AYUV || + iter->second == NTV2_Xpt425Mux4BYUV || + iter->first == NTV2_Xpt425Mux3AInput || + iter->first == NTV2_Xpt425Mux3BInput || + iter->first == 
NTV2_Xpt425Mux4AInput || + iter->first == NTV2_Xpt425Mux4BInput || + iter->second == NTV2_XptSDIIn5 || iter->second == NTV2_XptSDIIn6 || + iter->second == NTV2_XptSDIIn7 || iter->second == NTV2_XptSDIIn8 || + iter->second == NTV2_XptSDIIn5DS2 || + iter->second == NTV2_XptSDIIn6DS2 || + iter->first == NTV2_XptFrameBuffer5Input || + iter->first == NTV2_XptFrameBuffer6Input || + iter->first == NTV2_XptFrameBuffer7Input || + iter->first == NTV2_XptFrameBuffer8Input) + router.RemoveConnection(iter->first, iter->second); + } + } else { + g_assert_not_reached(); + } + } else { + // This also removes all connections for any previous quad mode on the + // corresponding channels. + + NTV2OutputCrosspointID quad_input_source_ids[10]; + + if (input_source_id == NTV2_XptSDIIn1 || + input_source_id == NTV2_XptSDIIn2 || + input_source_id == NTV2_XptSDIIn3 || + input_source_id == NTV2_XptSDIIn4) { + if (had_quad_enabled || had_quad_quad_enabled) { + quad_input_source_ids[0] = NTV2_XptSDIIn1; + quad_input_source_ids[1] = NTV2_XptSDIIn2; + quad_input_source_ids[2] = NTV2_XptSDIIn3; + quad_input_source_ids[3] = NTV2_XptSDIIn4; + quad_input_source_ids[4] = NTV2_XptSDIIn1DS2; + quad_input_source_ids[5] = NTV2_XptSDIIn2DS2; + quad_input_source_ids[6] = NTV2_Xpt425Mux1AYUV; + quad_input_source_ids[7] = NTV2_Xpt425Mux1BYUV; + quad_input_source_ids[8] = NTV2_Xpt425Mux2AYUV; + quad_input_source_ids[9] = NTV2_Xpt425Mux2BYUV; + } + } else if (input_source_id == NTV2_XptSDIIn5 || + input_source_id == NTV2_XptSDIIn6 || + input_source_id == NTV2_XptSDIIn7 || + input_source_id == NTV2_XptSDIIn8) { + if (had_quad_enabled || had_quad_quad_enabled) { + quad_input_source_ids[0] = NTV2_XptSDIIn5; + quad_input_source_ids[1] = NTV2_XptSDIIn6; + quad_input_source_ids[2] = NTV2_XptSDIIn7; + quad_input_source_ids[3] = NTV2_XptSDIIn8; + quad_input_source_ids[4] = NTV2_XptSDIIn5DS2; + quad_input_source_ids[5] = NTV2_XptSDIIn6DS2; + quad_input_source_ids[6] = NTV2_Xpt425Mux3AYUV; + quad_input_source_ids[7] = NTV2_Xpt425Mux3BYUV; + quad_input_source_ids[8] = NTV2_Xpt425Mux4AYUV; + quad_input_source_ids[9] = NTV2_Xpt425Mux4BYUV; + } + } else { + g_assert_not_reached(); + } + + for (auto iter = connections.begin(); iter != connections.end(); iter++) { + if (had_quad_enabled || had_quad_quad_enabled) { + for (auto quad_input_source_id : quad_input_source_ids) { + if (iter->second == quad_input_source_id) + router.RemoveConnection(iter->first, iter->second); + } + } else { + if (iter->first == framebuffer_id || iter->second == input_source_id) + router.RemoveConnection(iter->first, iter->second); + } + } + } + + if (self->quad_mode) { + if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { + input_source_id = NTV2_Xpt425Mux1AYUV; + } else if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI && + !NTV2_IS_QUAD_QUAD_HFR_VIDEO_FORMAT(self->video_format) && + !NTV2_IS_QUAD_QUAD_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) + input_source_id = NTV2_Xpt425Mux1AYUV; + else if (self->channel == NTV2_CHANNEL5) + input_source_id = NTV2_Xpt425Mux3AYUV; + else + g_assert_not_reached(); + } + } + + GST_DEBUG_OBJECT(self, "Creating connection %d - %d", framebuffer_id, + input_source_id); + router.AddConnection(framebuffer_id, input_source_id); + + if (self->quad_mode) { + if (self->input_source >= GST_AJA_INPUT_SOURCE_HDMI1 && + self->input_source <= GST_AJA_INPUT_SOURCE_HDMI4) { + router.AddConnection(NTV2_XptFrameBuffer1BInput, NTV2_Xpt425Mux1BYUV); + 
router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_Xpt425Mux2AYUV); + router.AddConnection(NTV2_XptFrameBuffer2BInput, NTV2_Xpt425Mux2BYUV); + + router.AddConnection(NTV2_Xpt425Mux1AInput, NTV2_XptHDMIIn1); + router.AddConnection(NTV2_Xpt425Mux1BInput, NTV2_XptHDMIIn1Q2); + router.AddConnection(NTV2_Xpt425Mux2AInput, NTV2_XptHDMIIn1Q3); + router.AddConnection(NTV2_Xpt425Mux2BInput, NTV2_XptHDMIIn1Q4); + } else { + if (self->sdi_mode == GST_AJA_SDI_MODE_QUAD_LINK_TSI) { + if (NTV2_IS_QUAD_QUAD_HFR_VIDEO_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptFrameBuffer1DS2Input, NTV2_XptSDIIn2); + router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_XptSDIIn3); + router.AddConnection(NTV2_XptFrameBuffer2DS2Input, NTV2_XptSDIIn4); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptFrameBuffer5DS2Input, NTV2_XptSDIIn6); + router.AddConnection(NTV2_XptFrameBuffer5Input, NTV2_XptSDIIn7); + router.AddConnection(NTV2_XptFrameBuffer6DS2Input, NTV2_XptSDIIn8); + } else { + g_assert_not_reached(); + } + } else if (NTV2_IS_QUAD_QUAD_FORMAT(self->video_format)) { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptFrameBuffer1DS2Input, + NTV2_XptSDIIn1DS2); + router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_XptSDIIn2); + router.AddConnection(NTV2_XptFrameBuffer2DS2Input, + NTV2_XptSDIIn2DS2); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptFrameBuffer5DS2Input, + NTV2_XptSDIIn5DS2); + router.AddConnection(NTV2_XptFrameBuffer5Input, NTV2_XptSDIIn6); + router.AddConnection(NTV2_XptFrameBuffer6DS2Input, + NTV2_XptSDIIn6DS2); + } else { + g_assert_not_reached(); + } + // FIXME: Need special handling of NTV2_IS_4K_HFR_VIDEO_FORMAT for + // TSI? 
+ } else { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptFrameBuffer1BInput, + NTV2_Xpt425Mux1BYUV); + router.AddConnection(NTV2_XptFrameBuffer2Input, + NTV2_Xpt425Mux2AYUV); + router.AddConnection(NTV2_XptFrameBuffer2BInput, + NTV2_Xpt425Mux2BYUV); + + router.AddConnection(NTV2_Xpt425Mux1AInput, NTV2_XptSDIIn1); + router.AddConnection(NTV2_Xpt425Mux1BInput, NTV2_XptSDIIn2); + router.AddConnection(NTV2_Xpt425Mux2AInput, NTV2_XptSDIIn3); + router.AddConnection(NTV2_Xpt425Mux2BInput, NTV2_XptSDIIn4); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptFrameBuffer5BInput, + NTV2_Xpt425Mux3BYUV); + router.AddConnection(NTV2_XptFrameBuffer6Input, + NTV2_Xpt425Mux4AYUV); + router.AddConnection(NTV2_XptFrameBuffer6BInput, + NTV2_Xpt425Mux4BYUV); + + router.AddConnection(NTV2_Xpt425Mux3AInput, NTV2_XptSDIIn5); + router.AddConnection(NTV2_Xpt425Mux3BInput, NTV2_XptSDIIn6); + router.AddConnection(NTV2_Xpt425Mux4AInput, NTV2_XptSDIIn7); + router.AddConnection(NTV2_Xpt425Mux4BInput, NTV2_XptSDIIn8); + } else { + g_assert_not_reached(); + } + } + } else { + if (self->channel == NTV2_CHANNEL1) { + router.AddConnection(NTV2_XptFrameBuffer2Input, NTV2_XptSDIIn2); + router.AddConnection(NTV2_XptFrameBuffer3Input, NTV2_XptSDIIn3); + router.AddConnection(NTV2_XptFrameBuffer4Input, NTV2_XptSDIIn4); + } else if (self->channel == NTV2_CHANNEL5) { + router.AddConnection(NTV2_XptFrameBuffer6Input, NTV2_XptSDIIn6); + router.AddConnection(NTV2_XptFrameBuffer7Input, NTV2_XptSDIIn7); + router.AddConnection(NTV2_XptFrameBuffer8Input, NTV2_XptSDIIn8); + } else { + g_assert_not_reached(); + } + } + } + } + + { + std::stringstream os; + CNTV2SignalRouter oldRouter; + self->device->device->GetRouting(oldRouter); + oldRouter.Print(os); + GST_DEBUG_OBJECT(self, "Previous routing:\n%s", os.str().c_str()); + } + self->device->device->ApplySignalRoute(router, true); + { + std::stringstream os; + CNTV2SignalRouter currentRouter; + self->device->device->GetRouting(currentRouter); + currentRouter.Print(os); + GST_DEBUG_OBJECT(self, "New routing:\n%s", os.str().c_str()); + } + + switch (self->audio_system_setting) { + case GST_AJA_AUDIO_SYSTEM_1: + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + break; + case GST_AJA_AUDIO_SYSTEM_2: + self->audio_system = ::NTV2_AUDIOSYSTEM_2; + break; + case GST_AJA_AUDIO_SYSTEM_3: + self->audio_system = ::NTV2_AUDIOSYSTEM_3; + break; + case GST_AJA_AUDIO_SYSTEM_4: + self->audio_system = ::NTV2_AUDIOSYSTEM_4; + break; + case GST_AJA_AUDIO_SYSTEM_5: + self->audio_system = ::NTV2_AUDIOSYSTEM_5; + break; + case GST_AJA_AUDIO_SYSTEM_6: + self->audio_system = ::NTV2_AUDIOSYSTEM_6; + break; + case GST_AJA_AUDIO_SYSTEM_7: + self->audio_system = ::NTV2_AUDIOSYSTEM_7; + break; + case GST_AJA_AUDIO_SYSTEM_8: + self->audio_system = ::NTV2_AUDIOSYSTEM_8; + break; + case GST_AJA_AUDIO_SYSTEM_AUTO: + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + if (::NTV2DeviceGetNumAudioSystems(self->device_id) > 1) + self->audio_system = ::NTV2ChannelToAudioSystem(self->channel); + if (!::NTV2DeviceCanDoFrameStore1Display(self->device_id)) + self->audio_system = ::NTV2_AUDIOSYSTEM_1; + break; + default: + g_assert_not_reached(); + break; + } + + GST_DEBUG_OBJECT(self, "Using audio system %d", self->audio_system); + + NTV2AudioSource audio_source; + switch (self->audio_source) { + case GST_AJA_AUDIO_SOURCE_EMBEDDED: + audio_source = ::NTV2_AUDIO_EMBEDDED; + break; + case GST_AJA_AUDIO_SOURCE_AES: + audio_source = ::NTV2_AUDIO_AES; + break; + case GST_AJA_AUDIO_SOURCE_ANALOG: + 
audio_source = ::NTV2_AUDIO_ANALOG; + break; + case GST_AJA_AUDIO_SOURCE_HDMI: + audio_source = ::NTV2_AUDIO_HDMI; + break; + case GST_AJA_AUDIO_SOURCE_MIC: + audio_source = ::NTV2_AUDIO_MIC; + break; + default: + g_assert_not_reached(); + break; + } + + NTV2EmbeddedAudioInput embedded_audio_input; + switch (self->embedded_audio_input) { + case GST_AJA_EMBEDDED_AUDIO_INPUT_AUTO: + embedded_audio_input = + ::NTV2InputSourceToEmbeddedAudioInput(input_source); + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO1: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_1; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO2: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_2; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO3: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_3; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO4: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_4; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO5: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_5; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO6: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_6; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO7: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_7; + break; + case GST_AJA_EMBEDDED_AUDIO_INPUT_VIDEO8: + embedded_audio_input = ::NTV2_EMBEDDED_AUDIO_INPUT_VIDEO_8; + break; + default: + g_assert_not_reached(); + break; + } + + self->device->device->SetAudioSystemInputSource( + self->audio_system, audio_source, embedded_audio_input); + self->configured_audio_channels = + ::NTV2DeviceGetMaxAudioChannels(self->device_id); + self->device->device->SetNumberAudioChannels(self->configured_audio_channels, + self->audio_system); + self->device->device->SetAudioRate(::NTV2_AUDIO_48K, self->audio_system); + self->device->device->SetAudioBufferSize(::NTV2_AUDIO_BUFFER_BIG, + self->audio_system); + self->device->device->SetAudioLoopBack(::NTV2_AUDIO_LOOPBACK_OFF, + self->audio_system); + self->device->device->SetEmbeddedAudioClock( + ::NTV2_EMBEDDED_AUDIO_CLOCK_VIDEO_INPUT, self->audio_system); + + NTV2ReferenceSource reference_source; + switch (self->reference_source) { + case GST_AJA_REFERENCE_SOURCE_AUTO: + reference_source = ::NTV2InputSourceToReferenceSource(input_source); + break; + case GST_AJA_REFERENCE_SOURCE_EXTERNAL: + reference_source = ::NTV2_REFERENCE_EXTERNAL; + break; + case GST_AJA_REFERENCE_SOURCE_FREERUN: + reference_source = ::NTV2_REFERENCE_FREERUN; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_1: + reference_source = ::NTV2_REFERENCE_INPUT1; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_2: + reference_source = ::NTV2_REFERENCE_INPUT2; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_3: + reference_source = ::NTV2_REFERENCE_INPUT3; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_4: + reference_source = ::NTV2_REFERENCE_INPUT4; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_5: + reference_source = ::NTV2_REFERENCE_INPUT5; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_6: + reference_source = ::NTV2_REFERENCE_INPUT6; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_7: + reference_source = ::NTV2_REFERENCE_INPUT7; + break; + case GST_AJA_REFERENCE_SOURCE_INPUT_8: + reference_source = ::NTV2_REFERENCE_INPUT8; + break; + default: + g_assert_not_reached(); + break; + } + GST_DEBUG_OBJECT(self, "Configuring reference source %d", + (int)reference_source); + + self->device->device->SetReference(reference_source); + self->device->device->SetLTCInputEnable(true); + 
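+  // The 0xff filter value should accept RP-188 timecode from any DBB source on this channel rather than a single one.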
self->device->device->SetRP188SourceFilter(self->channel, 0xff); + + guint video_buffer_size = ::GetVideoActiveSize( + self->video_format, ::NTV2_FBF_10BIT_YCBCR, self->vanc_mode); + + self->buffer_pool = gst_buffer_pool_new(); + GstStructure *config = gst_buffer_pool_get_config(self->buffer_pool); + gst_buffer_pool_config_set_params(config, NULL, video_buffer_size, + 2 * self->queue_size, 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->buffer_pool, config); + gst_buffer_pool_set_active(self->buffer_pool, TRUE); + + guint audio_buffer_size = 401 * 1024; + + self->audio_buffer_pool = gst_buffer_pool_new(); + config = gst_buffer_pool_get_config(self->audio_buffer_pool); + gst_buffer_pool_config_set_params(config, NULL, audio_buffer_size, + 2 * self->queue_size, 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->audio_buffer_pool, config); + gst_buffer_pool_set_active(self->audio_buffer_pool, TRUE); + + guint anc_buffer_size = 8 * 1024; + + if (self->vanc_mode == ::NTV2_VANCMODE_OFF && + ::NTV2DeviceCanDoCustomAnc(self->device_id)) { + self->anc_buffer_pool = gst_buffer_pool_new(); + config = gst_buffer_pool_get_config(self->anc_buffer_pool); + gst_buffer_pool_config_set_params( + config, NULL, anc_buffer_size, + (self->configured_info.interlace_mode == + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE + ? 1 + : 2) * + self->queue_size, + 0); + gst_buffer_pool_config_set_allocator(config, self->allocator, NULL); + gst_buffer_pool_set_config(self->anc_buffer_pool, config); + gst_buffer_pool_set_active(self->anc_buffer_pool, TRUE); + } + + gst_element_post_message(GST_ELEMENT_CAST(self), + gst_message_new_latency(GST_OBJECT_CAST(self))); + + return TRUE; +} + +static gboolean gst_aja_src_start(GstAjaSrc *self) { + GST_DEBUG_OBJECT(self, "Starting"); + + self->video_format = NTV2_FORMAT_UNKNOWN; + self->signal = FALSE; + + self->capture_thread = new AJAThread(); + self->capture_thread->Attach(capture_thread_func, self); + self->capture_thread->SetPriority(AJA_ThreadPriority_High); + self->capture_thread->Start(); + g_mutex_lock(&self->queue_lock); + self->shutdown = FALSE; + self->playing = FALSE; + self->flushing = FALSE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + return TRUE; +} + +static gboolean gst_aja_src_stop(GstAjaSrc *self) { + QueueItem *item; + + GST_DEBUG_OBJECT(self, "Stopping"); + + g_mutex_lock(&self->queue_lock); + self->shutdown = TRUE; + self->flushing = TRUE; + self->playing = FALSE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + if (self->capture_thread) { + self->capture_thread->Stop(); + delete self->capture_thread; + self->capture_thread = NULL; + } + + GST_OBJECT_LOCK(self); + memset(&self->current_info, 0, sizeof(self->current_info)); + memset(&self->configured_info, 0, sizeof(self->configured_info)); + self->configured_audio_channels = 0; + GST_OBJECT_UNLOCK(self); + + while ((item = (QueueItem *)gst_queue_array_pop_head_struct(self->queue))) { + queue_item_clear(item); + } + self->queue_num_frames = 0; + + if (self->buffer_pool) { + gst_buffer_pool_set_active(self->buffer_pool, FALSE); + gst_clear_object(&self->buffer_pool); + } + + if (self->audio_buffer_pool) { + gst_buffer_pool_set_active(self->audio_buffer_pool, FALSE); + gst_clear_object(&self->audio_buffer_pool); + } + + if (self->anc_buffer_pool) { + gst_buffer_pool_set_active(self->anc_buffer_pool, FALSE); + 
gst_clear_object(&self->anc_buffer_pool); + } + + self->video_format = NTV2_FORMAT_UNKNOWN; + + if (self->signal) { + self->signal = FALSE; + g_object_notify(G_OBJECT(self), "signal"); + } + + GST_DEBUG_OBJECT(self, "Stopped"); + + return TRUE; +} + +static GstStateChangeReturn gst_aja_src_change_state( + GstElement *element, GstStateChange transition) { + GstAjaSrc *self = GST_AJA_SRC(element); + GstStateChangeReturn ret; + + switch (transition) { + case GST_STATE_CHANGE_NULL_TO_READY: + if (!gst_aja_src_open(self)) return GST_STATE_CHANGE_FAILURE; + break; + case GST_STATE_CHANGE_READY_TO_PAUSED: + if (!gst_aja_src_start(self)) return GST_STATE_CHANGE_FAILURE; + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + break; + default: + break; + } + + ret = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition); + if (ret == GST_STATE_CHANGE_FAILURE) return ret; + + switch (transition) { + case GST_STATE_CHANGE_PLAYING_TO_PAUSED: + g_mutex_lock(&self->queue_lock); + self->playing = FALSE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + g_mutex_lock(&self->queue_lock); + self->playing = TRUE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + if (!gst_aja_src_stop(self)) return GST_STATE_CHANGE_FAILURE; + break; + case GST_STATE_CHANGE_READY_TO_NULL: + if (!gst_aja_src_close(self)) return GST_STATE_CHANGE_FAILURE; + break; + default: + break; + } + + return ret; +} + +static GstCaps *gst_aja_src_get_caps(GstBaseSrc *bsrc, GstCaps *filter) { + GstAjaSrc *self = GST_AJA_SRC(bsrc); + GstCaps *caps; + + if (self->device) { + caps = gst_ntv2_supported_caps(self->device_id); + } else { + caps = gst_pad_get_pad_template_caps(GST_BASE_SRC_PAD(self)); + } + + // Intersect with the configured video format if any to constrain the caps + // further. + if (self->video_format_setting != GST_AJA_VIDEO_FORMAT_AUTO) { + GstCaps *configured_caps = + gst_aja_video_format_to_caps(self->video_format_setting); + + if (configured_caps) { + GstCaps *tmp; + + // Remove pixel-aspect-ratio from the configured caps to allow for both + // widescreen and non-widescreen PAL/NTSC. It's added back by the + // template caps above when intersecting. 
+ guint n = gst_caps_get_size(configured_caps); + for (guint i = 0; i < n; i++) { + GstStructure *s = gst_caps_get_structure(configured_caps, i); + + gst_structure_remove_fields(s, "pixel-aspect-ratio", NULL); + } + + tmp = gst_caps_intersect(caps, configured_caps); + gst_caps_unref(caps); + gst_caps_unref(configured_caps); + caps = tmp; + } + } + + if (filter) { + GstCaps *tmp = + gst_caps_intersect_full(filter, caps, GST_CAPS_INTERSECT_FIRST); + gst_caps_unref(caps); + caps = tmp; + } + + return caps; +} + +static gboolean gst_aja_src_query(GstBaseSrc *bsrc, GstQuery *query) { + GstAjaSrc *self = GST_AJA_SRC(bsrc); + gboolean ret = TRUE; + + switch (GST_QUERY_TYPE(query)) { + case GST_QUERY_LATENCY: { + if (self->current_info.finfo && + self->current_info.finfo->format != GST_VIDEO_FORMAT_UNKNOWN) { + GstClockTime min, max; + + min = gst_util_uint64_scale_ceil( + GST_SECOND, 3 * self->current_info.fps_d, self->current_info.fps_n); + max = self->queue_size * min; + + gst_query_set_latency(query, TRUE, min, max); + ret = TRUE; + } else { + ret = FALSE; + } + + return ret; + } + + default: + return GST_BASE_SRC_CLASS(parent_class)->query(bsrc, query); + break; + } +} + +static gboolean gst_aja_src_unlock(GstBaseSrc *bsrc) { + GstAjaSrc *self = GST_AJA_SRC(bsrc); + + g_mutex_lock(&self->queue_lock); + self->flushing = TRUE; + g_cond_signal(&self->queue_cond); + g_mutex_unlock(&self->queue_lock); + + return TRUE; +} + +static gboolean gst_aja_src_unlock_stop(GstBaseSrc *bsrc) { + GstAjaSrc *self = GST_AJA_SRC(bsrc); + + g_mutex_lock(&self->queue_lock); + self->flushing = FALSE; + g_mutex_unlock(&self->queue_lock); + + return TRUE; +} + +static GstFlowReturn gst_aja_src_create(GstPushSrc *psrc, GstBuffer **buffer) { + GstAjaSrc *self = GST_AJA_SRC(psrc); + GstFlowReturn flow_ret = GST_FLOW_OK; + QueueItem item = { + .type = QUEUE_ITEM_TYPE_DUMMY, + }; + +next_item: + item.type = QUEUE_ITEM_TYPE_DUMMY; + + g_mutex_lock(&self->queue_lock); + while (gst_queue_array_is_empty(self->queue) && !self->flushing) { + g_cond_wait(&self->queue_cond, &self->queue_lock); + } + + if (self->flushing) { + g_mutex_unlock(&self->queue_lock); + GST_DEBUG_OBJECT(self, "Flushing"); + return GST_FLOW_FLUSHING; + } + + item = *(QueueItem *)gst_queue_array_pop_head_struct(self->queue); + if (item.type == QUEUE_ITEM_TYPE_FRAME) { + self->queue_num_frames -= 1; + } + g_mutex_unlock(&self->queue_lock); + + switch (item.type) { + case QUEUE_ITEM_TYPE_DUMMY: + queue_item_clear(&item); + goto next_item; + case QUEUE_ITEM_TYPE_SIGNAL_CHANGE: + // These are already only produced when signal status is changing + if (item.signal_change.have_signal) { + GST_ELEMENT_INFO(GST_ELEMENT(self), RESOURCE, READ, + ("Signal recovered"), ("Input source detected")); + self->signal = TRUE; + g_object_notify(G_OBJECT(self), "signal"); + } else if (!item.signal_change.have_signal) { + if (item.signal_change.detected_format != ::NTV2_FORMAT_UNKNOWN) { + std::string format_string = + NTV2VideoFormatToString(item.signal_change.detected_format); + + GST_ELEMENT_WARNING_WITH_DETAILS( + GST_ELEMENT(self), RESOURCE, READ, ("Signal lost"), + ("Input source with different mode %s was detected", + format_string.c_str()), + ("detected-format", G_TYPE_STRING, format_string.c_str(), "vpid", + G_TYPE_UINT, item.signal_change.vpid, NULL)); + } else { + GST_ELEMENT_WARNING(GST_ELEMENT(self), RESOURCE, READ, + ("Signal lost"), + ("No input source was detected")); + } + self->signal = FALSE; + g_object_notify(G_OBJECT(self), "signal"); + } + 
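+      // Signal-change items carry no frame data, so drop the item and wait for the next one.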
queue_item_clear(&item); + goto next_item; + case QUEUE_ITEM_TYPE_ERROR: + GST_ERROR_OBJECT(self, "Stopping because of error on capture thread"); + gst_element_post_message(GST_ELEMENT(self), + (GstMessage *)g_steal_pointer(&item.error.msg)); + queue_item_clear(&item); + return GST_FLOW_ERROR; + case QUEUE_ITEM_TYPE_FRAMES_DROPPED: + GST_WARNING_OBJECT( + self, "Dropped frames from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT, + GST_TIME_ARGS(item.frames_dropped.timestamp_start), + GST_TIME_ARGS(item.frames_dropped.timestamp_end)); + gst_element_post_message( + GST_ELEMENT(self), + gst_message_new_qos(GST_OBJECT_CAST(self), TRUE, GST_CLOCK_TIME_NONE, + GST_CLOCK_TIME_NONE, + item.frames_dropped.timestamp_start, + item.frames_dropped.timestamp_end - + item.frames_dropped.timestamp_start)); + queue_item_clear(&item); + goto next_item; + case QUEUE_ITEM_TYPE_FRAME: + // fall through below + break; + } + + g_assert(item.type == QUEUE_ITEM_TYPE_FRAME); + + if (!self->signal) { + self->signal = TRUE; + g_object_notify(G_OBJECT(self), "signal"); + } + + *buffer = (GstBuffer *)g_steal_pointer(&item.frame.video_buffer); + gst_buffer_add_aja_audio_meta(*buffer, item.frame.audio_buffer); + gst_clear_buffer(&item.frame.audio_buffer); + + if (item.frame.tc.IsValid()) { + TimecodeFormat tc_format = ::kTCFormatUnknown; + GstVideoTimeCodeFlags flags = GST_VIDEO_TIME_CODE_FLAGS_NONE; + + if (self->configured_info.fps_n == 24 && self->configured_info.fps_d == 1) { + tc_format = kTCFormat24fps; + } else if (self->configured_info.fps_n == 25 && + self->configured_info.fps_d == 1) { + tc_format = kTCFormat25fps; + } else if (self->configured_info.fps_n == 30 && + self->configured_info.fps_d == 1) { + tc_format = kTCFormat30fps; + } else if (self->configured_info.fps_n == 30000 && + self->configured_info.fps_d == 1001) { + tc_format = kTCFormat30fpsDF; + flags = + (GstVideoTimeCodeFlags)(flags | GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME); + } else if (self->configured_info.fps_n == 48 && + self->configured_info.fps_d == 1) { + tc_format = kTCFormat48fps; + } else if (self->configured_info.fps_n == 50 && + self->configured_info.fps_d == 1) { + tc_format = kTCFormat50fps; + } else if (self->configured_info.fps_n == 60 && + self->configured_info.fps_d == 1) { + tc_format = kTCFormat60fps; + } else if (self->configured_info.fps_n == 60000 && + self->configured_info.fps_d == 1001) { + tc_format = kTCFormat60fpsDF; + flags = + (GstVideoTimeCodeFlags)(flags | GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME); + } + + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) + flags = + (GstVideoTimeCodeFlags)(flags | GST_VIDEO_TIME_CODE_FLAGS_INTERLACED); + + CRP188 rp188(item.frame.tc, tc_format); + + { + std::stringstream os; + os << rp188; + GST_TRACE_OBJECT(self, "Adding timecode %s", os.str().c_str()); + } + + guint hours, minutes, seconds, frames; + rp188.GetRP188Hrs(hours); + rp188.GetRP188Mins(minutes); + rp188.GetRP188Secs(seconds); + rp188.GetRP188Frms(frames); + + GstVideoTimeCode tc; + gst_video_time_code_init(&tc, self->configured_info.fps_n, + self->configured_info.fps_d, NULL, flags, hours, + minutes, seconds, frames, 0); + gst_buffer_add_video_time_code_meta(*buffer, &tc); + } + + AJAAncillaryList anc_packets; + + if (item.frame.anc_buffer) { + GstMapInfo map = GST_MAP_INFO_INIT; + GstMapInfo map2 = GST_MAP_INFO_INIT; + + gst_buffer_map(item.frame.anc_buffer, &map, GST_MAP_READ); + if (item.frame.anc_buffer2) + gst_buffer_map(item.frame.anc_buffer2, &map2, GST_MAP_READ); + + NTV2_POINTER 
ptr1(map.data, map.size); + NTV2_POINTER ptr2(map2.data, map2.size); + + AJAAncillaryList::SetFromDeviceAncBuffers(ptr1, ptr2, anc_packets); + + if (item.frame.anc_buffer2) gst_buffer_unmap(item.frame.anc_buffer2, &map2); + gst_buffer_unmap(item.frame.anc_buffer, &map); + } else if (self->vanc_mode != ::NTV2_VANCMODE_OFF) { + GstMapInfo map; + + NTV2FormatDescriptor format_desc(self->video_format, ::NTV2_FBF_10BIT_YCBCR, + self->vanc_mode); + + gst_buffer_map(*buffer, &map, GST_MAP_READ); + NTV2_POINTER ptr(map.data, map.size); + AJAAncillaryList::SetFromVANCData(ptr, format_desc, anc_packets); + gst_buffer_unmap(*buffer, &map); + + guint offset = + format_desc.RasterLineToByteOffset(format_desc.GetFirstActiveLine()); + guint size = format_desc.GetVisibleRasterBytes(); + + gst_buffer_resize(*buffer, offset, size); + } + + gst_clear_buffer(&item.frame.anc_buffer); + gst_clear_buffer(&item.frame.anc_buffer2); + + // Not using CountAncillaryDataWithType(AJAAncillaryDataType_Cea708) etc + // here because for SD it doesn't recognize the packets. It assumes they + // would only be received on AJAAncillaryDataChannel_Y but for SD it is + // actually AJAAncillaryDataChannel_Both. + // + // See AJA SDK support ticket #4844. + guint32 n_vanc_packets = anc_packets.CountAncillaryData(); + + // Check if we have either CEA608 or CEA708 packets, or both. + bool have_cea608 = false; + bool have_cea708 = false; + for (guint32 i = 0; i < n_vanc_packets; i++) { + AJAAncillaryData *packet = anc_packets.GetAncillaryDataAtIndex(i); + + if (packet->GetDID() == AJAAncillaryData_Cea608_Vanc_DID && + packet->GetSID() == AJAAncillaryData_Cea608_Vanc_SID && + packet->GetPayloadData() && packet->GetPayloadByteCount() && + AJA_SUCCESS(packet->ParsePayloadData())) { + GST_TRACE_OBJECT( + self, "Found CEA608 VANC of %" G_GSIZE_FORMAT " bytes at line %u", + packet->GetPayloadByteCount(), packet->GetLocationLineNumber()); + have_cea608 = true; + } else if (packet->GetDID() == AJAAncillaryData_CEA708_DID && + packet->GetSID() == AJAAncillaryData_CEA708_SID && + packet->GetPayloadData() && packet->GetPayloadByteCount() && + AJA_SUCCESS(packet->ParsePayloadData())) { + GST_TRACE_OBJECT( + self, "Found CEA708 CDP VANC of %" G_GSIZE_FORMAT " bytes at line %u", + packet->GetPayloadByteCount(), packet->GetLocationLineNumber()); + have_cea708 = true; + } + } + + // Decide based on the closed-caption-capture-mode property and closed + // caption availability which ones to add as metadata to the output buffer. 
+ bool want_cea608 = + have_cea608 && + (self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_AND_CEA608 || + self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_OR_CEA708 || + self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_ONLY || + (!have_cea708 && + self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_OR_CEA608)); + + bool want_cea708 = + have_cea708 && + (self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_AND_CEA608 || + self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_OR_CEA608 || + self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA708_ONLY || + (!have_cea608 && + self->closed_caption_capture_mode == + GST_AJA_CLOSED_CAPTION_CAPTURE_MODE_CEA608_OR_CEA708)); + + bool aspect_ratio_flag = false; + bool have_afd_bar = false; + for (guint32 i = 0; i < n_vanc_packets; i++) { + AJAAncillaryData *packet = anc_packets.GetAncillaryDataAtIndex(i); + + if (want_cea608 && packet->GetDID() == AJAAncillaryData_Cea608_Vanc_DID && + packet->GetSID() == AJAAncillaryData_Cea608_Vanc_SID && + packet->GetPayloadData() && packet->GetPayloadByteCount() && + AJA_SUCCESS(packet->ParsePayloadData())) { + GST_TRACE_OBJECT( + self, "Adding CEA608 VANC of %" G_GSIZE_FORMAT " bytes at line %u", + packet->GetPayloadByteCount(), packet->GetLocationLineNumber()); + gst_buffer_add_video_caption_meta( + *buffer, GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A, + packet->GetPayloadData(), packet->GetPayloadByteCount()); + } else if (want_cea708 && packet->GetDID() == AJAAncillaryData_CEA708_DID && + packet->GetSID() == AJAAncillaryData_CEA708_SID && + packet->GetPayloadData() && packet->GetPayloadByteCount() && + AJA_SUCCESS(packet->ParsePayloadData())) { + GST_TRACE_OBJECT( + self, + "Adding CEA708 CDP VANC of %" G_GSIZE_FORMAT " bytes at line %u", + packet->GetPayloadByteCount(), packet->GetLocationLineNumber()); + gst_buffer_add_video_caption_meta( + *buffer, GST_VIDEO_CAPTION_TYPE_CEA708_CDP, packet->GetPayloadData(), + packet->GetPayloadByteCount()); + } else if (packet->GetDID() == 0x41 && packet->GetSID() == 0x05 && + packet->GetPayloadData() && packet->GetPayloadByteCount() == 8) { + const guint8 *data = packet->GetPayloadData(); + + have_afd_bar = true; + aspect_ratio_flag = (data[0] >> 2) & 0x1; + + GstVideoAFDValue afd = (GstVideoAFDValue)((data[0] >> 3) & 0xf); + gboolean is_letterbox = ((data[3] >> 4) & 0x3) == 0; + guint16 bar1 = GST_READ_UINT16_BE(&data[4]); + guint16 bar2 = GST_READ_UINT16_BE(&data[6]); + + GST_TRACE_OBJECT(self, + "Found AFD/Bar VANC at line %u: AR %u, AFD %u, " + "letterbox %u, bar1 %u, bar2 %u", + packet->GetLocationLineNumber(), aspect_ratio_flag, afd, + is_letterbox, bar1, bar2); + + const NTV2Standard standard( + ::GetNTV2StandardFromVideoFormat(item.frame.detected_format)); + const NTV2SmpteLineNumber smpte_line_num_info = + ::GetSmpteLineNumber(standard); + bool field2 = + packet->GetLocationLineNumber() > + smpte_line_num_info.GetLastLine( + smpte_line_num_info.firstFieldTop ? NTV2_FIELD0 : NTV2_FIELD1); + + gst_buffer_add_video_afd_meta(*buffer, field2 ? 1 : 0, + GST_VIDEO_AFD_SPEC_SMPTE_ST2016_1, afd); + gst_buffer_add_video_bar_meta(*buffer, field2 ? 
1 : 0, is_letterbox, bar1, + bar2); + } + } + + bool caps_changed = false; + + CNTV2VPID vpid(item.frame.vpid); + if (vpid.IsValid()) { + GstVideoInfo info; + + { + std::stringstream os; + vpid.Print(os); + GST_TRACE_OBJECT(self, "Got valid VPID %s", os.str().c_str()); + } + + if (gst_video_info_from_ntv2_video_format(&info, + item.frame.detected_format)) { + switch (vpid.GetTransferCharacteristics()) { + default: + case NTV2_VPID_TC_SDR_TV: + if (info.height < 720) { + info.colorimetry.transfer = GST_VIDEO_TRANSFER_BT601; + } else { + info.colorimetry.transfer = GST_VIDEO_TRANSFER_BT709; + } + break; + case NTV2_VPID_TC_HLG: + info.colorimetry.transfer = GST_VIDEO_TRANSFER_ARIB_STD_B67; + break; + case NTV2_VPID_TC_PQ: + info.colorimetry.transfer = GST_VIDEO_TRANSFER_SMPTE2084; + break; + } + + switch (vpid.GetColorimetry()) { + case NTV2_VPID_Color_Rec709: + info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT709; + info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT709; + break; + case NTV2_VPID_Color_UHDTV: + info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT2020; + info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020; + break; + default: + // Default handling + break; + } + + switch (vpid.GetRGBRange()) { + case NTV2_VPID_Range_Full: + info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255; + break; + case NTV2_VPID_Range_Narrow: + info.colorimetry.range = GST_VIDEO_COLOR_RANGE_16_235; + break; + } + + if (!have_afd_bar && vpid.GetImageAspect16x9()) aspect_ratio_flag = true; + + // Widescreen PAL/NTSC + if (aspect_ratio_flag && info.height == 486) { + info.par_n = 40; + info.par_d = 33; + } else if (aspect_ratio_flag && info.height == 576) { + info.par_n = 16; + info.par_d = 11; + } + + if (!gst_pad_has_current_caps(GST_BASE_SRC_PAD(self)) || + !gst_video_info_is_equal(&info, &self->current_info)) { + self->current_info = info; + caps_changed = true; + } + } + } else { + GstVideoInfo info; + + if (gst_video_info_from_ntv2_video_format(&info, + item.frame.detected_format)) { + // Widescreen PAL/NTSC + if (aspect_ratio_flag && info.height == 486) { + info.par_n = 40; + info.par_d = 33; + } else if (aspect_ratio_flag && info.height == 576) { + info.par_n = 16; + info.par_d = 11; + } + + if (!gst_pad_has_current_caps(GST_BASE_SRC_PAD(self)) || + !gst_video_info_is_equal(&info, &self->current_info)) { + self->current_info = info; + caps_changed = true; + } + } else if (!gst_pad_has_current_caps(GST_BASE_SRC_PAD(self))) { + self->current_info = self->configured_info; + + // Widescreen PAL/NTSC + if (aspect_ratio_flag && self->current_info.height == 486) { + self->current_info.par_n = 40; + self->current_info.par_d = 33; + } else if (aspect_ratio_flag && self->current_info.height == 576) { + self->current_info.par_n = 16; + self->current_info.par_d = 11; + } + + caps_changed = true; + } + } + + if (caps_changed) { + GstCaps *caps = gst_video_info_to_caps(&self->current_info); + gst_caps_set_simple(caps, "audio-channels", G_TYPE_INT, + self->configured_audio_channels, NULL); + GST_DEBUG_OBJECT(self, "Configuring caps %" GST_PTR_FORMAT, caps); + gst_base_src_set_caps(GST_BASE_SRC_CAST(self), caps); + gst_caps_unref(caps); + } + + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { + GST_BUFFER_FLAG_SET(*buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED); + switch (GST_VIDEO_INFO_FIELD_ORDER(&self->configured_info)) { + case GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST: + GST_BUFFER_FLAG_SET(*buffer, GST_VIDEO_BUFFER_FLAG_TFF); + default: + break; + } + } + + 
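+  // Everything needed from the queue item has been copied or transferred into the output buffer, so release whatever the item still holds.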
queue_item_clear(&item); + + GST_TRACE_OBJECT(self, "Outputting buffer %" GST_PTR_FORMAT, *buffer); + + return flow_ret; +} + +#define AJA_SRC_ERROR(el, domain, code, text, debug) \ + G_STMT_START { \ + gchar *__txt = _gst_element_error_printf text; \ + gchar *__dbg = _gst_element_error_printf debug; \ + GstMessage *__msg; \ + GError *__err; \ + gchar *__name, *__fmt_dbg; \ + if (__txt) GST_WARNING_OBJECT(el, "error: %s", __txt); \ + if (__dbg) GST_WARNING_OBJECT(el, "error: %s", __dbg); \ + if (!__txt) \ + __txt = gst_error_get_message(GST_##domain##_ERROR, \ + GST_##domain##_ERROR_##code); \ + __err = g_error_new_literal(GST_##domain##_ERROR, \ + GST_##domain##_ERROR_##code, __txt); \ + __name = gst_object_get_path_string(GST_OBJECT_CAST(el)); \ + if (__dbg) \ + __fmt_dbg = g_strdup_printf("%s(%d): %s (): %s:\n%s", __FILE__, \ + __LINE__, GST_FUNCTION, __name, __dbg); \ + else \ + __fmt_dbg = g_strdup_printf("%s(%d): %s (): %s", __FILE__, __LINE__, \ + GST_FUNCTION, __name); \ + g_free(__name); \ + g_free(__dbg); \ + __msg = gst_message_new_error(GST_OBJECT(el), __err, __fmt_dbg); \ + QueueItem item = {.type = QUEUE_ITEM_TYPE_ERROR, .error{.msg = __msg}}; \ + gst_queue_array_push_tail_struct(el->queue, &item); \ + g_cond_signal(&el->queue_cond); \ + } \ + G_STMT_END; + +static void capture_thread_func(AJAThread *thread, void *data) { + GstAjaSrc *self = GST_AJA_SRC(data); + GstClock *clock = NULL; + AUTOCIRCULATE_TRANSFER transfer; + guint64 frames_dropped_last = G_MAXUINT64; + gboolean have_signal = TRUE, discont = TRUE; + guint iterations_without_frame = 0; + NTV2VideoFormat last_detected_video_format = ::NTV2_FORMAT_UNKNOWN; + + if (self->capture_cpu_core != G_MAXUINT) { + cpu_set_t mask; + pthread_t current_thread = pthread_self(); + + CPU_ZERO(&mask); + CPU_SET(self->capture_cpu_core, &mask); + + if (pthread_setaffinity_np(current_thread, sizeof(mask), &mask) != 0) { + GST_ERROR_OBJECT(self, + "Failed to set affinity for current thread to core %u", + self->capture_cpu_core); + } + } + + g_mutex_lock(&self->queue_lock); +restart: + GST_DEBUG_OBJECT(self, "Waiting for playing or shutdown"); + while (!self->playing && !self->shutdown) + g_cond_wait(&self->queue_cond, &self->queue_lock); + if (self->shutdown) { + GST_DEBUG_OBJECT(self, "Shutting down"); + g_mutex_unlock(&self->queue_lock); + return; + } + + GST_DEBUG_OBJECT(self, "Starting capture"); + g_mutex_unlock(&self->queue_lock); + + gst_clear_object(&clock); + clock = gst_element_get_clock(GST_ELEMENT_CAST(self)); + + frames_dropped_last = G_MAXUINT64; + have_signal = TRUE; + + g_mutex_lock(&self->queue_lock); + while (self->playing && !self->shutdown) { + // If we don't have a video format configured, configure the device now + // and potentially auto-detect the video format + if (self->video_format == NTV2_FORMAT_UNKNOWN) { + // Don't keep queue locked while configuring as this might take a while + g_mutex_unlock(&self->queue_lock); + + // Make sure to globally lock here as the routing settings and others are + // global shared state + ShmMutexLocker locker; + + if (!gst_aja_src_configure(self)) { + g_mutex_lock(&self->queue_lock); + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to configure device")); + goto out; + } + g_mutex_lock(&self->queue_lock); + + if (!self->playing || self->shutdown) goto restart; + + if (self->video_format == ::NTV2_FORMAT_UNKNOWN) { + GST_DEBUG_OBJECT(self, "No signal, waiting"); + frames_dropped_last = G_MAXUINT64; + if (have_signal) { + QueueItem item = { + .type = 
QUEUE_ITEM_TYPE_SIGNAL_CHANGE, + .signal_change = {.have_signal = FALSE, + .detected_format = ::NTV2_FORMAT_UNKNOWN, + .vpid = 0}}; + gst_queue_array_push_tail_struct(self->queue, &item); + g_cond_signal(&self->queue_cond); + have_signal = FALSE; + discont = TRUE; + } + self->device->device->WaitForInputVerticalInterrupt(self->channel); + continue; + } + + guint16 start_frame = self->start_frame; + guint16 end_frame = self->end_frame; + + // If both are set to the same value, try to find that many unallocated + // frames and use those. + if (start_frame == end_frame) { + gint assigned_start_frame = gst_aja_ntv2_device_find_unallocated_frames( + self->device, self->channel, self->start_frame); + + if (assigned_start_frame == -1) { + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to allocate %u frames", start_frame)); + goto out; + } + + start_frame = assigned_start_frame; + end_frame = start_frame + self->start_frame - 1; + } + + GST_DEBUG_OBJECT( + self, "Configuring channel %u with start frame %u and end frame %u", + self->channel, start_frame, end_frame); + + if (!self->device->device->AutoCirculateInitForInput( + self->channel, 0, self->audio_system, + (self->rp188 ? AUTOCIRCULATE_WITH_RP188 : 0) | + (self->vanc_mode == ::NTV2_VANCMODE_OFF + ? AUTOCIRCULATE_WITH_ANC + : 0), + 1, start_frame, end_frame)) { + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to initialize autocirculate")); + goto out; + } + + self->device->device->AutoCirculateStart(self->channel); + } + + // Check for valid signal first + NTV2VideoFormat current_video_format = + self->device->device->GetInputVideoFormat( + self->configured_input_source); + + bool all_quads_equal = true; + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + NTV2VideoFormat other_video_format = + self->device->device->GetInputVideoFormat( + (NTV2InputSource)(self->configured_input_source + i)); + if (other_video_format != current_video_format) { + std::string current_string = + NTV2VideoFormatToString(current_video_format); + std::string other_string = + NTV2VideoFormatToString(other_video_format); + GST_DEBUG_OBJECT( + self, + "Not all quadrants had the same format in " + "quad-link-mode: %s (%d) on input 1 vs. %s (%d) on input %d", + current_string.c_str(), current_video_format, + other_string.c_str(), other_video_format, i + 1); + all_quads_equal = false; + break; + } + } + } + + ULWord vpid_a = 0; + ULWord vpid_b = 0; + self->device->device->ReadSDIInVPID(self->channel, vpid_a, vpid_b); + + { + std::string current_string = + NTV2VideoFormatToString(current_video_format); + GST_TRACE_OBJECT( + self, "Detected input video format %s (%d) with VPID %08x / %08x", + current_string.c_str(), (int)current_video_format, vpid_a, vpid_b); + } + + NTV2VideoFormat effective_video_format = self->video_format; + // Can't call this unconditionally as it also maps e.g. 
3840x2160p to 1080p + if (self->quad_mode) { + effective_video_format = + ::GetQuarterSizedVideoFormat(effective_video_format); + } + switch (self->video_format) { + case NTV2_FORMAT_1080psf_2500_2: + if (current_video_format == NTV2_FORMAT_1080i_5000) + current_video_format = NTV2_FORMAT_1080psf_2500_2; + break; + case NTV2_FORMAT_1080psf_2997_2: + if (current_video_format == NTV2_FORMAT_1080i_5994) + current_video_format = NTV2_FORMAT_1080psf_2997_2; + break; + case NTV2_FORMAT_1080psf_3000_2: + if (current_video_format == NTV2_FORMAT_1080i_6000) + current_video_format = NTV2_FORMAT_1080psf_3000_2; + break; + default: + break; + } + + if (current_video_format == ::NTV2_FORMAT_UNKNOWN || !all_quads_equal) { + if (self->video_format_setting == GST_AJA_VIDEO_FORMAT_AUTO) + self->video_format = NTV2_FORMAT_UNKNOWN; + + GST_DEBUG_OBJECT(self, "No signal, waiting"); + g_mutex_unlock(&self->queue_lock); + frames_dropped_last = G_MAXUINT64; + if (have_signal) { + QueueItem item = { + .type = QUEUE_ITEM_TYPE_SIGNAL_CHANGE, + .signal_change = {.have_signal = FALSE, + .detected_format = ::NTV2_FORMAT_UNKNOWN, + .vpid = 0}}; + last_detected_video_format = ::NTV2_FORMAT_UNKNOWN; + gst_queue_array_push_tail_struct(self->queue, &item); + g_cond_signal(&self->queue_cond); + have_signal = FALSE; + discont = TRUE; + } + self->device->device->WaitForInputVerticalInterrupt(self->channel); + g_mutex_lock(&self->queue_lock); + continue; + } else if (current_video_format != effective_video_format && + current_video_format != self->video_format) { + // Try reconfiguring with the newly detected video format + if (self->video_format_setting == GST_AJA_VIDEO_FORMAT_AUTO) { + self->video_format = NTV2_FORMAT_UNKNOWN; + continue; + } + + std::string current_string = + NTV2VideoFormatToString(current_video_format); + std::string configured_string = + NTV2VideoFormatToString(self->video_format); + std::string effective_string = + NTV2VideoFormatToString(effective_video_format); + + GST_DEBUG_OBJECT(self, + "Different input format %s than configured %s " + "(effective %s), waiting", + current_string.c_str(), configured_string.c_str(), + effective_string.c_str()); + g_mutex_unlock(&self->queue_lock); + frames_dropped_last = G_MAXUINT64; + if (have_signal || current_video_format != last_detected_video_format) { + QueueItem item = { + .type = QUEUE_ITEM_TYPE_SIGNAL_CHANGE, + .signal_change = {.have_signal = FALSE, + .detected_format = current_video_format, + .vpid = vpid_a}}; + last_detected_video_format = current_video_format; + gst_queue_array_push_tail_struct(self->queue, &item); + g_cond_signal(&self->queue_cond); + have_signal = FALSE; + discont = TRUE; + } + self->device->device->WaitForInputVerticalInterrupt(self->channel); + g_mutex_lock(&self->queue_lock); + continue; + } + + AUTOCIRCULATE_STATUS status; + + self->device->device->AutoCirculateGetStatus(self->channel, status); + + GST_TRACE_OBJECT(self, + "Start frame %d " + "end frame %d " + "active frame %d " + "start time %" G_GUINT64_FORMAT + " " + "current time %" G_GUINT64_FORMAT + " " + "frames processed %u " + "frames dropped %u " + "buffer level %u", + status.acStartFrame, status.acEndFrame, + status.acActiveFrame, status.acRDTSCStartTime, + status.acRDTSCCurrentTime, status.acFramesProcessed, + status.acFramesDropped, status.acBufferLevel); + + if (frames_dropped_last == G_MAXUINT64) { + frames_dropped_last = status.acFramesDropped; + } else if (frames_dropped_last < status.acFramesDropped) { + GST_WARNING_OBJECT(self, "Dropped %" G_GUINT64_FORMAT " 
frames", + status.acFramesDropped - frames_dropped_last); + + GstClockTime timestamp = + gst_util_uint64_scale(status.acFramesProcessed + frames_dropped_last, + self->configured_info.fps_n, + self->configured_info.fps_d * GST_SECOND); + GstClockTime timestamp_end = gst_util_uint64_scale( + status.acFramesProcessed + status.acFramesDropped, + self->configured_info.fps_n, + self->configured_info.fps_d * GST_SECOND); + + QueueItem item = {.type = QUEUE_ITEM_TYPE_FRAMES_DROPPED, + .frames_dropped = {.driver_side = TRUE, + .timestamp_start = timestamp, + .timestamp_end = timestamp_end}}; + gst_queue_array_push_tail_struct(self->queue, &item); + g_cond_signal(&self->queue_cond); + + frames_dropped_last = status.acFramesDropped; + discont = TRUE; + } + + if (status.IsRunning() && status.acBufferLevel > 1) { + GstBuffer *video_buffer = NULL; + GstBuffer *audio_buffer = NULL; + GstBuffer *anc_buffer = NULL, *anc_buffer2 = NULL; + GstMapInfo video_map = GST_MAP_INFO_INIT; + GstMapInfo audio_map = GST_MAP_INFO_INIT; + GstMapInfo anc_map = GST_MAP_INFO_INIT; + GstMapInfo anc_map2 = GST_MAP_INFO_INIT; + AUTOCIRCULATE_TRANSFER transfer; + + if (!have_signal) { + QueueItem item = { + .type = QUEUE_ITEM_TYPE_SIGNAL_CHANGE, + .signal_change = {.have_signal = TRUE, + .detected_format = current_video_format, + .vpid = vpid_a}}; + gst_queue_array_push_tail_struct(self->queue, &item); + g_cond_signal(&self->queue_cond); + have_signal = TRUE; + } + + iterations_without_frame = 0; + + if (gst_buffer_pool_acquire_buffer(self->buffer_pool, &video_buffer, + NULL) != GST_FLOW_OK) { + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to acquire video buffer")); + break; + } + + if (gst_buffer_pool_acquire_buffer(self->audio_buffer_pool, &audio_buffer, + NULL) != GST_FLOW_OK) { + gst_buffer_unref(video_buffer); + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to acquire audio buffer")); + break; + } + + if (self->vanc_mode == ::NTV2_VANCMODE_OFF && + ::NTV2DeviceCanDoCustomAnc(self->device_id)) { + if (gst_buffer_pool_acquire_buffer(self->anc_buffer_pool, &anc_buffer, + NULL) != GST_FLOW_OK) { + gst_buffer_unref(audio_buffer); + gst_buffer_unref(video_buffer); + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to acquire anc buffer")); + break; + } + + if (self->configured_info.interlace_mode != + GST_VIDEO_INTERLACE_MODE_PROGRESSIVE) { + if (gst_buffer_pool_acquire_buffer( + self->anc_buffer_pool, &anc_buffer2, NULL) != GST_FLOW_OK) { + gst_buffer_unref(anc_buffer); + gst_buffer_unref(audio_buffer); + gst_buffer_unref(video_buffer); + AJA_SRC_ERROR(self, STREAM, FAILED, (NULL), + ("Failed to acquire anc buffer")); + break; + } + } + } + + gst_buffer_map(video_buffer, &video_map, GST_MAP_READWRITE); + gst_buffer_map(audio_buffer, &audio_map, GST_MAP_READWRITE); + if (anc_buffer) gst_buffer_map(anc_buffer, &anc_map, GST_MAP_READWRITE); + if (anc_buffer2) + gst_buffer_map(anc_buffer2, &anc_map2, GST_MAP_READWRITE); + + transfer.acFrameBufferFormat = ::NTV2_FBF_10BIT_YCBCR; + + transfer.SetVideoBuffer((ULWord *)video_map.data, video_map.size); + transfer.SetAudioBuffer((ULWord *)audio_map.data, audio_map.size); + transfer.SetAncBuffers((ULWord *)anc_map.data, anc_map.size, + (ULWord *)anc_map2.data, anc_map2.size); + + g_mutex_unlock(&self->queue_lock); + + bool transfered = true; + if (!self->device->device->AutoCirculateTransfer(self->channel, + transfer)) { + GST_WARNING_OBJECT(self, "Failed to transfer frame"); + transfered = false; + } + + if (anc_buffer2) gst_buffer_unmap(anc_buffer2, &anc_map2); 
+ if (anc_buffer) gst_buffer_unmap(anc_buffer, &anc_map); + gst_buffer_unmap(audio_buffer, &audio_map); + gst_buffer_unmap(video_buffer, &video_map); + + g_mutex_lock(&self->queue_lock); + + if (!transfered) { + gst_clear_buffer(&anc_buffer2); + gst_clear_buffer(&anc_buffer); + gst_clear_buffer(&audio_buffer); + gst_clear_buffer(&video_buffer); + continue; + } + + gst_buffer_set_size(audio_buffer, transfer.GetCapturedAudioByteCount()); + if (anc_buffer) + gst_buffer_set_size(anc_buffer, + transfer.GetCapturedAncByteCount(false)); + if (anc_buffer2) + gst_buffer_set_size(anc_buffer2, + transfer.GetCapturedAncByteCount(true)); + + NTV2TCIndex tc_index; + switch (self->timecode_index) { + case GST_AJA_TIMECODE_INDEX_VITC: + tc_index = ::NTV2InputSourceToTimecodeIndex( + self->configured_input_source, true); + break; + case GST_AJA_TIMECODE_INDEX_ATC_LTC: + tc_index = ::NTV2InputSourceToTimecodeIndex( + self->configured_input_source, false); + break; + case GST_AJA_TIMECODE_INDEX_LTC1: + tc_index = ::NTV2_TCINDEX_LTC1; + break; + case GST_AJA_TIMECODE_INDEX_LTC2: + tc_index = ::NTV2_TCINDEX_LTC2; + break; + default: + g_assert_not_reached(); + break; + } + + NTV2_RP188 time_code; + transfer.acTransferStatus.acFrameStamp.GetInputTimeCode(time_code, + tc_index); + + gint64 frame_time = transfer.acTransferStatus.acFrameStamp.acFrameTime; + gint64 now_sys = g_get_real_time(); + GstClockTime now_gst = gst_clock_get_time(clock); + if (now_sys * 10 > frame_time) { + GstClockTime diff = now_sys * 1000 - frame_time * 100; + if (now_gst > diff) + now_gst -= diff; + else + now_gst = 0; + } + + GstClockTime base_time = + gst_element_get_base_time(GST_ELEMENT_CAST(self)); + if (now_gst > base_time) + now_gst -= base_time; + else + now_gst = 0; + + // TODO: Drift detection and compensation + GST_BUFFER_PTS(video_buffer) = now_gst; + GST_BUFFER_DURATION(video_buffer) = gst_util_uint64_scale( + GST_SECOND, self->configured_info.fps_d, self->configured_info.fps_n); + GST_BUFFER_PTS(audio_buffer) = now_gst; + GST_BUFFER_DURATION(audio_buffer) = gst_util_uint64_scale( + GST_SECOND, self->configured_info.fps_d, self->configured_info.fps_n); + + while (self->queue_num_frames >= self->queue_size) { + guint n = gst_queue_array_get_length(self->queue); + + for (guint i = 0; i < n; i++) { + QueueItem *tmp = + (QueueItem *)gst_queue_array_peek_nth_struct(self->queue, i); + if (tmp->type == QUEUE_ITEM_TYPE_FRAME) { + GST_WARNING_OBJECT(self, + "Element queue overrun, dropping old frame"); + + QueueItem item = { + .type = QUEUE_ITEM_TYPE_FRAMES_DROPPED, + .frames_dropped = { + .driver_side = FALSE, + .timestamp_start = tmp->frame.capture_time, + .timestamp_end = + tmp->frame.capture_time + + gst_util_uint64_scale(GST_SECOND, + self->configured_info.fps_d, + self->configured_info.fps_n)}}; + queue_item_clear(tmp); + gst_queue_array_drop_struct(self->queue, i, NULL); + gst_queue_array_push_tail_struct(self->queue, &item); + self->queue_num_frames -= 1; + discont = TRUE; + g_cond_signal(&self->queue_cond); + break; + } + } + } + + if (discont) { + GST_BUFFER_FLAG_SET(video_buffer, GST_BUFFER_FLAG_DISCONT); + GST_BUFFER_FLAG_SET(audio_buffer, GST_BUFFER_FLAG_DISCONT); + discont = FALSE; + } + + QueueItem item = { + .type = QUEUE_ITEM_TYPE_FRAME, + .frame = {.capture_time = now_gst, + .video_buffer = video_buffer, + .audio_buffer = audio_buffer, + .anc_buffer = anc_buffer, + .anc_buffer2 = anc_buffer2, + .tc = time_code, + .detected_format = + (self->quad_mode + ? 
::GetQuadSizedVideoFormat(current_video_format) + : current_video_format), + .vpid = vpid_a}}; + + GST_TRACE_OBJECT(self, "Queuing frame %" GST_TIME_FORMAT, + GST_TIME_ARGS(now_gst)); + gst_queue_array_push_tail_struct(self->queue, &item); + self->queue_num_frames += 1; + GST_TRACE_OBJECT(self, "%u frames queued", self->queue_num_frames); + g_cond_signal(&self->queue_cond); + } else { + g_mutex_unlock(&self->queue_lock); + + // If we don't have a frame for 32 iterations (512ms) then consider + // this as signal loss too even if the driver still reports the + // expected mode above + if (have_signal && iterations_without_frame < 32) { + iterations_without_frame++; + } else { + frames_dropped_last = G_MAXUINT64; + if (have_signal || last_detected_video_format != current_video_format) { + QueueItem item = { + .type = QUEUE_ITEM_TYPE_SIGNAL_CHANGE, + .signal_change = {.have_signal = TRUE, + .detected_format = current_video_format, + .vpid = vpid_a}}; + last_detected_video_format = current_video_format; + gst_queue_array_push_tail_struct(self->queue, &item); + g_cond_signal(&self->queue_cond); + have_signal = FALSE; + discont = TRUE; + } + } + + self->device->device->WaitForInputVerticalInterrupt(self->channel); + + g_mutex_lock(&self->queue_lock); + } + } + +out : { + // Make sure to globally lock here as the routing settings and others are + // global shared state + ShmMutexLocker locker; + + self->device->device->AutoCirculateStop(self->channel); + self->device->device->UnsubscribeInputVerticalEvent(self->channel); + self->device->device->DisableInputInterrupt(self->channel); + + self->device->device->DisableChannel(self->channel); + if (self->quad_mode) { + for (int i = 1; i < 4; i++) { + self->device->device->DisableChannel((NTV2Channel)(self->channel + i)); + } + } +} + + if (!self->playing && !self->shutdown) goto restart; + g_mutex_unlock(&self->queue_lock); + + gst_clear_object(&clock); + + GST_DEBUG_OBJECT(self, "Stopped"); +} diff --git a/subprojects/gst-plugins-bad/sys/aja/gstajasrc.h b/subprojects/gst-plugins-bad/sys/aja/gstajasrc.h new file mode 100644 index 0000000000..119fcc807e --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/gstajasrc.h @@ -0,0 +1,102 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#pragma once + +#include +#include +#include + +#include "gstajacommon.h" + +G_BEGIN_DECLS + +#define GST_TYPE_AJA_SRC (gst_aja_src_get_type()) +#define GST_AJA_SRC(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_SRC, GstAjaSrc)) +#define GST_AJA_SRC_CAST(obj) ((GstAjaSrc *)obj) +#define GST_AJA_SRC_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AJA_SRC, GstAjaSrcClass)) +#define GST_IS_AJA_SRC(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AJA_SRC)) +#define GST_IS_AJA_SRC_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AJA_SRC)) + +typedef struct _GstAjaSrc GstAjaSrc; +typedef struct _GstAjaSrcClass GstAjaSrcClass; + +struct _GstAjaSrc { + GstPushSrc parent; + + // Everything below protected by queue lock + GMutex queue_lock; + GCond queue_cond; + GstQueueArray *queue; + guint queue_num_frames; + gboolean playing; + gboolean shutdown; + gboolean flushing; + + GstAjaNtv2Device *device; + NTV2DeviceID device_id; + GstAllocator *allocator; + GstBufferPool *buffer_pool; + GstBufferPool *audio_buffer_pool; + GstBufferPool *anc_buffer_pool; + + // Properties + gchar *device_identifier; + NTV2Channel channel; + GstAjaAudioSystem audio_system_setting; + GstAjaVideoFormat video_format_setting; + GstAjaSdiMode sdi_mode; + GstAjaInputSource input_source; + GstAjaAudioSource audio_source; + GstAjaEmbeddedAudioInput embedded_audio_input; + GstAjaTimecodeIndex timecode_index; + gboolean rp188; + GstAjaReferenceSource reference_source; + GstAjaClosedCaptionCaptureMode closed_caption_capture_mode; + guint queue_size; + guint start_frame, end_frame; + guint capture_cpu_core; + gboolean signal; + + NTV2AudioSystem audio_system; + NTV2VideoFormat video_format; + bool quad_mode; + NTV2VANCMode vanc_mode; + NTV2InputSource configured_input_source; + + GstVideoInfo configured_info; // Based on properties + GstVideoInfo current_info; // Based on properties + stream metadata + + gint configured_audio_channels; + + AJAThread *capture_thread; +}; + +struct _GstAjaSrcClass { + GstPushSrcClass parent_class; +}; + +G_GNUC_INTERNAL +GType gst_aja_src_get_type(void); + +G_END_DECLS diff --git a/subprojects/gst-plugins-bad/sys/aja/gstajasrcdemux.cpp b/subprojects/gst-plugins-bad/sys/aja/gstajasrcdemux.cpp new file mode 100644 index 0000000000..5ae841ed90 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/gstajasrcdemux.cpp @@ -0,0 +1,292 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include + +#include "gstajacommon.h" +#include "gstajasrcdemux.h" + +GST_DEBUG_CATEGORY_STATIC(gst_aja_src_demux_debug); +#define GST_CAT_DEFAULT gst_aja_src_demux_debug + +static GstStaticPadTemplate video_src_template = GST_STATIC_PAD_TEMPLATE( + "video", GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS("video/x-raw")); + +static GstStaticPadTemplate audio_src_template = GST_STATIC_PAD_TEMPLATE( + "audio", GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS("audio/x-raw")); + +static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE( + "sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS("video/x-raw")); + +static GstFlowReturn gst_aja_src_demux_sink_chain(GstPad *pad, + GstObject *parent, + GstBuffer *buffer); +static gboolean gst_aja_src_demux_sink_event(GstPad *pad, GstObject *parent, + GstEvent *event); +static gboolean gst_aja_src_demux_audio_src_query(GstPad *pad, + GstObject *parent, + GstQuery *query); +static gboolean gst_aja_src_demux_video_src_query(GstPad *pad, + GstObject *parent, + GstQuery *query); + +#define parent_class gst_aja_src_demux_parent_class +G_DEFINE_TYPE(GstAjaSrcDemux, gst_aja_src_demux, GST_TYPE_ELEMENT); + +static void gst_aja_src_demux_class_init(GstAjaSrcDemuxClass *klass) { + GstElementClass *element_class = GST_ELEMENT_CLASS(klass); + + gst_element_class_add_static_pad_template(element_class, &sink_template); + gst_element_class_add_static_pad_template(element_class, &video_src_template); + gst_element_class_add_static_pad_template(element_class, &audio_src_template); + + gst_element_class_set_static_metadata( + element_class, "AJA audio/video source demuxer", "Audio/Video/Demux", + "Demuxes audio/video from video buffers", + "Sebastian Dröge "); + + GST_DEBUG_CATEGORY_INIT(gst_aja_src_demux_debug, "ajasrcdemux", 0, + "AJA source demuxer"); +} + +static void gst_aja_src_demux_init(GstAjaSrcDemux *self) { + self->sink = gst_pad_new_from_static_template(&sink_template, "sink"); + gst_pad_set_chain_function(self->sink, + GST_DEBUG_FUNCPTR(gst_aja_src_demux_sink_chain)); + gst_pad_set_event_function(self->sink, + GST_DEBUG_FUNCPTR(gst_aja_src_demux_sink_event)); + gst_element_add_pad(GST_ELEMENT(self), self->sink); + + self->audio_src = + gst_pad_new_from_static_template(&audio_src_template, "audio"); + gst_pad_set_query_function( + self->audio_src, GST_DEBUG_FUNCPTR(gst_aja_src_demux_audio_src_query)); + gst_element_add_pad(GST_ELEMENT(self), self->audio_src); + + self->video_src = + gst_pad_new_from_static_template(&video_src_template, "video"); + gst_pad_set_query_function( + self->video_src, GST_DEBUG_FUNCPTR(gst_aja_src_demux_video_src_query)); + gst_element_add_pad(GST_ELEMENT(self), self->video_src); +} + +static GstFlowReturn gst_aja_src_demux_sink_chain(GstPad *pad, + GstObject *parent, + GstBuffer *buffer) { + GstAjaSrcDemux *self = GST_AJA_SRC_DEMUX(parent); + GstAjaAudioMeta *meta = gst_buffer_get_aja_audio_meta(buffer); + GstFlowReturn audio_flow_ret = GST_FLOW_OK; + GstFlowReturn video_flow_ret = GST_FLOW_OK; + + if (meta) { + GstBuffer *audio_buffer; + buffer = gst_buffer_make_writable(buffer); + meta = gst_buffer_get_aja_audio_meta(buffer); + audio_buffer = gst_buffer_ref(meta->buffer); + gst_buffer_remove_meta(buffer, GST_META_CAST(meta)); + + audio_flow_ret = gst_pad_push(self->audio_src, audio_buffer); + } else { + GstEvent *event = + gst_event_new_gap(GST_BUFFER_PTS(buffer), GST_BUFFER_DURATION(buffer)); + gst_pad_push_event(self->audio_src, event); + } + + video_flow_ret = 
gst_pad_push(self->video_src, buffer); + + // Combine flows the way it makes sense + if (video_flow_ret == GST_FLOW_NOT_LINKED && + audio_flow_ret == GST_FLOW_NOT_LINKED) + return GST_FLOW_NOT_LINKED; + if (video_flow_ret == GST_FLOW_EOS && audio_flow_ret == GST_FLOW_EOS) + return GST_FLOW_EOS; + if (video_flow_ret == GST_FLOW_FLUSHING || + video_flow_ret <= GST_FLOW_NOT_NEGOTIATED) + return video_flow_ret; + if (audio_flow_ret == GST_FLOW_FLUSHING || + audio_flow_ret <= GST_FLOW_NOT_NEGOTIATED) + return audio_flow_ret; + return GST_FLOW_OK; +} + +static gboolean gst_aja_src_demux_sink_event(GstPad *pad, GstObject *parent, + GstEvent *event) { + GstAjaSrcDemux *self = GST_AJA_SRC_DEMUX(parent); + + switch (GST_EVENT_TYPE(event)) { + case GST_EVENT_CAPS: { + GstCaps *caps; + GstStructure *s; + GstAudioInfo audio_info; + gint audio_channels = 0; + + gst_event_parse_caps(event, &caps); + s = gst_caps_get_structure(caps, 0); + + gst_structure_get_int(s, "audio-channels", &audio_channels); + + GstCaps *audio_caps, *video_caps; + + gst_audio_info_init(&audio_info); + gst_audio_info_set_format(&audio_info, GST_AUDIO_FORMAT_S32LE, 48000, + audio_channels ? audio_channels : 1, NULL); + audio_caps = gst_audio_info_to_caps(&audio_info); + gst_pad_set_caps(self->audio_src, audio_caps); + gst_caps_unref(audio_caps); + + video_caps = gst_caps_ref(caps); + gst_event_unref(event); + video_caps = gst_caps_make_writable(video_caps); + s = gst_caps_get_structure(video_caps, 0); + gst_structure_remove_field(s, "audio-channels"); + gst_pad_set_caps(self->video_src, video_caps); + gst_caps_unref(video_caps); + + return TRUE; + } + default: + return gst_pad_event_default(pad, parent, event); + } +} + +static gboolean gst_aja_src_demux_audio_src_query(GstPad *pad, + GstObject *parent, + GstQuery *query) { + GstAjaSrcDemux *self = GST_AJA_SRC_DEMUX(parent); + + switch (GST_QUERY_TYPE(query)) { + case GST_QUERY_CAPS: { + GstCaps *filter, *caps; + + gst_query_parse_caps(query, &filter); + if ((caps = gst_pad_get_current_caps(pad))) { + GST_DEBUG_OBJECT( + pad, "Returning currently negotiated caps %" GST_PTR_FORMAT, caps); + } else if ((caps = gst_pad_peer_query_caps(self->sink, NULL))) { + guint n; + GstAudioInfo audio_info; + gint audio_channels = 0; + GstCaps *tmp; + + GST_DEBUG_OBJECT(pad, "Got upstream caps %" GST_PTR_FORMAT, caps); + + n = gst_caps_get_size(caps); + for (guint i = 0; i < n; i++) { + GstStructure *s = gst_caps_get_structure(caps, i); + gint tmp; + + if (!gst_structure_get_int(s, "audio-channels", &tmp)) { + tmp = 0; + } + + // No audio channels in all caps + if (tmp == 0 || (audio_channels != 0 && audio_channels != tmp)) { + audio_channels = 0; + break; + } + + audio_channels = tmp; + } + + gst_audio_info_init(&audio_info); + gst_audio_info_set_format(&audio_info, GST_AUDIO_FORMAT_S32LE, 48000, + audio_channels ? 
audio_channels : 1, NULL); + tmp = gst_audio_info_to_caps(&audio_info); + gst_caps_unref(caps); + caps = tmp; + + if (!audio_channels) { + gst_caps_set_simple(caps, "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, + NULL); + } + + GST_DEBUG_OBJECT(pad, "Returning caps %" GST_PTR_FORMAT, caps); + } else { + caps = gst_pad_get_pad_template_caps(pad); + + GST_DEBUG_OBJECT(pad, "Returning template caps %" GST_PTR_FORMAT, caps); + } + + if (filter) { + GstCaps *tmp = + gst_caps_intersect_full(filter, caps, GST_CAPS_INTERSECT_FIRST); + gst_caps_unref(caps); + caps = tmp; + } + + gst_query_set_caps_result(query, caps); + gst_caps_unref(caps); + + return TRUE; + } + default: + return gst_pad_query_default(pad, parent, query); + } +} + +static gboolean gst_aja_src_demux_video_src_query(GstPad *pad, + GstObject *parent, + GstQuery *query) { + GstAjaSrcDemux *self = GST_AJA_SRC_DEMUX(parent); + + switch (GST_QUERY_TYPE(query)) { + case GST_QUERY_CAPS: { + GstCaps *filter, *caps; + + gst_query_parse_caps(query, &filter); + if ((caps = gst_pad_get_current_caps(pad))) { + GST_DEBUG_OBJECT( + pad, "Returning currently negotiated caps %" GST_PTR_FORMAT, caps); + } else if ((caps = gst_pad_peer_query_caps(self->sink, NULL))) { + guint n; + + GST_DEBUG_OBJECT(pad, "Returning upstream caps %" GST_PTR_FORMAT, caps); + + caps = gst_caps_make_writable(caps); + n = gst_caps_get_size(caps); + for (guint i = 0; i < n; i++) { + GstStructure *s = gst_caps_get_structure(caps, i); + gst_structure_remove_field(s, "audio-channels"); + } + } else { + caps = gst_pad_get_pad_template_caps(pad); + + GST_DEBUG_OBJECT(pad, "Returning template caps %" GST_PTR_FORMAT, caps); + } + + if (filter) { + GstCaps *tmp = + gst_caps_intersect_full(filter, caps, GST_CAPS_INTERSECT_FIRST); + gst_caps_unref(caps); + caps = tmp; + } + + gst_query_set_caps_result(query, caps); + gst_caps_unref(caps); + + return TRUE; + } + default: + return gst_pad_query_default(pad, parent, query); + } +} diff --git a/subprojects/gst-plugins-bad/sys/aja/gstajasrcdemux.h b/subprojects/gst-plugins-bad/sys/aja/gstajasrcdemux.h new file mode 100644 index 0000000000..fe1b791729 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/gstajasrcdemux.h @@ -0,0 +1,59 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#pragma once + +#include +#include +#include + +#include "gstajacommon.h" + +G_BEGIN_DECLS + +#define GST_TYPE_AJA_SRC_DEMUX (gst_aja_src_demux_get_type()) +#define GST_AJA_SRC_DEMUX(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AJA_SRC_DEMUX, GstAjaSrcDemux)) +#define GST_AJA_SRC_DEMUX_CAST(obj) ((GstAjaSrcDemux *)obj) +#define GST_AJA_SRC_DEMUX_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AJA_SRC_DEMUX, \ + GstAjaSrcDemuxClass)) +#define GST_IS_AJA_SRC_DEMUX(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AJA_SRC_DEMUX)) +#define GST_IS_AJA_SRC_DEMUX_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AJA_SRC_DEMUX)) + +typedef struct _GstAjaSrcDemux GstAjaSrcDemux; +typedef struct _GstAjaSrcDemuxClass GstAjaSrcDemuxClass; + +struct _GstAjaSrcDemux { + GstElement parent; + + GstPad *sink; + GstPad *video_src, *audio_src; +}; + +struct _GstAjaSrcDemuxClass { + GstElementClass parent_class; +}; + +G_GNUC_INTERNAL +GType gst_aja_src_demux_get_type(void); + +G_END_DECLS diff --git a/subprojects/gst-plugins-bad/sys/aja/meson.build b/subprojects/gst-plugins-bad/sys/aja/meson.build new file mode 100644 index 0000000000..7ae140dbd9 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/meson.build @@ -0,0 +1,112 @@ +project('gst-aja', 'cpp', + version : '0.1.0', + meson_version : '>= 0.63.0', + default_options : [ 'warning_level=1', + 'buildtype=debugoptimized', + 'cpp_std=c++11', + 'cpp_eh=none', + 'cpp_rtti=false', + ] +) + +plugins_install_dir = '@0@/gstreamer-1.0'.format(get_option('libdir')) + +cxx = meson.get_compiler('cpp') + +if cxx.has_argument('-fvisibility=hidden') + add_project_arguments('-fvisibility=hidden', language: 'cpp') +endif + +if cxx.get_id() == 'msvc' + # Ignore several spurious warnings for things gstreamer does very commonly + # If a warning is completely useless and spammy, use '/wdXXXX' to suppress it + # If a warning is harmless but hard to fix, use '/woXXXX' so it's shown once + # NOTE: Only add warnings here if you are sure they're spurious + test_cppflags = [] + msvc_args = [ + '/wd4018', # implicit signed/unsigned conversion + '/wd4146', # unary minus on unsigned (beware INT_MIN) + '/wd4244', # lossy type conversion (e.g. double -> int) + '/wd4305', # truncating type conversion (e.g. 
double -> float) + ] + add_project_arguments(msvc_args, language : 'cpp') + # Disable SAFESEH with MSVC for plugins and libs that use external deps that + # are built with MinGW + noseh_link_args = ['/SAFESEH:NO'] +else + test_cppflags = ['-Wno-non-virtual-dtor'] + noseh_link_args = [] +endif + +common_flags = [ + '-DAJALinux=1', + '-DAJA_LINUX=1', +] +foreach cxxflag: test_cppflags + if cxx.has_argument(cxxflag) + common_flags += [ cxxflag ] + endif +endforeach + +gst_dep = dependency('gstreamer-1.0', version : '>= 1.18', required : true) +gstbase_dep = dependency('gstreamer-base-1.0', version : '>= 1.18', required : true) +gstaudio_dep = dependency('gstreamer-audio-1.0', version : '>= 1.18', required : true) +gstvideo_dep = dependency('gstreamer-video-1.0', version : '>= 1.18', required : true) + +thread_dep = dependency('threads') +rt_dep = cxx.find_library('rt', required : false) + +aja_sdk_dir = get_option('aja-sdk-dir') +if aja_sdk_dir == '' + ajantv2_dep = dependency('libajantv2') + aja_includedirs = [] + + if not ajantv2_dep.found() + subdir_done() + endif +else + aja_includedirs = include_directories( + f'@aja_sdk_dir@/ajalibraries', + f'@aja_sdk_dir@/ajalibraries/ajantv2/includes', + f'@aja_sdk_dir@/ajalibraries/ajantv2/src/lin', + ) + + message('Looking for AJA SDK in directory ' + aja_sdk_dir) + if not cxx.has_header('ajabase/common/videotypes.h', + include_directories : aja_includedirs, + ) + error('Cannot find AJA SDK') + endif + + + ajantv2_lib = cxx.find_library('ajantv2', + # If the header is found, this should also be + required : true, + dirs : [f'@aja_sdk_dir@/lib'], + ) + ajantv2_dep = declare_dependency( + dependencies: ajantv2_lib, + include_directories: aja_includedirs, + ) +endif + +gstaja = library('gstaja', + ['plugin.cpp', + 'gstajacommon.cpp', + 'gstajasink.cpp', + 'gstajasinkcombiner.cpp', + 'gstajasrc.cpp', + 'gstajasrcdemux.cpp', + 'gstajadeviceprovider.cpp', + ], + cpp_args : [ + '-DPACKAGE="gst-aja"', + '-DGST_PACKAGE_NAME="gstreamer-aja"', + '-DGST_PACKAGE_ORIGIN="https://github.com/centricular/gstreamer-aja"', + '-DVERSION="@0@"'.format(meson.project_version())] + common_flags, + link_args : noseh_link_args, + dependencies : [gstvideo_dep, gstaudio_dep, gstbase_dep, gst_dep, ajantv2_dep, thread_dep, rt_dep], + install : true, + install_dir : plugins_install_dir, +) + diff --git a/subprojects/gst-plugins-bad/sys/aja/meson_options.txt b/subprojects/gst-plugins-bad/sys/aja/meson_options.txt new file mode 100644 index 0000000000..d03c4d5426 --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/meson_options.txt @@ -0,0 +1,2 @@ +option('aja-sdk-dir', type : 'string', value : '', + description : 'Directory with AJA SDK, e.g. ntv2sdklinux_16.0.0.4') diff --git a/subprojects/gst-plugins-bad/sys/aja/plugin.cpp b/subprojects/gst-plugins-bad/sys/aja/plugin.cpp new file mode 100644 index 0000000000..bfd6f7f96e --- /dev/null +++ b/subprojects/gst-plugins-bad/sys/aja/plugin.cpp @@ -0,0 +1,53 @@ +/* GStreamer + * Copyright (C) 2021 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. 
+ * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. + */ +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include + +#include "gstajacommon.h" +#include "gstajadeviceprovider.h" +#include "gstajasink.h" +#include "gstajasinkcombiner.h" +#include "gstajasrc.h" +#include "gstajasrcdemux.h" + +static gboolean plugin_init(GstPlugin* plugin) { + AJADebug::Open(); + + gst_aja_common_init(); + + gst_element_register(plugin, "ajasrc", GST_RANK_NONE, GST_TYPE_AJA_SRC); + gst_element_register(plugin, "ajasrcdemux", GST_RANK_NONE, + GST_TYPE_AJA_SRC_DEMUX); + gst_element_register(plugin, "ajasink", GST_RANK_NONE, GST_TYPE_AJA_SINK); + gst_element_register(plugin, "ajasinkcombiner", GST_RANK_NONE, + GST_TYPE_AJA_SINK_COMBINER); + + gst_device_provider_register(plugin, "ajadeviceprovider", GST_RANK_PRIMARY, + GST_TYPE_AJA_DEVICE_PROVIDER); + + return TRUE; +} + +GST_PLUGIN_DEFINE(GST_VERSION_MAJOR, GST_VERSION_MINOR, aja, + "GStreamer AJA plugin", plugin_init, VERSION, "LGPL", + PACKAGE_NAME, GST_PACKAGE_ORIGIN) diff --git a/subprojects/ntv2.wrap b/subprojects/ntv2.wrap new file mode 100644 index 0000000000..a4dc981fc0 --- /dev/null +++ b/subprojects/ntv2.wrap @@ -0,0 +1,9 @@ +[wrap-file] +directory = ntv2-16.2-bugfix5 +source_url = https://github.com/aja-video/ntv2/archive/refs/tags/v16.2-bugfix5.tar.gz +source_filename = ntv2-16.2-bugfix5.tar.gz +source_hash = 560c798c3a43aa0cef1cba6be5adb669ec72e648c28814158eb649275efc9f88 +diff_files = ntv2-16.2-bugfix5/ntv2-16.2-bugfix5.meson.patch + +[provide] +libajantv2 = libajantv2_dep diff --git a/subprojects/packagefiles/ntv2-16.2-bugfix5/ntv2-16.2-bugfix5.meson.patch b/subprojects/packagefiles/ntv2-16.2-bugfix5/ntv2-16.2-bugfix5.meson.patch new file mode 100644 index 0000000000..99963f371b --- /dev/null +++ b/subprojects/packagefiles/ntv2-16.2-bugfix5/ntv2-16.2-bugfix5.meson.patch @@ -0,0 +1,167 @@ +--- /dev/null 2023-10-13 08:29:31.027000134 +0300 ++++ ntv2-16.2-bugfix5/meson.build 2023-10-21 09:58:37.680821179 +0300 +@@ -0,0 +1,164 @@ ++project('ntv2', 'cpp', ++ version : '16.2-bugfix5', ++ meson_version : '>= 0.54.0', ++ default_options : [ 'warning_level=1', ++ 'buildtype=debugoptimized', ++ 'cpp_std=c++11', ++ 'cpp_eh=none', ++ 'cpp_rtti=false', ++ ] ++) ++ ++cxx = meson.get_compiler('cpp') ++test_cppflags = ['-Wno-non-virtual-dtor'] ++ ++common_flags = [ ++ '-DAJALinux=1', ++ '-DAJA_LINUX=1', ++] ++foreach cxxflag: test_cppflags ++ if cxx.has_argument(cxxflag) ++ common_flags += [ cxxflag ] ++ endif ++endforeach ++ ++thread_dep = dependency('threads') ++rt_dep = cxx.find_library('rt', required : false) ++ ++ajantv2_sources = [ ++ 'ajalibraries/ajaanc/src/ancillarydata.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydatafactory.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_cea608.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_cea608_line21.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_cea608_vanc.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_cea708.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_framestatusinfo524D.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_framestatusinfo5251.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_hdr_hdr10.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_hdr_hlg.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_hdr_sdr.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_timecode.cpp', ++ 
'ajalibraries/ajaanc/src/ancillarydata_timecode_atc.cpp', ++ 'ajalibraries/ajaanc/src/ancillarydata_timecode_vitc.cpp', ++ 'ajalibraries/ajaanc/src/ancillarylist.cpp', ++ 'ajalibraries/ajabase/system/atomic.cpp', ++ 'ajalibraries/ajabase/common/audioutilities.cpp', ++ 'ajalibraries/ajabase/common/buffer.cpp', ++ 'ajalibraries/ajabase/common/common.cpp', ++ 'ajalibraries/ajabase/system/debug.cpp', ++ 'ajalibraries/ajabase/common/dpx_hdr.cpp', ++ 'ajalibraries/ajabase/common/dpxfileio.cpp', ++ 'ajalibraries/ajabase/system/event.cpp', ++ 'ajalibraries/ajabase/system/linux/eventimpl.cpp', ++ 'ajalibraries/ajabase/system/file_io.cpp', ++ 'ajalibraries/ajabase/common/guid.cpp', ++ 'ajalibraries/ajabase/system/info.cpp', ++ 'ajalibraries/ajabase/system/linux/infoimpl.cpp', ++ 'ajalibraries/ajabase/network/ip_socket.cpp', ++ 'ajalibraries/ajabase/system/lock.cpp', ++ 'ajalibraries/ajabase/system/linux/lockimpl.cpp', ++ 'ajalibraries/ajabase/system/memory.cpp', ++ 'ajalibraries/ajabase/common/options_popt.cpp', ++ 'ajalibraries/ajabase/common/performance.cpp', ++ 'ajalibraries/ajabase/common/pixelformat.cpp', ++ 'ajalibraries/ajabase/pnp/pnp.cpp', ++ 'ajalibraries/ajabase/pnp/linux/pnpimpl.cpp', ++ 'ajalibraries/ajabase/system/process.cpp', ++ 'ajalibraries/ajabase/system/linux/processimpl.cpp', ++ 'ajalibraries/ajabase/system/system.cpp', ++ 'ajalibraries/ajabase/system/systemtime.cpp', ++ 'ajalibraries/ajabase/common/testpatterngen.cpp', ++ 'ajalibraries/ajabase/system/thread.cpp', ++ 'ajalibraries/ajabase/system/linux/threadimpl.cpp', ++ 'ajalibraries/ajabase/common/timebase.cpp', ++ 'ajalibraries/ajabase/common/timecode.cpp', ++ 'ajalibraries/ajabase/common/timecodeburn.cpp', ++ 'ajalibraries/ajabase/common/timer.cpp', ++ 'ajalibraries/ajabase/network/udp_socket.cpp', ++ 'ajalibraries/ajabase/common/videoutilities.cpp', ++ 'ajalibraries/ajabase/common/wavewriter.cpp', ++ 'ajalibraries/ajabase/persistence/persistence.cpp', ++ 'ajalibraries/ajantv2/src/ntv2audio.cpp', ++ 'ajalibraries/ajantv2/src/ntv2anc.cpp', ++ 'ajalibraries/ajantv2/src/ntv2autocirculate.cpp', ++ 'ajalibraries/ajantv2/src/ntv2bitfile.cpp', ++ 'ajalibraries/ajantv2/src/ntv2bitfilemanager.cpp', ++ 'ajalibraries/ajantv2/src/ntv2card.cpp', ++ 'ajalibraries/ajantv2/src/ntv2config2022.cpp', ++ 'ajalibraries/ajantv2/src/ntv2config2110.cpp', ++ 'ajalibraries/ajantv2/src/ntv2configts2022.cpp', ++ 'ajalibraries/ajantv2/src/ntv2csclut.cpp', ++ 'ajalibraries/ajantv2/src/ntv2cscmatrix.cpp', ++ 'ajalibraries/ajantv2/src/ntv2debug.cpp', ++ 'ajalibraries/ajantv2/src/ntv2devicefeatures.cpp', ++ 'ajalibraries/ajantv2/src/ntv2devicescanner.cpp', ++ 'ajalibraries/ajantv2/src/ntv2discover.cpp', ++ 'ajalibraries/ajantv2/src/ntv2dma.cpp', ++ 'ajalibraries/ajantv2/src/ntv2dynamicdevice.cpp', ++ 'ajalibraries/ajantv2/src/ntv2hdmi.cpp', ++ 'ajalibraries/ajantv2/src/ntv2hevc.cpp', ++ 'ajalibraries/ajantv2/src/ntv2driverinterface.cpp', ++ 'ajalibraries/ajantv2/src/ntv2enhancedcsc.cpp', ++ 'ajalibraries/ajantv2/src/ntv2formatdescriptor.cpp', ++ 'ajalibraries/ajantv2/src/ntv2interrupts.cpp', ++ 'ajalibraries/ajantv2/src/ntv2konaflashprogram.cpp', ++ 'ajalibraries/ajantv2/src/lin/ntv2linuxdriverinterface.cpp', ++ 'ajalibraries/ajantv2/src/ntv2mailbox.cpp', ++ 'ajalibraries/ajantv2/src/ntv2mbcontroller.cpp', ++ 'ajalibraries/ajantv2/src/ntv2mcsfile.cpp', ++ 'ajalibraries/ajantv2/src/ntv2nubaccess.cpp', ++ 'ajalibraries/ajantv2/src/ntv2nubpktcom.cpp', ++ 'ajalibraries/ajantv2/src/ntv2publicinterface.cpp', ++ 'ajalibraries/ajantv2/src/ntv2register.cpp', ++ 
'ajalibraries/ajantv2/src/ntv2registerexpert.cpp',
++  'ajalibraries/ajantv2/src/ntv2resample.cpp',
++  'ajalibraries/ajantv2/src/ntv2routingexpert.cpp',
++  'ajalibraries/ajantv2/src/ntv2rp188.cpp',
++  'ajalibraries/ajantv2/src/ntv2serialcontrol.cpp',
++  'ajalibraries/ajantv2/src/ntv2signalrouter.cpp',
++  'ajalibraries/ajantv2/src/ntv2spiinterface.cpp',
++  'ajalibraries/ajantv2/src/ntv2subscriptions.cpp',
++  'ajalibraries/ajantv2/src/ntv2supportlogger.cpp',
++  'ajalibraries/ajantv2/src/ntv2transcode.cpp',
++  'ajalibraries/ajantv2/src/ntv2utf8.cpp',
++  'ajalibraries/ajantv2/src/ntv2utils.cpp',
++  'ajalibraries/ajantv2/src/ntv2verticalfilter.cpp',
++  'ajalibraries/ajantv2/src/ntv2vpid.cpp',
++  'ajalibraries/ajantv2/src/ntv2vpidfromspec.cpp',
++  'ajalibraries/ajantv2/src/ntv2task.cpp',
++  'ajalibraries/ajantv2/src/ntv2testpatterngen.cpp',
++]
++
++ajantv2_args = [
++  '-D_REENTRANT',
++  '-DAJASTATIC',
++  '-DAJALinux',
++  '-DAJA_LINUX',
++  '-D_LARGEFILE_SOURCE',
++  '-D_LARGEFILE64_SOURCE',
++  '-D_FILE_OFFSET_BITS=64',
++]
++
++ajantv2_inc = include_directories(
++  'ajalibraries/ajaanc/includes',
++  'ajalibraries/ajantv2/includes',
++  'ajalibraries/ajantv2/src',
++  'ajalibraries/ajantv2/src/lin',
++  'ajalibraries',
++  'ajalibraries/ajabase',
++)
++
++libajantv2 = static_library(
++  'libajantv2',
++  sources: ajantv2_sources,
++  cpp_args: ajantv2_args,
++  include_directories: ajantv2_inc,
++  pic: true,
++  override_options: ['cpp_eh=default', 'werror=false'],
++  install: false
++)
++
++libajantv2_dep = declare_dependency(
++  link_with: libajantv2,
++  include_directories: ajantv2_inc,
++)
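++
++# libajantv2_dep is what the [provide] entry in subprojects/ntv2.wrap points
++# at, so the plugin's meson.build can resolve dependency('libajantv2') from
++# this subproject whenever no aja-sdk-dir option is given.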