diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index bc7609dda2..0339b4a0a8 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -116,7 +116,7 @@ trigger:
.fedora image:
variables:
- FDO_DISTRIBUTION_VERSION: '31'
+ FDO_DISTRIBUTION_VERSION: '34'
FDO_REPO_SUFFIX: "$FEDORA_AMD64_SUFFIX"
FDO_DISTRIBUTION_TAG: "$FEDORA_TAG-$GST_UPSTREAM_BRANCH"
FDO_DISTRIBUTION_EXEC: 'GIT_BRANCH=$CI_COMMIT_REF_NAME GIT_URL=$CI_REPOSITORY_URL bash ci/docker/fedora/prepare.sh'
diff --git a/.gitlab-image-tags.yml b/.gitlab-image-tags.yml
index c15dc8814b..0f0ae44e1a 100644
--- a/.gitlab-image-tags.yml
+++ b/.gitlab-image-tags.yml
@@ -5,7 +5,7 @@ variables:
# If you are hacking on them or need a them to rebuild, its enough
# to change any part of the string of the image you want.
###
- FEDORA_TAG: '2023-10-10.0'
+ FEDORA_TAG: '2023-08-31.1-f34'
INDENT_TAG: '2023-08-24.3'
diff --git a/ci/docker/fedora/prepare.sh b/ci/docker/fedora/prepare.sh
index 96ae09d84b..dc264be5e1 100644
--- a/ci/docker/fedora/prepare.sh
+++ b/ci/docker/fedora/prepare.sh
@@ -1,8 +1,11 @@
+#! /bin/bash
+
set -eux
# Fedora base image disable installing documentation files. See https://pagure.io/atomic-wg/issue/308
# We need them to cleanly build our doc.
-sed -i "s/tsflags=nodocs//g" /etc/dnf/dnf.conf
+sed -i '/tsflags=nodocs/d' /etc/dnf/dnf.conf
+dnf -y swap coreutils-single coreutils-full
dnf install -y git-core dnf-plugins-core python3-pip
@@ -28,6 +31,7 @@ dnf install -y \
libcaca-devel \
libdav1d \
libdav1d-devel \
+ libdrm-devel \
ccache \
cmake \
clang-devel \
@@ -39,6 +43,7 @@ dnf install -y \
gdb \
git-lfs \
glslc \
+ gtk-doc \
gtk3 \
gtk3-devel \
gtk4 \
@@ -62,10 +67,20 @@ dnf install -y \
flex \
flite \
flite-devel \
+ libsoup \
+ libsoup-devel \
mono-devel \
procps-ng \
patch \
- qt5-devel \
+ qconf \
+ qt5-linguist \
+ qt5-qtbase-devel \
+ qt5-qtbase-private-devel \
+ qt5-qtdeclarative-devel \
+ qt5-qtquickcontrols2-devel \
+ qt5-qttools-common \
+ qt5-qtwayland-devel \
+ qt5-qtx11extras-devel \
redhat-rpm-config \
json-glib \
json-glib-devel \
@@ -74,15 +89,20 @@ dnf install -y \
libsodium-devel \
libunwind \
libunwind-devel \
+ libva-devel \
libyaml-devel \
libxml2-devel \
libxslt-devel \
llvm-devel \
log4c-devel \
+ libxcb-devel \
+ libxkbcommon-devel \
+ libxkbcommon-x11-devel \
make \
nasm \
neon \
neon-devel \
+ ninja-build \
nunit \
npm \
opencv \
@@ -99,12 +119,14 @@ dnf install -y \
python3 \
python3-devel \
python3-libs \
+ python3-wheel \
python3-gobject \
python3-cairo \
python3-cairo-devel \
valgrind \
vulkan \
vulkan-devel \
+ vulkan-loader \
mesa-libGL \
mesa-libGL-devel \
mesa-libGLU \
@@ -119,9 +141,9 @@ dnf install -y \
mesa-libd3d-devel \
mesa-libOSMesa \
mesa-libOSMesa-devel \
+ mesa-dri-drivers \
mesa-vulkan-drivers \
- wpewebkit \
- wpewebkit-devel \
+ xset \
xorg-x11-server-utils \
xorg-x11-server-Xvfb
@@ -142,7 +164,11 @@ dnf debuginfo-install -y gtk3 \
libjpeg-turbo \
glib-networking \
libcurl \
+ libdrm \
libsoup \
+ libxcb \
+ libxkbcommon \
+ libxkbcommon-x11 \
nss \
nss-softokn \
nss-softokn-freebl \
@@ -162,6 +188,7 @@ dnf debuginfo-install -y gtk3 \
libffi \
libsrtp \
libunwind \
+ libdvdread \
mpg123-libs \
neon \
orc-compiler \
@@ -170,6 +197,8 @@ dnf debuginfo-install -y gtk3 \
pulseaudio-libs \
pulseaudio-libs-glib2 \
wavpack \
+ "libwayland-*" \
+ "wayland-*" \
webrtc-audio-processing \
ffmpeg \
ffmpeg-libs \
@@ -178,6 +207,7 @@ dnf debuginfo-install -y gtk3 \
libmpeg2 \
faac \
fdk-aac \
+ vulkan-loader \
x264 \
x264-libs \
x265 \
@@ -192,6 +222,7 @@ dnf builddep -y gstreamer1 \
gstreamer1-plugins-base \
gstreamer1-plugins-good \
gstreamer1-plugins-good-extras \
+ gstreamer1-plugins-good-qt \
gstreamer1-plugins-ugly \
gstreamer1-plugins-ugly-free \
gstreamer1-plugins-bad-free \
@@ -202,20 +233,21 @@ dnf builddep -y gstreamer1 \
gstreamer1-vaapi \
python3-gstreamer1
-dnf remove -y meson
-# FIXME: Install ninja from rpm when we update our base image as we fail building
-# documentation with rust plugins as we the version from F31 we hit:
-# `ninja: error: build.ninja:26557: multiple outputs aren't (yet?) supported by depslog; bring this up on the mailing list if it affects you
-pip3 install meson==1.1.1 hotdoc==0.15 python-gitlab ninja tomli
+dnf remove -y meson -x ninja-build
+pip3 install meson==1.2.3 hotdoc==0.15 python-gitlab tomli
# Remove gst-devel packages installed by builddep above
dnf remove -y "gstreamer1*devel"
-# FIXME: Why does installing directly with dnf doesn't actually install
-# the documentation files?
-dnf download glib2-doc gdk-pixbuf2-devel*x86_64* gtk3-devel-docs gtk4-devel-docs
-rpm -i --reinstall *.rpm
-rm -f *.rpm
+dnf install -y glib2-doc gdk-pixbuf2-devel gtk3-devel-docs gtk4-devel-docs libsoup-doc
+
+# Install gdk-pixbuf manually as fedora 34 doesn't build the docs/.devhelp2
+git clone --branch gdk-pixbuf-2-40 https://gitlab.gnome.org/GNOME/gdk-pixbuf.git
+cd gdk-pixbuf
+meson setup _build --prefix=/usr -Ddocs=true
+meson install -C _build
+cd ..
+rm -rf gdk-pixbuf
# Install Rust
RUSTUP_VERSION=1.26.0
diff --git a/girs/GES-1.0.gir b/girs/GES-1.0.gir
index 9e4a497255..de3541e37b 100644
--- a/girs/GES-1.0.gir
+++ b/girs/GES-1.0.gir
@@ -15756,8 +15756,9 @@ account.
Crossfade
-
- Similar to crossfade, but fade in the front video without fading out the background one (Since: 1.22)
+
+ Similar to crossfade, but fade in the front video without fading out
+the background one
@@ -16240,6 +16241,37 @@ formatter asset.
+
+ GES needs to be initialized after GStreamer itself. This section
+contains the various functions to do so.
+
+
+ A #GESSourceClip that overlays timing information on top.
+
+## Asset
+
+The default asset ID is "time-overlay" (of type #GES_TYPE_SOURCE_CLIP),
+but the framerate and video size can be overridden using an ID of the form:
+
+```
+time-overlay, framerate=60/1, width=1920, height=1080, max-duration=5.0
+```
+
+## Children properties
+
+{{ libs/GESTimeOverlayClip-children-props.md }}
+
+## Symbols
+
+
+ GStreamer Editing Services data types
+
+
+ The #GESUriClipAsset is a special #GESAsset that lets you handle
+the media file to use inside the GStreamer Editing Services. It has APIs that
+let you get information about the media. Also, the tags found in the media file are
+set as Metadata of the Asset.
+
Initialize the GStreamer Editing Service. Call this before any usage of
GES. You should take care of initilizing GStreamer before calling this
diff --git a/girs/Gst-1.0.gir b/girs/Gst-1.0.gir
index 9ff9078d27..1fcbe5f280 100644
--- a/girs/Gst-1.0.gir
+++ b/girs/Gst-1.0.gir
@@ -1865,13 +1865,16 @@ this message should chain up to the parent class implementation so the
GstBinFlags are a set of flags specific to bins. Most are set/used
internally. They can be checked using the GST_OBJECT_FLAG_IS_SET() macro,
and (un)set using GST_OBJECT_FLAG_SET() and GST_OBJECT_FLAG_UNSET().
-
+
+ Don't resync a state change when elements are added or linked in the bin
-
+
+ Indicates whether the bin can handle elements that add/remove source pads
+at any point in time without first posting a no-more-pads signal.
- the last enum in the series of flags for bins.
-Derived classes can use this as first value in a list of flags.
+ The last enum in the series of flags for bins. Derived classes can use this
+as first value in a list of flags.
@@ -3512,7 +3515,8 @@ function to specify which items should be copied.
flag indicating that buffer memory should be
merged
-
+
+ flag indicating that memory should always be copied instead of reffed
@@ -3567,9 +3571,17 @@ function to specify which items should be copied.
this flag is set when memory of the buffer
is added/removed
-
+
+ Elements which write to disk or permanent storage should ensure the data
+is synced after writing the contents of this buffer.
-
+
+ This buffer is important and should not be dropped.
+
+This can be used to mark important buffers, e.g. to flag RTP packets
+carrying keyframes or codec setup data for RTP Forward Error Correction
+purposes, or to prevent still video frames from being dropped by elements
+due to QoS.
additional media specific flags can be added starting from
@@ -9555,7 +9567,8 @@ not be extended or allocated using a custom allocator.
clock can be slaved to a master clock
-
+
+ clock needs to be synced before it can be used
subclasses can add additional flags starting from this flag
@@ -10508,8 +10521,8 @@ gst_custom_meta_get_structure().
parent #GstMeta
-
- a #GstStructure containing custom metadata. (Since: 1.24)
+
+ #GstStructure containing custom metadata.
@@ -37244,8 +37257,8 @@ Either @new_query or the #GstQuery pointed to by @old_query may be %NULL.
the bitrate query (since 1.16)
-
- Query stream selection capability (Since: 1.22)
+
+ Query stream selection capability.
Gets the #GstQueryTypeFlags associated with @type.
@@ -49202,7 +49215,7 @@ Free-function: g_free
a #GstValueDeserializeFunc
-
+
a #GstValueDeserializeWithPSpecFunc
@@ -50693,6 +50706,12 @@ everything to log level 2.
+
+ These utility functions help with generating dot graphs which can
+be rendered with [graphviz] to multiple formats.
+
+[graphviz]: https://graphviz.org/
+
Clean up any resources created by GStreamer in gst_init().
@@ -51160,6 +51179,271 @@ parent process.
+
+ GStreamer is a framework for constructing graphs of various filters
+(termed elements here) that will handle streaming media.
+
+Any discrete (packetizable) media type is supported, with provisions for
+automatically determining source type.
+
+Formatting/framing information is provided with a powerful negotiation
+framework.
+
+Plugins are heavily used to provide for all elements, allowing one to
+construct plugins outside of the GST library, even released binary-only if
+licenses require it (please don't).
+
+GStreamer covers a wide range of use cases including: playback, recording,
+editing, serving streams, voice over ip and video calls.
+
+The `GStreamer` library should be initialized with
+gst_init() before it can be used. You should pass pointers to the main `argc`
+and `argv` variables so that GStreamer can process its own command line
+options, as shown in the following example.
+
+## Initializing the gstreamer library
+
+``` C
+int main (int argc, char *argv[])
+{
+ // initialize the GStreamer library
+ gst_init (&argc, &argv);
+ ...
+}
+```
+
+It's allowed to pass two %NULL pointers to gst_init() in case you don't want
+to pass the command line args to GStreamer.
+
+You can also use #GOptionContext to initialize your own parameters as shown in
+the next code fragment:
+
+## Initializing own parameters when initializing GStreamer
+
+``` C
+static gboolean stats = FALSE;
+...
+int
+main (int argc, char *argv[])
+{
+ GOptionEntry options[] = {
+ {"tags", 't', 0, G_OPTION_ARG_NONE, &tags,
+ N_("Output tags (also known as metadata)"), NULL},
+ {NULL}
+ };
+ ctx = g_option_context_new ("[ADDITIONAL ARGUMENTS]");
+ g_option_context_add_main_entries (ctx, options, GETTEXT_PACKAGE);
+ g_option_context_add_group (ctx, gst_init_get_option_group ());
+ if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
+ g_print ("Error initializing: %s\n", GST_STR_NULL (err->message));
+ exit (1);
+ }
+ g_option_context_free (ctx);
+...
+}
+```
+
+Use gst_version() to query the library version at runtime or use the
+GST_VERSION_* macros to find the version at compile time. Optionally
+gst_version_string() returns a printable string.
+
+The gst_deinit() call is used to clean up all internal resources used
+by GStreamer. It is mostly used in unit tests to check for leaks.
+
+
+ Please do not use these in new code.
+These symbols are only available by defining GST_DISABLE_DEPRECATED.
+This can be done in CFLAGS for compiling old code.
+
+
+ GStreamer elements can throw non-fatal warnings and fatal errors.
+Higher-level elements and applications can programmatically filter
+the ones they are interested in or can recover from,
+and have a default handler handle the rest of them.
+
+The rest of this section will use the term "error"
+to mean both (non-fatal) warnings and (fatal) errors; they are treated
+similarly.
+
+Errors from elements are the combination of a #GError and a debug string.
+The #GError contains:
+- a domain type: CORE, LIBRARY, RESOURCE or STREAM
+- a code: an enum value specific to the domain
+- a translated, human-readable message
+- a non-translated additional debug string, which also contains
+- file and line information
+
+Elements do not have the context required to decide what to do with
+errors. As such, they should only inform about errors, and stop their
+processing. In short, an element doesn't know what it is being used for.
+
+It is the application or compound element using the given element that
+has more context about the use of the element. Errors can be received by
+listening to the #GstBus of the element/pipeline for #GstMessage objects with
+the type %GST_MESSAGE_ERROR or %GST_MESSAGE_WARNING. The thrown errors should
+be inspected, and filtered if appropriate.
+
+An application is expected to, by default, present the user with a
+dialog box (or an equivalent) showing the error message. The dialog
+should also allow a way to get at the additional debug information,
+so the user can provide bug reporting information.
+
+A compound element is expected to forward errors by default higher up
+the hierarchy; this is done by default in the same way as for other types
+of #GstMessage.
+
+When applications or compound elements trigger errors that they can
+recover from, they can filter out these errors and take appropriate action.
+For example, an application that gets an error from xvimagesink
+that indicates all XVideo ports are taken, the application can attempt
+to use another sink instead.
+
+Elements throw errors using the #GST_ELEMENT_ERROR convenience macro:
+
+## Throwing an error
+
+ |[
+ GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND,
+ (_("No file name specified for reading.")), (NULL));
+ ]|
+
+Things to keep in mind:
+
+ * Don't go off inventing new error codes. The ones
+ currently provided should be enough. If you find your type of error
+ does not fit the current codes, you should use FAILED.
+ * Don't provide a message if the default one suffices.
+ This keeps messages more uniform. Use (%NULL) - not forgetting the
+ parentheses.
+ * If you do supply a custom message, it should be
+ marked for translation. The message should start with a capital
+ and end with a period. The message should describe the error in short,
+ in a human-readable form, and without any complex technical terms.
+ A user interface will present this message as the first thing a user
+ sees. Details, technical info, ... should go in the debug string.
+
+ * The debug string can be as you like. Again, use (%NULL)
+ if there's nothing to add - file and line number will still be
+ passed. #GST_ERROR_SYSTEM can be used as a shortcut to give
+ debug information on a system call error.
+
+
+ GstFormats functions are used to register a new format to the gstreamer
+core. Formats can be used to perform seeking or conversions/query
+operations.
+
+
+ GStreamer's debugging subsystem is an easy way to get information about what
+the application is doing. It is not meant for programming errors. Use GLib
+methods (g_warning and friends) for that.
+
+The debugging subsystem works only after GStreamer has been initialized
+- for example by calling gst_init().
+
+The debugging subsystem is used to log informational messages while the
+application runs. Each message has some properties attached to it. Among
+these properties are the debugging category, the severity (called "level"
+here) and an optional #GObject it belongs to. Each of these messages is sent
+to all registered debugging handlers, which then handle the messages.
+GStreamer attaches a default handler on startup, which outputs requested
+messages to stderr.
+
+Messages are output by using shortcut macros like #GST_DEBUG,
+#GST_CAT_ERROR_OBJECT or similar. These all expand to calling gst_debug_log()
+with the right parameters.
+The only thing a developer will probably want to do is define his own
+categories. This is easily done with 3 lines. At the top of your code,
+declare
+the variables and set the default category.
+|[<!-- language="C" -->
+ GST_DEBUG_CATEGORY_STATIC (my_category); // define category (statically)
+ #define GST_CAT_DEFAULT my_category // set as default
+]|
+After that you only need to initialize the category.
+|[<!-- language="C" -->
+ GST_DEBUG_CATEGORY_INIT (my_category, "my category",
+ 0, "This is my very own");
+]|
+Initialization must be done before the category is used first.
+Plugins do this
+in their plugin_init function, libraries and applications should do that
+during their initialization.
+
+The whole debugging subsystem can be disabled at build time with passing the
+--disable-gst-debug switch to configure. If this is done, every function,
+macro and even structs described in this file evaluate to default values or
+nothing at all.
+So don't take addresses of these functions or use other tricks.
+If you must do that for some reason, there is still an option.
+If the debugging
+subsystem was compiled out, GST_DISABLE_GST_DEBUG is defined in
+<gst/gst.h>,
+so you can check that before doing your trick.
+Disabling the debugging subsystem will give you a slight (read: unnoticeable)
+speed increase and will reduce the size of your compiled code. The GStreamer
+library itself becomes around 10% smaller.
+
+Please note that there are naming conventions for the names of debugging
+categories. These are explained at GST_DEBUG_CATEGORY_INIT().
+
+
+ GParamSpec implementations specific to GStreamer.
+
+
+ These function allow to create a pipeline based on the syntax used in the
+gst-launch-1.0 utility (see man-page for syntax documentation).
+
+Please note that these functions take several measures to create
+somewhat dynamic pipelines. Due to that such pipelines are not always
+reusable (set the state to NULL and back to PLAYING).
+
+
+ The GstProtectionMeta class enables the information needed to decrypt a
+#GstBuffer to be attached to that buffer.
+
+Typically, a demuxer element would attach GstProtectionMeta objects
+to the buffers that it pushes downstream. The demuxer would parse the
+protection information for a video/audio frame from its input data and use
+this information to populate the #GstStructure @info field,
+which is then encapsulated in a GstProtectionMeta object and attached to
+the corresponding output buffer using the gst_buffer_add_protection_meta()
+function. The information in this attached GstProtectionMeta would be
+used by a downstream decrypter element to recover the original unencrypted
+frame.
+
+
+ A #GstStream is a high-level object defining a stream of data which is, or
+can be, present in a #GstPipeline.
+
+It is defined by a unique identifier, a "Stream ID". A #GstStream does not
+automatically imply the stream is present within a pipeline or element.
+
+Any element that can introduce new streams in a pipeline should create the
+appropriate #GstStream object, and can convey that object via the
+%GST_EVENT_STREAM_START event and/or the #GstStreamCollection.
+
+Elements that do not modify the nature of the stream can add extra information
+on it (such as enrich the #GstCaps, or #GstTagList). This is typically done
+by parsing elements.
+
+
+ GValue implementations specific to GStreamer.
+
+Note that operations on the same #GValue from multiple threads may lead to
+undefined behaviour.
+
+
+ Use the GST_VERSION_* macros e.g. when defining own plugins. The GStreamer
+runtime checks if these plugin and core version match and refuses to use a
+plugin compiled against a different version of GStreamer.
+You can also use the macros to keep the GStreamer version information in
+your application.
+
+Use the gst_version() function if you want to know which version of
+GStreamer you are currently linked against.
+
+The version macros get defined by including "gst/gst.h".
+
Convert @value to a gdouble.
diff --git a/girs/GstAllocators-1.0.gir b/girs/GstAllocators-1.0.gir
index f1b8b2ef08..b18449b2e7 100644
--- a/girs/GstAllocators-1.0.gir
+++ b/girs/GstAllocators-1.0.gir
@@ -164,7 +164,8 @@ number and the height is scaled according to the sub-sampling.
-
+
+ Parent Class.
diff --git a/girs/GstAudio-1.0.gir b/girs/GstAudio-1.0.gir
index 4aade83bc5..a5ee361f15 100644
--- a/girs/GstAudio-1.0.gir
+++ b/girs/GstAudio-1.0.gir
@@ -11691,6 +11691,16 @@ the #GstDsdFormat if there is one, or NULL otherwise.
+
+ This library contains some helper functions for audio elements.
+
+
+ This library contains some helper functions for multichannel audio.
+
+
+ This module contains some helper functions for encapsulating various
+audio formats in IEC 61937 headers and padding.
+
diff --git a/girs/GstBadBaseCameraBin-1.0.gir b/girs/GstBadBaseCameraBin-1.0.gir
index ef2c910557..9ea665eb1e 100644
--- a/girs/GstBadBaseCameraBin-1.0.gir
+++ b/girs/GstBadBaseCameraBin-1.0.gir
@@ -565,5 +565,50 @@ on the bus
+
+ Common helper functions for #GstCameraBin.
+
+
+ Base class for the camera source bin used by camerabin for capture.
+Sophisticated camera hardware can derive from this baseclass and map the
+features to this interface.
+
+The design mandates that the subclasses implement the following features and
+behaviour:
+
+* 3 pads: viewfinder, image capture, video capture
+
+During `construct_pipeline()` vmethod a subclass can add several elements into
+the bin and expose 3 srcs pads as ghostpads implementing the 3 pad templates.
+
+However the subclass is responsible for adding the pad templates for the
+source pads and they must be named "vidsrc", "imgsrc" and "vfsrc". The pad
+templates should be installed in the subclass' class_init function, like so:
+|[
+static void
+my_element_class_init (GstMyElementClass *klass)
+{
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ // pad templates should be a #GstStaticPadTemplate with direction
+ // #GST_PAD_SRC and name "vidsrc", "imgsrc" and "vfsrc"
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &vidsrc_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &imgsrc_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &vfsrc_template);
+ // see #GstElementDetails
+ gst_element_class_set_details (gstelement_class, &details);
+}
+]|
+
+It is also possible to add regular pads from the subclass and implement the
+dataflow methods on these pads. This way all functionality can be implemented
+directly in the subclass without extra elements.
+
+The src will receive the capture mode from `GstCameraBin2` on the
+#GstBaseCameraSrc:mode property. Possible capture modes are defined in
+#GstCameraBinMode.
+
diff --git a/girs/GstBase-1.0.gir b/girs/GstBase-1.0.gir
index a9c05a41fb..40782a18d6 100644
--- a/girs/GstBase-1.0.gir
+++ b/girs/GstBase-1.0.gir
@@ -13847,6 +13847,12 @@ ISO-8859-1).
+
+ Utility functions for elements doing typefinding:
+gst_type_find_helper() does typefinding in pull mode, while
+gst_type_find_helper_for_buffer() is useful for elements needing to do
+typefinding in push mode from a chain function.
+
Allocates a new #GstQueueArray object with an initial
queue size of @initial_size.
diff --git a/girs/GstCheck-1.0.gir b/girs/GstCheck-1.0.gir
index 2bb5ddc2e5..f729651ce1 100644
--- a/girs/GstCheck-1.0.gir
+++ b/girs/GstCheck-1.0.gir
@@ -3468,6 +3468,22 @@ data flow is inconsistent.
+
+ These macros and functions are for internal use of the unit tests found
+inside the 'check' directories of various GStreamer packages.
+
+One notable feature is that one can use the environment variables GST_CHECKS
+and GST_CHECKS_IGNORE to select which tests to run or skip. Both variables
+can contain a comma separated list of test name globs (e.g. test_*).
+
+
+ These macros and functions are for internal use of the unit tests found
+inside the 'check' directories of various GStreamer packages.
+
+
+ These macros and functions are for internal use of the unit tests found
+inside the 'check' directories of various GStreamer packages.
+
Creates a new harness. Works like gst_harness_new_with_padnames(), except it
assumes the #GstElement sinkpad is named "sink" and srcpad is named "src"
diff --git a/girs/GstCodecs-1.0.gir b/girs/GstCodecs-1.0.gir
index 3d92504037..cc58e6407b 100644
--- a/girs/GstCodecs-1.0.gir
+++ b/girs/GstCodecs-1.0.gir
@@ -4575,18 +4575,17 @@ and can be used as a binary.
-
- specifies how the transform size is determined
+
+ Specifies how the transform size is determined.
-
- is a derived syntax element that specifies the type of
- inter prediction to be used
+
+ Is a derived syntax element that specifies the type of
+inter prediction to be used.
-
- modification to the probabilities encoded in the
- bitstream
+
+ Modification to the probabilities encoded in the bitstream.
diff --git a/girs/GstCuda-1.0.gir b/girs/GstCuda-1.0.gir
index 6e08fdf886..601965adf9 100644
--- a/girs/GstCuda-1.0.gir
+++ b/girs/GstCuda-1.0.gir
@@ -847,11 +847,13 @@ CUDA stream is in use
CUDA memory allocation method
-
+
-
+
+ Memory allocated via cuMemAlloc or cuMemAllocPitch
-
+
+ Memory allocated via cuMemCreate and cuMemMap
@@ -860,11 +862,14 @@ CUDA stream is in use
CUDA memory transfer flags
-
+
+ the device memory needs downloading to the staging memory
-
+
+ the staging memory needs uploading to the device memory
-
+
+ the device memory needs synchronization
diff --git a/girs/GstGL-1.0.gir b/girs/GstGL-1.0.gir
index 87d7645c00..9f79e56986 100644
--- a/girs/GstGL-1.0.gir
+++ b/girs/GstGL-1.0.gir
@@ -1096,13 +1096,16 @@ is available and is not available within this element.
The #GstGLContext in use by this #GstGLBaseMixer
-
+
+ parent #GstVideoAggregator
-
+
+ the currently configured #GstGLDisplay
-
+
+ the currently configured #GstGLContext
@@ -1116,10 +1119,12 @@ is available and is not available within this element.
-
+
+ the parent #GstVideoAggregatorClass
-
+
+ the logical-OR of #GstGLAPI's supported by this element
@@ -1157,7 +1162,8 @@ is available and is not available within this element.
-
+
+ parent #GstVideoAggregatorPad
@@ -1168,7 +1174,8 @@ is available and is not available within this element.
-
+
+ parent #GstVideoAggregatorPadClass
@@ -3445,15 +3452,21 @@ display's object lock held.
Mesa3D GBM display
-
+
+ EGLDevice display.
-
+
+ EAGL display.
-
+
+ WinRT display.
-
+
+ Android display.
-
+
+ Mesa3D surfaceless display using the EGL_PLATFORM_SURFACELESS_MESA
+extension.
any display type
@@ -4815,10 +4828,11 @@ Intended for use within implementations of
-
+
-
+
+ the configured output #GstCaps
@@ -4832,7 +4846,7 @@ Intended for use within implementations of
-
+
@@ -4893,10 +4907,12 @@ manually.
-
+
+ parent #GstGLBaseMixerPad
-
+
+ the current input texture for this pad
@@ -4907,7 +4923,8 @@ manually.
-
+
+ parent #GstGLBaseMixerPadClass
@@ -11650,6 +11667,17 @@ or a valid GLSL version and/or profile.
+
+ Provides some helper API for dealing with OpenGL API's and platforms
+
+
+ A common list of well-known values for what a config retrievable from or set
+on a `GstGLContext` may contain.
+
+
+ Some useful utilities for converting between various formats and OpenGL
+formats.
+
diff --git a/girs/GstMpegts-1.0.gir b/girs/GstMpegts-1.0.gir
index c7ba32c4e5..ea369e3251 100644
--- a/girs/GstMpegts-1.0.gir
+++ b/girs/GstMpegts-1.0.gir
@@ -824,6 +824,9 @@ Consult the relevant specifications for more details.
+ Partial Transport Stream descriptor. Only present in SIT Sections.
+
+See also: %GST_MPEGTS_SECTION_SIT, %GstMpegtsSIT
@@ -930,7 +933,8 @@ Consult the relevant specifications for more details.
-
+
+ Provide all available audio programmes for user selection
@@ -2816,13 +2820,17 @@ See ISO/IEC 13818-1:2018 Section 2.6.60 and Section 2.6.61 for more information.
metadata_descriptor metadata_format valid values. See ISO/IEC 13818-1:2018(E) Table 2-85.
-
+
+ ISO/IEC 15938-1 TeM.
-
+
+ ISO/IEC 15938-1 BiM.
-
+
+ Defined by metadata application format.
-
+
+ Defined by metadata_format_identifier field.
@@ -3187,10 +3195,14 @@ Corresponds to table 6 of ETSI EN 300 468 (v1.13.0)
-
+
+ When encrypted, or when encountering an unknown command type,
+we may still want to pass the sit through.
-
+
+ When the SIT was constructed by the application, splice times
+are in running_time and must be translated before packetizing.
@@ -3293,10 +3305,12 @@ Corresponds to table 6 of ETSI EN 300 468 (v1.13.0)
-
+
+ The UTC time of the signaled splice event
-
+
+ Per-PID splice time information
@@ -4195,7 +4209,8 @@ see also: #GstMpegtsSectionTableID and other variants.
A/90: Data Service Table (DST)
-
+
+ A/57B: Program Identifier Table.
A/90: Network Resources Table (NRT)
@@ -4242,9 +4257,11 @@ see also: #GstMpegtsSectionTableID
Bouquet Association Table (BAT)
-
+
+ ETSI TS 102 006: Update Notification Table (UNT)
-
+
+ ETSI EN 303 560: Downloadable Font Info
Event Information Table (EIT), Actual Transport Stream, present/following
@@ -4297,7 +4314,8 @@ see also: #GstMpegtsSectionTableID
ETSI TS 102 772: MPE-IFEC Section
-
+
+ ETSI TS 102 809: Protection Message Section
Discontinuity Information Table (DIT)
@@ -4401,11 +4419,14 @@ see also #GstMpegtsSectionATSCTableID, #GstMpegtsSectionDVBTableID, and
IPMP Control Information
-
+
+ ISO/IEC 14496 Section.
-
+
+ ISO/IEC 23001-11 (Green Access Unit) Section.
-
+
+ ISO/ISO 23001-10 (Quality Access Unit) Section.
DSM-CC Multi-Protocol Encapsulated (MPE) Data
@@ -4467,7 +4488,8 @@ use.
Time Offset Table (EN 300 468)
-
+
+ Selection Information Table (EN 300 468)
ATSC Terrestrial Virtual Channel Table (A65)
@@ -4644,7 +4666,8 @@ profiles defined in Annex A for service-compatible stereoscopic 3D services
IPMP stream
-
+
+ User Private stream id (used for VC-1) as defined by SMPTE RP227.
@@ -5122,6 +5145,114 @@ of #GstMpegtsDescriptor
+
+ This contains the various descriptors defined by the ATSC specifications
+
+
+ The list of section types defined and used by the ATSC specifications can be
+seen in %GstMpegtsSectionATSCTableID.
+
+# Supported ATSC MPEG-TS sections
+These are the sections for which parsing and packetizing code exists.
+
+## Master Guide Table (MGT)
+See:
+* gst_mpegts_section_get_atsc_mgt()
+* %GstMpegtsAtscMGT
+* %GstMpegtsAtscMGTTable
+* gst_mpegts_atsc_mgt_new()
+
+## Terrestrial (TVCT) and Cable (CVCT) Virtual Channel Table
+See:
+* gst_mpegts_section_get_atsc_tvct()
+* gst_mpegts_section_get_atsc_cvct()
+* %GstMpegtsAtscVCT
+* %GstMpegtsAtscVCTSource
+
+## Rating Region Table (RRT)
+See:
+* gst_mpegts_section_get_atsc_rrt()
+* %GstMpegtsAtscRRT
+* gst_mpegts_atsc_rrt_new()
+
+## Event Information Table (EIT)
+See:
+* gst_mpegts_section_get_atsc_eit()
+* %GstMpegtsAtscEIT
+* %GstMpegtsAtscEITEvent
+
+## Extended Text Table (ETT)
+See:
+* gst_mpegts_section_get_atsc_ett()
+* %GstMpegtsAtscETT
+
+## System Time Table (STT)
+See:
+* gst_mpegts_section_get_atsc_stt()
+* %GstMpegtsAtscSTT
+* gst_mpegts_atsc_stt_new()
+
+# API
+
+
+ The list of section types defined and used by the DVB specifications can be
+seen in %GstMpegtsSectionDVBTableID.
+
+# Supported DVB MPEG-TS sections
+These are the sections for which parsing and packetizing code exists.
+
+## Network Information Table (NIT)
+See:
+* gst_mpegts_section_get_nit()
+* %GstMpegtsNIT
+* %GstMpegtsNITStream
+* gst_mpegts_nit_new()
+
+## Service Description Table (SDT)
+See:
+* gst_mpegts_section_get_sdt()
+* %GstMpegtsSDT
+* %GstMpegtsSDTService
+* gst_mpegts_sdt_new()
+
+## Bouquet Association Table (BAT)
+See:
+* gst_mpegts_section_get_bat()
+* %GstMpegtsBAT
+* %GstMpegtsBATStream
+
+## Event Information Table (EIT)
+See:
+* gst_mpegts_section_get_eit()
+* %GstMpegtsEIT
+* %GstMpegtsEITEvent
+
+## Time Date Table (TDT)
+See:
+* gst_mpegts_section_get_tdt()
+
+## Time Offset Table (TOT)
+See:
+* gst_mpegts_section_get_tot()
+* %GstMpegtsTOT
+
+## Selection Information Table (SIT)
+See:
+* gst_mpegts_section_get_sit()
+* %GstMpegtsSIT
+* %GstMpegtsSITService
+
+# API
+
+
+ This contains the various descriptors defined by the ISDB specifications
+
+
+ This contains the %GstMpegtsSection relevant to SCTE specifications.
+
+
+ Before any usage of this library, the initialization function should be called.
+
Initializes the MPEG-TS helper library. Must be called before any
usage.
diff --git a/girs/GstNet-1.0.gir b/girs/GstNet-1.0.gir
index cc9fc3df24..e2cedcbc0f 100644
--- a/girs/GstNet-1.0.gir
+++ b/girs/GstNet-1.0.gir
@@ -756,6 +756,10 @@ is no such metadata on @buffer.
+
+ GstNetUtils gathers network utility functions, enabling use for all
+gstreamer plugins.
+
diff --git a/girs/GstPbutils-1.0.gir b/girs/GstPbutils-1.0.gir
index 63718f99ac..877881c222 100644
--- a/girs/GstPbutils-1.0.gir
+++ b/girs/GstPbutils-1.0.gir
@@ -2944,9 +2944,9 @@ describe the format of the caps.
Container format can store any kind of
stream type.
-
- Caps describe a metadata
- format, or a container format that can store metadata.
+
+ Caps describe a metadata format, or a container format that can store
+metadata.
@@ -3708,6 +3708,324 @@ invalid Opus caps.
+
+ Functions to create and handle encoding profiles.
+
+Encoding profiles describe the media types and settings one wishes to use for
+an encoding process. The top-level profiles are commonly
+#GstEncodingContainerProfile(s) (which contains a user-readable name and
+description along with which container format to use). These, in turn,
+reference one or more #GstEncodingProfile(s) which indicate which encoding
+format should be used on each individual streams.
+
+#GstEncodingProfile(s) can be provided to the 'encodebin' element, which will
+take care of selecting and setting up the required elements to produce an
+output stream conforming to the specifications of the profile.
+
+The encoding profiles do not necessarily specify which #GstElement to use for
+the various encoding and muxing steps, as they allow specifying the format
+one wishes to use.
+
+Encoding profiles can be created at runtime by the application or loaded from
+(and saved to) file using the #GstEncodingTarget API.
+
+## The encoding profile serialization format
+
+Encoding profiles can be serialized to be used in the command line tools or
+to set it on other #GObject-s using #gst_util_set_object_arg for
+example.
+
+The serialization format aims at being simple to understand although flexible
+enough to describe any possible encoding profile. There are several ways to
+describe the profile depending on the context but the general idea is that it
+is a colon separated list of EncodingProfiles descriptions, the first one
+needs to describe a #GstEncodingContainerProfile and the following ones
+describe elementary streams.
+
+### Using encoders and muxer element factory name
+
+```
+ muxer_factory_name:video_encoder_factory_name:audio_encoder_factory_name
+```
+
+For example to encode a stream into a WebM container, with an OGG audio
+stream and a VP8 video stream, the serialized #GstEncodingProfile looks like:
+
+```
+ webmmux:vp8enc:vorbisenc
+```
+
+### Define the encoding profile in a generic way using caps:
+
+```
+ muxer_source_caps:video_encoder_source_caps:audio_encoder_source_caps
+```
+
+For example to encode a stream into a WebM container, with an OGG audio
+stream and a VP8 video stream, the serialized #GstEncodingProfile looks like:
+
+```
+ video/webm:video/x-vp8:audio/x-vorbis
+```
+
+It is possible to mix caps and element type names so you can specify a
+specific video encoder while using caps for other encoders/muxer.
+
+### Using preset
+
+You can also set the preset name of the encoding profile using the
+caps+preset_name syntax as in:
+
+```
+ video/webm:video/x-vp8+youtube-preset:audio/x-vorbis
+```
+
+### Setting properties on muxers or on the encoding profile itself
+
+Moreover, you can set the extra properties:
+
+ * `|element-properties,property1=true` (See
+ #gst_encoding_profile_set_element_properties)
+ * `|presence=true` (See #gst_encoding_profile_get_presence)
+ * `|single-segment=true` (See #gst_encoding_profile_set_single_segment)
+ * `|variable-framerate=true` (See
+   #gst_encoding_video_profile_set_variableframerate)
+
+for example:
+
+```
+ video/webm:video/x-vp8|presence=1|element-properties,target-bitrate=500000:audio/x-vorbis
+```
+
+### Enforcing properties to the stream itself (video size, number of audio channels, etc..)
+
+You can also use the `restriction_caps->encoded_format_caps` syntax to
+specify the restriction caps to be set on a #GstEncodingProfile
+
+It corresponds to the restriction #GstCaps to apply before the encoder that
+will be used in the profile (See #gst_encoding_profile_get_restriction). The
+fields present in restriction caps are properties of the raw stream (that is,
+before encoding), such as height and width for video and depth and sampling
+rate for audio. This property does not make sense for muxers. See
+#gst_encoding_profile_get_restriction for more details.
+
+To force a video stream to be encoded with a Full HD resolution (using WebM
+as the container format, VP8 as the video codec and Vorbis as the audio
+codec), you should use:
+
+```
+ "video/webm:video/x-raw,width=1920,height=1080->video/x-vp8:audio/x-vorbis"
+```
+
+> NOTE: Make sure to enclose into quotes to avoid '>' to be reinterpreted by
+> the shell.
+
+In the case you are specifying encoders directly, the following is also
+possible:
+
+```
+ matroskamux:x264enc,width=1920,height=1080:audio/x-vorbis
+```
+
+## Some serialized encoding formats examples
+
+### MP3 audio and H264 in MP4**
+
+```
+ video/quicktime,variant=iso:video/x-h264:audio/mpeg,mpegversion=1,layer=3
+```
+
+### Vorbis and theora in OGG
+
+```
+ application/ogg:video/x-theora:audio/x-vorbis
+```
+
+### AC3 and H264 in MPEG-TS
+
+```
+ video/mpegts:video/x-h264:audio/x-ac3
+```
+
+## Loading a profile from encoding targets
+
+Anywhere you have to use a string to define a #GstEncodingProfile, you
+can load it from a #GstEncodingTarget using the following syntaxes:
+
+```
+ target_name[/profilename/category]
+```
+
+or
+
+```
+ /path/to/target.gep:profilename
+```
+
+## Examples
+
+### Creating a profile
+
+``` c
+#include <gst/pbutils/encoding-profile.h>
+...
+GstEncodingProfile *
+create_ogg_theora_profile(void)
+{
+ GstEncodingContainerProfile *prof;
+ GstCaps *caps;
+
+ caps = gst_caps_from_string("application/ogg");
+ prof = gst_encoding_container_profile_new("Ogg audio/video",
+ "Standard OGG/THEORA/VORBIS",
+ caps, NULL);
+ gst_caps_unref (caps);
+
+ caps = gst_caps_from_string("video/x-theora");
+ gst_encoding_container_profile_add_profile(prof,
+ (GstEncodingProfile*) gst_encoding_video_profile_new(caps, NULL, NULL, 0));
+ gst_caps_unref (caps);
+
+ caps = gst_caps_from_string("audio/x-vorbis");
+ gst_encoding_container_profile_add_profile(prof,
+ (GstEncodingProfile*) gst_encoding_audio_profile_new(caps, NULL, NULL, 0));
+ gst_caps_unref (caps);
+
+ return (GstEncodingProfile*) prof;
+}
+
+```
+
+### Example: Using an encoder preset with a profile
+
+``` c
+#include <gst/pbutils/encoding-profile.h>
+...
+GstEncodingProfile *
+create_ogg_theora_profile(void)
+{
+ GstEncodingVideoProfile *v;
+ GstEncodingAudioProfile *a;
+ GstEncodingContainerProfile *prof;
+ GstCaps *caps;
+ GstPreset *preset;
+
+ caps = gst_caps_from_string ("application/ogg");
+ prof = gst_encoding_container_profile_new ("Ogg audio/video",
+ "Standard OGG/THEORA/VORBIS",
+ caps, NULL);
+ gst_caps_unref (caps);
+
+ preset = GST_PRESET (gst_element_factory_make ("theoraenc", "theorapreset"));
+ g_object_set (preset, "bitrate", 1000, NULL);
+ // The preset will be saved on the filesystem,
+ // so try to use a descriptive name
+ gst_preset_save_preset (preset, "theora_bitrate_preset");
+ gst_object_unref (preset);
+
+ caps = gst_caps_from_string ("video/x-theora");
+ v = gst_encoding_video_profile_new (caps, "theora_bitrate_preset", NULL, 0);
+ gst_encoding_container_profile_add_profile (prof, (GstEncodingProfile*) v);
+ gst_caps_unref (caps);
+
+ caps = gst_caps_from_string ("audio/x-vorbis");
+ a = gst_encoding_audio_profile_new (caps, NULL, NULL, 0);
+ gst_encoding_container_profile_add_profile (prof, (GstEncodingProfile*) a);
+ gst_caps_unref (caps);
+
+ return (GstEncodingProfile*) prof;
+}
+
+```
+
+### Listing categories, targets and profiles
+
+``` c
+#include <gst/pbutils/encoding-profile.h>
+...
+GstEncodingProfile *prof;
+GList *categories, *tmpc;
+GList *targets, *tmpt;
+...
+categories = gst_encoding_list_available_categories ();
+
+... Show available categories to user ...
+
+for (tmpc = categories; tmpc; tmpc = tmpc->next) {
+ gchar *category = (gchar *) tmpc->data;
+
+ ... and we can list all targets within that category ...
+
+ targets = gst_encoding_list_all_targets (category);
+
+ ... and show a list to our users ...
+
+ g_list_foreach (targets, (GFunc) gst_encoding_target_unref, NULL);
+ g_list_free (targets);
+}
+
+g_list_foreach (categories, (GFunc) g_free, NULL);
+g_list_free (categories);
+
+...
+```
+
+
+ On top of the notion of profiles, we implement the notion of EncodingTarget.
+Encoding Targets are basically a higher level of abstraction to define formats
+for specific target types. Those can define several GstEncodingProfiles with
+different names, for example one for transcoding in full HD, another one for
+low res, etc.. which are defined in the same encoding target.
+
+Basically if you want to encode a stream to send it to, say, youtube you should
+have a Youtube encoding target defined in the "online-service" category.
+
+## Encoding target serialization format
+
+Encoding targets are serialized in a KeyFile like files.
+
+|[
+[GStreamer Encoding Target]
+name : <name>
+category : <category>
+\description : <description> #translatable
+
+[profile-<profile1name>]
+name : <name>
+\description : <description> #optional
+format : <format>
+preset : <preset>
+
+[streamprofile-<id>]
+parent : <encodingprofile.name>[,<encodingprofile.name>..]
+\type : <type> # "audio", "video", "text"
+format : <format>
+preset : <preset>
+restriction : <restriction>
+presence : <presence>
+pass : <pass>
+variableframerate : <variableframerate>
+]|
+
+## Location of encoding target files
+
+$GST_DATADIR/gstreamer-GST_API_VERSION/encoding-profile
+$HOME/gstreamer-GST_API_VERSION/encoding-profile
+
+There also is a GST_ENCODING_TARGET_PATH environment variable
+defining a list of folders containing encoding target files.
+
+## Naming convention
+
+|[
+ $(target.category)/$(target.name).gep
+]|
+
+## Naming restrictions:
+
+ * lowercase ASCII letter for the first character
+ * Same for all other characters + numerics + hyphens
+
List all available #GstEncodingTarget for the specified category, or all categories
if @categoryname is %NULL.
@@ -3773,6 +4091,306 @@ of #GstEncodingTarget categories.
+
+ libgstpbutils is a general utility library for plugins and applications.
+It currently provides the
+following:
+
+* human-readable description strings of codecs, elements, sources, decoders,
+encoders, or sinks from decoder/encoder caps, element names, or protocol
+names.
+
+* support for applications to initiate installation of missing plugins (if
+this is supported by the distribution or operating system used)
+
+* API for GStreamer elements to create missing-plugin messages in order to
+communicate to the application that a certain type of plugin is missing
+(decoder, encoder, URI protocol source, URI protocol sink, named element)
+
+* API for applications to recognise and handle missing-plugin messages
+
+## Linking to this library
+
+You should obtain the required CFLAGS and LIBS using pkg-config on the
+gstreamer-plugins-base-1.0 module. You will then also need to add
+'-lgstreamer-pbutils-1.0' manually to your LIBS line.
+
+## Library initialisation
+
+Before using any of its functions, applications and plugins must call
+gst_pb_utils_init() to initialise the library.
+
+
+ Provides codec-specific utility functions such as functions to provide the
+codec profile and level in human-readable string form from header data.
+
+
+ The above functions provide human-readable strings for media formats
+and decoder/demuxer/depayloader/encoder/muxer/payloader elements for use
+in error dialogs or other messages shown to users.
+
+gst_pb_utils_add_codec_description_to_tag_list() is a utility function
+for demuxer and decoder elements to add audio/video codec tags from a
+given (fixed) #GstCaps.
+
+
+ ## Overview
+
+Using this API, applications can request the installation of missing
+GStreamer plugins. These may be missing decoders/demuxers or
+encoders/muxers for a certain format, sources or sinks for a certain URI
+protocol (e.g. 'http'), or certain elements known by their element
+factory name ('audioresample').
+
+Whether plugin installation is supported or not depends on the operating
+system and/or distribution in question. The vendor of the operating
+system needs to make sure the necessary hooks and mechanisms are in
+place for plugin installation to work. See below for more detailed
+information.
+
+From the application perspective, plugin installation is usually
+triggered either
+
+- when the application itself has found that it wants or needs to
+ install a certain element
+- when the application has been notified by an element (such as
+ playbin or decodebin) that one or more plugins are missing *and* the
+ application has decided that it wants to install one or more of
+ those missing plugins
+
+The install functions in this section all take one or more 'detail
+strings'. These detail strings contain information about the type of
+plugin that needs to be installed (decoder, encoder, source, sink, or
+named element), and some additional information such as the GStreamer version
+used and a human-readable description of the component to install for
+user dialogs.
+
+Applications should not concern themselves with the composition of the
+string itself. They should regard the string as if it was a shared
+secret between GStreamer and the plugin installer application.
+
+Detail strings can be obtained using the function
+gst_missing_plugin_message_get_installer_detail() on a
+missing-plugin message. Such a message will either have been found by
+the application on a pipeline's #GstBus, or the application will have
+created it itself using gst_missing_element_message_new(),
+gst_missing_decoder_message_new(),
+gst_missing_encoder_message_new(),
+gst_missing_uri_sink_message_new(), or
+gst_missing_uri_source_message_new().
+
+For each GStreamer element/plugin/component that should be installed,
+the application needs one of those 'installer detail' string mentioned
+in the previous section. This string can be obtained, as already
+mentioned above, from a missing-plugin message using the function
+gst_missing_plugin_message_get_installer_detail(). The
+missing-plugin message is either posted by another element and then
+found on the bus by the application, or the application has created it
+itself as described above.
+
+The application will then call gst_install_plugins_async(), passing a
+NULL-terminated array of installer detail strings, and a function that
+should be called when the installation of the plugins has finished
+(successfully or not). Optionally, a #GstInstallPluginsContext created
+with gst_install_plugins_context_new() may be passed as well. This
+way additional optional arguments like the application window's XID can
+be passed to the external installer application.
+
+gst_install_plugins_async() will return almost immediately, with the
+return code indicating whether plugin installation was started or not.
+If the necessary hooks for plugin installation are in place and an
+external installer application has in fact been called, the passed in
+function will be called with a result code as soon as the external
+installer has finished. If the result code indicates that new plugins
+have been installed, the application will want to call
+gst_update_registry() so the run-time plugin registry is updated and
+the new plugins are made available to the application.
+
+> A Gtk/GLib main loop must be running in order for the result function
+> to be called when the external installer has finished. If this is not
+> the case, make sure to regularly call in your code:
+>
+> g_main_context_iteration (NULL,FALSE);
+
+## 1. Installer hook
+
+When GStreamer applications initiate plugin installation via
+gst_install_plugins_async() or gst_install_plugins_sync(), a
+pre-defined helper application will be called.
+
+The exact path of the helper application to be called is set at compile
+time, usually by the build system based on the install prefix.
+For a normal package build into the `/usr` prefix, this will usually
+default to `/usr/libexec/gst-install-plugins-helper` or
+`/usr/lib/gst-install-plugins-helper`.
+
+Vendors/distros who want to support GStreamer plugin installation should
+either provide such a helper script/application or use the meson option
+`-Dinstall_plugins_helper=/path/to/installer` to make GStreamer call an
+installer of their own directly.
+
+It is strongly recommended that vendors provide a small helper
+application as interlocutor to the real installer though, even more so
+if command line argument munging is required to transform the command
+line arguments passed by GStreamer to the helper application into
+arguments that are understood by the real installer.
+
+The helper application path defined at compile time can be overridden at
+runtime by setting the GST_INSTALL_PLUGINS_HELPER environment
+variable. This can be useful for testing/debugging purposes.
+
+## 2. Arguments passed to the install helper
+
+GStreamer will pass the following arguments to the install helper (this
+is in addition to the path of the executable itself, which is by
+convention argv[0]):
+
+- none to many optional arguments in the form of `--foo-bar=val`.
+ Example: `--transient-for=XID` where XID is the X Window ID of the
+ main window of the calling application (so the installer can make
+ itself transient to that window). Unknown optional arguments should
+ be ignored by the installer.
+
+- one 'installer detail string' argument for each plugin to be
+ installed; these strings will have a `gstreamer` prefix; the exact
+ format of the detail string is explained below
+
+## 3. Detail string describing the missing plugin
+
+The string is in UTF-8 encoding and is made up of several fields,
+separated by '|' characters (but neither the first nor the last
+character is a '|'). The fields are:
+
+- plugin system identifier, ie. "gstreamer"
+ This identifier determines the format of the rest of the detail
+ string. Automatic plugin installers should not process detail
+ strings with unknown identifiers. This allows other plugin-based
+ libraries to use the same mechanism for their automatic plugin
+ installation needs, or for the format to be changed should it turn
+ out to be insufficient.
+- plugin system version, e.g. "1.0"
+ This is required so that when there is GStreamer-2.0 at some point
+ in future, the different major versions can still co-exist and use
+ the same plugin install mechanism in the same way.
+- application identifier, e.g. "totem"
+ This may also be in the form of "pid/12345" if the program name
+ can't be obtained for some reason.
+- human-readable localised description of the required component, e.g.
+ "Vorbis audio decoder"
+- identifier string for the required component (see below for details
+ about how to map this to the package/plugin that needs installing),
+ e.g.
+ - urisource-$(PROTOCOL_REQUIRED), e.g. urisource-http or
+ urisource-mms
+ - element-$(ELEMENT_REQUIRED), e.g. element-videoconvert
+ - decoder-$(CAPS_REQUIRED), e.g. (do read below for more
+ details!):
+ - decoder-audio/x-vorbis
+ - decoder-application/ogg
+ - decoder-audio/mpeg, mpegversion=(int)4
+ - decoder-video/mpeg, systemstream=(boolean)true,
+ mpegversion=(int)2
+ - encoder-$(CAPS_REQUIRED), e.g. encoder-audio/x-vorbis
+- optional further fields not yet specified
+
+An entire ID string might then look like this, for example: `
+gstreamer|1.0|totem|Vorbis audio decoder|decoder-audio/x-vorbis`
+
+Plugin installers parsing this ID string should expect further fields
+also separated by '|' symbols and either ignore them, warn the user, or
+error out when encountering them.
+
+Those unfamiliar with the GStreamer 'caps' system should note a few
+things about the caps string used in the above decoder/encoder case:
+
+- the first part ("video/mpeg") of the caps string is a GStreamer
+ media type and *not* a MIME type. Wherever possible, the GStreamer
+ media type will be the same as the corresponding MIME type, but
+ often it is not.
+- a caps string may or may not have additional comma-separated fields
+ of various types (as seen in the examples above)
+- the caps string of a 'required' component (as above) will always
+ have fields with fixed values, whereas an introspected string (see
+ below) may have fields with non-fixed values. Compare for example:
+ - `audio/mpeg, mpegversion=(int)4` vs.
+ `audio/mpeg, mpegversion=(int){2, 4}`
+ - `video/mpeg, mpegversion=(int)2` vs.
+ `video/mpeg, systemstream=(boolean){ true, false}, mpegversion=(int)[1, 2]`
+
+## 4. Exit codes the installer should return
+
+The installer should return one of the following exit codes when it
+exits:
+
+- 0 if all of the requested plugins could be installed
+ (#GST_INSTALL_PLUGINS_SUCCESS)
+- 1 if no appropriate installation candidate for any of the requested
+ plugins could be found. Only return this if nothing has been
+ installed (#GST_INSTALL_PLUGINS_NOT_FOUND)
+- 2 if an error occurred during the installation. The application will
+ assume that the user will already have seen an error message by the
+ installer in this case and will usually not show another one
+ (#GST_INSTALL_PLUGINS_ERROR)
+- 3 if some of the requested plugins could be installed, but not all
+ (#GST_INSTALL_PLUGINS_PARTIAL_SUCCESS)
+- 4 if the user aborted the installation
+ (#GST_INSTALL_PLUGINS_USER_ABORT)
+
+## 5. How to map the required detail string to packages
+
+It is up to the vendor to find mechanism to map required components from
+the detail string to the actual packages/plugins to install. This could
+be a hardcoded list of mappings, for example, or be part of the
+packaging system metadata.
+
+GStreamer plugin files can be introspected for this information. The
+`gst-inspect` utility has a special command line option that will output
+information similar to what is required. For example `
+$ gst-inspect-1.0 --print-plugin-auto-install-info /path/to/libgstvorbis.so
+should output something along the lines of
+`decoder-audio/x-vorbis`, `element-vorbisdec` `element-vorbisenc`
+`element-vorbisparse`, `element-vorbistag`, `encoder-audio/x-vorbis`
+
+Note that in the encoder and decoder case the introspected caps can be
+more complex with additional fields, e.g.
+`audio/mpeg,mpegversion=(int){2,4}`, so they will not always exactly
+match the caps wanted by the application. It is up to the installer to
+deal with this (either by doing proper caps intersection using the
+GStreamer #GstCaps API, or by only taking into account the media type).
+
+Another potential source of problems are plugins such as ladspa or
+libvisual where the list of elements depends on the installed
+ladspa/libvisual plugins at the time. This is also up to the
+distribution to handle (but usually not relevant for playback
+applications).
+
+
+ Functions to create, recognise and parse missing-plugins messages for
+applications and elements.
+
+Missing-plugin messages are posted on the bus by elements like decodebin
+or playbin if they can't find an appropriate source element or decoder
+element. The application can use these messages for two things:
+
+ * concise error/problem reporting to the user mentioning what exactly
+ is missing, see gst_missing_plugin_message_get_description()
+
+ * initiate installation of missing plugins, see
+ gst_missing_plugin_message_get_installer_detail() and
+ gst_install_plugins_async()
+
+Applications may also create missing-plugin messages themselves to install
+required elements that are missing, using the install mechanism mentioned
+above.
+
+
+ Use the GST_PLUGINS_BASE_VERSION_* macros e.g. to check what version of
+gst-plugins-base you are building against, and gst_plugins_base_version()
+if you need to check at runtime what version of the gst-plugins-base
+libraries are being used / you are currently linked against.
+
+The version macros get defined by including <gst/pbutils/pbutils.h>.
+
Requests plugin installation without blocking. Once the plugins have been
installed or installation has failed, @func will be called with the result
diff --git a/girs/GstRtp-1.0.gir b/girs/GstRtp-1.0.gir
index 6ae0dfdb77..2771b131ba 100644
--- a/girs/GstRtp-1.0.gir
+++ b/girs/GstRtp-1.0.gir
@@ -1989,19 +1989,26 @@ is set as zero, @lost_packets or @dup_packets will be zero.
Private extensions
-
+
+ H.323 callable address
-
+
+ Application Specific Identifier (RFC6776)
-
+
+ Reporting Group Identifier (RFC8861)
-
+
+ RtpStreamId SDES item (RFC8852).
-
+
+ RepairedRtpStreamId SDES item (RFC8852).
-
+
+ CLUE CaptId (RFC8849)
-
+
+ MID SDES item (RFC8843).
@@ -6007,6 +6014,14 @@ is no such metadata on @buffer.
+
+ Provides common defines for the RTP library.
+
+
+ The GstRTPPayloads helper functions makes it easy to deal with static and dynamic
+payloads. Its main purpose is to retrieve properties such as the default clock-rate
+and get session bandwidth information.
+
Open @buffer for reading or writing, depending on @flags. The resulting RTCP
buffer state is stored in @rtcp.
diff --git a/girs/GstRtsp-1.0.gir b/girs/GstRtsp-1.0.gir
index c97c136dd4..daa63237a2 100644
--- a/girs/GstRtsp-1.0.gir
+++ b/girs/GstRtsp-1.0.gir
@@ -4221,6 +4221,9 @@ not equal #GST_RTSP_OK.
+
+ Provides common defines for the RTSP library.
+
Free a %NULL-terminated array of credentials returned from
gst_rtsp_message_parse_auth_credentials().
diff --git a/girs/GstRtspServer-1.0.gir b/girs/GstRtspServer-1.0.gir
index 549e616ff7..78c24ef07d 100644
--- a/girs/GstRtspServer-1.0.gir
+++ b/girs/GstRtspServer-1.0.gir
@@ -12445,6 +12445,386 @@ the media factory and retrieve the role with the same name.
+
+ The #GstRTSPAddressPool is an object that maintains a collection of network
+addresses. It is used to allocate server ports and server multicast addresses
+but also to reserve client provided destination addresses.
+
+A range of addresses can be added with gst_rtsp_address_pool_add_range().
+Both multicast and unicast addresses can be added.
+
+With gst_rtsp_address_pool_acquire_address() an unused address and port range
+can be acquired from the pool. With gst_rtsp_address_pool_reserve_address() a
+specific address can be retrieved. Both methods return a boxed
+#GstRTSPAddress that should be freed with gst_rtsp_address_free() after
+usage, which brings the address back into the pool.
+
+Last reviewed on 2013-07-16 (1.0.0)
+
+
+ The #GstRTSPAuth object is responsible for checking if the current user is
+allowed to perform requested actions. The default implementation has some
+reasonable checks but subclasses can implement custom security policies.
+
+A new auth object is made with gst_rtsp_auth_new(). It is usually configured
+on the #GstRTSPServer object.
+
+The RTSP server will call gst_rtsp_auth_check() with a string describing the
+check to perform. The possible checks are prefixed with
+GST_RTSP_AUTH_CHECK_*. Depending on the check, the default implementation
+will use the current #GstRTSPToken, #GstRTSPContext and
+#GstRTSPPermissions on the object to check if an operation is allowed.
+
+The default #GstRTSPAuth object has support for basic authentication. With
+gst_rtsp_auth_add_basic() you can add a basic authentication string together
+with the #GstRTSPToken that will become active when successfully
+authenticated.
+
+When a TLS certificate has been set with gst_rtsp_auth_set_tls_certificate(),
+the default auth object will require the client to connect with a TLS
+connection.
+
+Last reviewed on 2013-07-16 (1.0.0)
+
+
+ The client object handles the connection with a client for as long as a TCP
+connection is open.
+
+A #GstRTSPClient is created by #GstRTSPServer when a new connection is
+accepted and it inherits the #GstRTSPMountPoints, #GstRTSPSessionPool,
+#GstRTSPAuth and #GstRTSPThreadPool from the server.
+
+The client connection should be configured with the #GstRTSPConnection using
+gst_rtsp_client_set_connection() before it can be attached to a #GMainContext
+using gst_rtsp_client_attach(). From then on the client will handle requests
+on the connection.
+
+Use gst_rtsp_client_session_filter() to iterate or modify all the
+#GstRTSPSession objects managed by the client object.
+
+Last reviewed on 2013-07-11 (1.0.0)
+
+
+ Last reviewed on 2013-07-11 (1.0.0)
+
+
+ a #GstRTSPMedia contains the complete GStreamer pipeline to manage the
+streaming to the clients. The actual data transfer is done by the
+#GstRTSPStream objects that are created and exposed by the #GstRTSPMedia.
+
+The #GstRTSPMedia is usually created from a #GstRTSPMediaFactory when the
+client does a DESCRIBE or SETUP of a resource.
+
+A media is created with gst_rtsp_media_new() that takes the element that will
+provide the streaming elements. For each of the streams, a new #GstRTSPStream
+object needs to be made with the gst_rtsp_media_create_stream() which takes
+the payloader element and the source pad that produces the RTP stream.
+
+The pipeline of the media is set to PAUSED with gst_rtsp_media_prepare(). The
+prepare method will add rtpbin and sinks and sources to send and receive RTP
+and RTCP packets from the clients. Each stream srcpad is connected to an
+input into the internal rtpbin.
+
+It is also possible to dynamically create #GstRTSPStream objects during the
+prepare phase. With gst_rtsp_media_get_status() you can check the status of
+the prepare phase.
+
+After the media is prepared, it is ready for streaming. It will usually be
+managed in a session with gst_rtsp_session_manage_media(). See
+#GstRTSPSession and #GstRTSPSessionMedia.
+
+The state of the media can be controlled with gst_rtsp_media_set_state ().
+Seeking can be done with gst_rtsp_media_seek(), or gst_rtsp_media_seek_full()
+or gst_rtsp_media_seek_trickmode() for finer control of the seek.
+
+With gst_rtsp_media_unprepare() the pipeline is stopped and shut down. When
+gst_rtsp_media_set_eos_shutdown() an EOS will be sent to the pipeline to
+cleanly shut down.
+
+With gst_rtsp_media_set_shared(), the media can be shared between multiple
+clients. With gst_rtsp_media_set_reusable() you can control if the pipeline
+can be prepared again after an unprepare.
+
+Last reviewed on 2013-07-11 (1.0.0)
+
+
+ The #GstRTSPMediaFactory is responsible for creating or recycling
+#GstRTSPMedia objects based on the passed URL.
+
+The default implementation of the object can create #GstRTSPMedia objects
+containing a pipeline created from a launch description set with
+gst_rtsp_media_factory_set_launch().
+
+Media from a factory can be shared by setting the shared flag with
+gst_rtsp_media_factory_set_shared(). When a factory is shared,
+gst_rtsp_media_factory_construct() will return the same #GstRTSPMedia when
+the url matches.
+
+Last reviewed on 2013-07-11 (1.0.0)
+
+
+ This specialized #GstRTSPMediaFactory constructs media pipelines from a URI,
+given with gst_rtsp_media_factory_uri_set_uri().
+
+It will automatically demux and payload the different streams found in the
+media at URL.
+
+Last reviewed on 2013-07-11 (1.0.0)
+
+
+ A #GstRTSPMountPoints object maintains a relation between paths
+and #GstRTSPMediaFactory objects. This object is usually given to
+#GstRTSPClient and used to find the media attached to a path.
+
+With gst_rtsp_mount_points_add_factory () and
+gst_rtsp_mount_points_remove_factory(), factories can be added and
+removed.
+
+With gst_rtsp_mount_points_match() you can find the #GstRTSPMediaFactory
+object that completely matches the given path.
+
+Last reviewed on 2013-07-11 (1.0.0)
+
+
+ a #GstRTSPOnvifMedia contains the complete GStreamer pipeline to manage the
+streaming to the clients. The actual data transfer is done by the
+#GstRTSPStream objects that are created and exposed by the #GstRTSPMedia.
+
+On top of #GstRTSPMedia this subclass adds special ONVIF features.
+A special ONVIF feature that is currently supported is a backchannel for
+the client to send back media to the server in a normal PLAY media. To
+handle the ONVIF backchannel, a #GstRTSPOnvifMediaFactory and
+#GstRTSPOnvifServer have to be used.
+
+
+ The #GstRTSPOnvifMediaFactory is responsible for creating or recycling
+#GstRTSPMedia objects based on the passed URL. Different to
+#GstRTSPMediaFactory, this supports special ONVIF features and can create
+#GstRTSPOnvifMedia in addition to normal #GstRTSPMedia.
+
+A special ONVIF feature that is currently supported is a backchannel for
+the client to send back media to the server in a normal PLAY media, see
+gst_rtsp_onvif_media_factory_set_backchannel_launch() and
+gst_rtsp_onvif_media_factory_set_backchannel_bandwidth().
+
+
+ The server object is the object listening for connections on a port and
+creating #GstRTSPOnvifClient objects to handle those connections.
+
+The only difference to #GstRTSPServer is that #GstRTSPOnvifServer creates
+#GstRTSPOnvifClient that have special handling for ONVIF specific features,
+like a backchannel that allows clients to send back media to the server.
+
+
+ Last reviewed on 2013-07-11 (1.0.0)
+
+
+ The #GstRTSPPermissions object contains an array of roles and associated
+permissions. The roles are represented with a string and the permissions with
+a generic #GstStructure.
+
+The permissions are deliberately kept generic. The possible values of the
+roles and #GstStructure keys and values are only determined by the #GstRTSPAuth
+object that performs the checks on the permissions and the current
+#GstRTSPToken.
+
+As a convenience function, gst_rtsp_permissions_is_allowed() can be used to
+check if the permissions contains a role that contains the boolean value
+%TRUE for the given key.
+
+Last reviewed on 2013-07-15 (1.0.0)
+
+
+ Last reviewed on 2013-07-11 (1.0.0)
+
+
+ The server object is the object listening for connections on a port and
+creating #GstRTSPClient objects to handle those connections.
+
+The server will listen on the address set with gst_rtsp_server_set_address()
+and the port or service configured with gst_rtsp_server_set_service().
+Use gst_rtsp_server_set_backlog() to configure the amount of pending requests
+that the server will keep. By default the server listens on the current
+network (0.0.0.0) and port 8554.
+
+The server will require an SSL connection when a TLS certificate has been
+set in the auth object with gst_rtsp_auth_set_tls_certificate().
+
+To start the server, use gst_rtsp_server_attach() to attach it to a
+#GMainContext. For more control, gst_rtsp_server_create_source() and
+gst_rtsp_server_create_socket() can be used to get a #GSource and #GSocket
+respectively.
+
+gst_rtsp_server_transfer_connection() can be used to transfer an existing
+socket to the RTSP server, for example from an HTTP server.
+
+Once the server socket is attached to a mainloop, it will start accepting
+connections. When a new connection is received, a new #GstRTSPClient object
+is created to handle the connection. The new client will be configured with
+the server #GstRTSPAuth, #GstRTSPMountPoints, #GstRTSPSessionPool and
+#GstRTSPThreadPool.
+
+The server uses the configured #GstRTSPThreadPool object to handle the
+remainder of the communication with this client.
+
+Last reviewed on 2013-07-11 (1.0.0)
+
+
+ The #GstRTSPSession is identified by an id, unique in the
+#GstRTSPSessionPool that created the session and manages media and its
+configuration.
+
+A #GstRTSPSession has a timeout that can be retrieved with
+gst_rtsp_session_get_timeout(). You can check if the session is expired with
+gst_rtsp_session_is_expired(). gst_rtsp_session_touch() will reset the
+expiration counter of the session.
+
+When a client configures a media with SETUP, a session will be created to
+keep track of the configuration of that media. With
+gst_rtsp_session_manage_media(), the media is added to the managed media
+in the session. With gst_rtsp_session_release_media() the media can be
+released again from the session. Managed media is identified in the sessions
+with a url. Use gst_rtsp_session_get_media() to get the media that matches
+(part of) the given url.
+
+The media in a session can be iterated with gst_rtsp_session_filter().
+
+Last reviewed on 2013-07-11 (1.0.0)
+
+
+ The #GstRTSPSessionMedia object manages a #GstRTSPMedia with a given path.
+
+With gst_rtsp_session_media_get_transport() and
+gst_rtsp_session_media_set_transport() the transports of a #GstRTSPStream of
+the managed #GstRTSPMedia can be retrieved and configured.
+
+Use gst_rtsp_session_media_set_state() to control the media state and
+transports.
+
+Last reviewed on 2013-07-16 (1.0.0)
+
+
+ The #GstRTSPSessionPool object manages a list of #GstRTSPSession objects.
+
+The maximum number of sessions can be configured with
+gst_rtsp_session_pool_set_max_sessions(). The current number of sessions can
+be retrieved with gst_rtsp_session_pool_get_n_sessions().
+
+Use gst_rtsp_session_pool_create() to create a new #GstRTSPSession object.
+The session object can be found again with its id and
+gst_rtsp_session_pool_find().
+
+All sessions can be iterated with gst_rtsp_session_pool_filter().
+
+Run gst_rtsp_session_pool_cleanup() periodically to remove timed out sessions
+or use gst_rtsp_session_pool_create_watch() to be notified when session
+cleanup should be performed.
+
+Last reviewed on 2013-07-11 (1.0.0)
+
+
+ The #GstRTSPStream object manages the data transport for one stream. It
+is created from a payloader element and a source pad that produce the RTP
+packets for the stream.
+
+With gst_rtsp_stream_join_bin() the streaming elements are added to the bin
+and rtpbin. gst_rtsp_stream_leave_bin() removes the elements again.
+
+The #GstRTSPStream will use the configured addresspool, as set with
+gst_rtsp_stream_set_address_pool(), to allocate multicast addresses for the
+stream. With gst_rtsp_stream_get_multicast_address() you can get the
+configured address.
+
+With gst_rtsp_stream_get_server_port() you can get the port that the server
+will use to receive RTCP. This is the port that the clients will use to send
+RTCP to.
+
+With gst_rtsp_stream_add_transport() destinations can be added where the
+stream should be sent to. Use gst_rtsp_stream_remove_transport() to remove
+the destination again.
+
+Each #GstRTSPStreamTransport spawns one queue that will serve as a backlog of a
+controllable maximum size when the reflux from the TCP connection's backpressure
+starts spilling all over.
+
+Unlike the backlog in rtspconnection, which we have decided should only contain
+at most one RTP and one RTCP data message in order to allow control messages to
+go through unobstructed, this backlog only consists of data messages, allowing
+us to fill it up without concern.
+
+When multiple TCP transports exist, for example in the context of a shared media,
+we only pop samples from our appsinks when at least one of the transports doesn't
+experience back pressure: this allows us to pace our sample popping to the speed
+of the fastest client.
+
+When a sample is popped, it is either sent directly on transports that don't
+experience backpressure, or queued on the transport's backlog otherwise. Samples
+are then popped from that backlog when the transport reports it has sent the message.
+
+Once the backlog reaches an overly large duration, the transport is dropped as
+the client was deemed too slow.
+
+
+ The #GstRTSPStreamTransport configures the transport used by a
+#GstRTSPStream. It is usually managed by a #GstRTSPSessionMedia object.
+
+With gst_rtsp_stream_transport_set_callbacks(), callbacks can be configured
+to handle the RTP and RTCP packets from the stream, for example when they
+need to be sent over TCP.
+
+With gst_rtsp_stream_transport_set_active() the transports are added and
+removed from the stream.
+
+A #GstRTSPStream will call gst_rtsp_stream_transport_keep_alive() when RTCP
+is received from the client. It will also call
+gst_rtsp_stream_transport_set_timed_out() when a receiver has timed out.
+
+A #GstRTSPClient will call gst_rtsp_stream_transport_message_sent() when it
+has sent a data message for the transport.
+
+Last reviewed on 2013-07-16 (1.0.0)
+
+
+ A #GstRTSPThreadPool manages reusable threads for various server tasks.
+Currently the defined thread types can be found in #GstRTSPThreadType.
+
+Threads of type #GST_RTSP_THREAD_TYPE_CLIENT are used to handle requests from
+a connected client. With gst_rtsp_thread_pool_get_max_threads() a maximum
+number of threads can be set after which the pool will start to reuse the
+same thread for multiple clients.
+
+Threads of type #GST_RTSP_THREAD_TYPE_MEDIA will be used to perform the state
+changes of the media pipelines and handle its bus messages.
+
+gst_rtsp_thread_pool_get_thread() can be used to create a #GstRTSPThread
+object of the right type. The thread object contains a mainloop and context
+that run in a separate thread and can be used to attach sources to.
+
+gst_rtsp_thread_reuse() can be used to reuse a thread for multiple purposes.
+If all gst_rtsp_thread_reuse() calls are matched with a
+gst_rtsp_thread_stop() call, the mainloop will be quit and the thread will
+stop.
+
+To configure the threads, a subclass of this object should be made and the
+virtual methods should be overridden to implement the desired functionality.
+
+Last reviewed on 2013-07-11 (1.0.0)
+
+
+ A #GstRTSPToken contains the permissions and roles of the user
+performing the current request. A token is usually created when a user is
+authenticated by the #GstRTSPAuth object and is then placed as the current
+token for the current request.
+
+#GstRTSPAuth can use the token and its contents to check authorization for
+various operations by comparing the token to the #GstRTSPPermissions of the
+object.
+
+The accepted values of the token are entirely defined by the #GstRTSPAuth
+object that implements the security policy.
+
+Last reviewed on 2013-07-15 (1.0.0)
+
Get the current #GstRTSPContext. This object is retrieved from the
current thread that is handling the request for a client.
diff --git a/girs/GstSdp-1.0.gir b/girs/GstSdp-1.0.gir
index 414877e3a9..f45145681d 100644
--- a/girs/GstSdp-1.0.gir
+++ b/girs/GstSdp-1.0.gir
@@ -4117,6 +4117,10 @@ time.
+
+ The GstMIKEY helper functions makes it easy to parse and create MIKEY
+messages.
+
Check if the given @addr is a multicast address.
diff --git a/girs/GstTag-1.0.gir b/girs/GstTag-1.0.gir
index 4634e02333..e0f2e37cd5 100644
--- a/girs/GstTag-1.0.gir
+++ b/girs/GstTag-1.0.gir
@@ -929,6 +929,41 @@ the schema wasn't in the list
+
+ Contains additional standardized GStreamer tag definitions for plugins
+and applications, and functions to register them with the GStreamer
+tag system.
+
+
+ Contains utility function to parse #GstTagList<!-- -->s from exif
+buffers and to create exif buffers from #GstTagList<!-- -->s
+
+Note that next IFD fields on the created exif buffers are set to 0.
+
+
+ Contains various utility functions for plugins to parse or create
+ID3 tags and map ID3v2 identifiers to and from GStreamer identifiers.
+
+
+ Provides helper functions to convert between the various ISO-639 language
+codes, and to map language codes to language names.
+
+
+ Provides information about Creative Commons media licenses, which are
+often expressed in media files as a license URI in tags. Also useful
+for applications creating media files, in case the user wants to license
+the content under a Creative Commons license.
+
+
+ Contains various utility functions for plugins to parse or create
+vorbiscomments and map them to and from #GstTagList<!-- -->s.
+
+
+ Contains various utility functions for plugins to parse or create
+xmp packets and map them to and from #GstTagList<!-- -->s.
+
+Please note that the xmp parser is very lightweight and not strict at all.
+
Check if a given string contains a known ISO 639 language code.
diff --git a/girs/GstTranscoder-1.0.gir b/girs/GstTranscoder-1.0.gir
index c29ca1060c..b3848b659f 100644
--- a/girs/GstTranscoder-1.0.gir
+++ b/girs/GstTranscoder-1.0.gir
@@ -348,7 +348,7 @@ Pass 0 to stop updating the position.
-
+
diff --git a/girs/GstValidate-1.0.gir b/girs/GstValidate-1.0.gir
index bc3a7774e9..c85b1f24f5 100644
--- a/girs/GstValidate-1.0.gir
+++ b/girs/GstValidate-1.0.gir
@@ -248,7 +248,8 @@ value to a GstClockTime.
for mandatory streams.
-
+
+ Function that frees the various members of the structure when done using
@@ -274,9 +275,12 @@ GST_VALIDATE_EXECUTE_ACTION_DONE:
-
+
+ The action will be executed asynchronously without blocking further
+actions to be executed
+ Use #GST_VALIDATE_EXECUTE_ACTION_NON_BLOCKING instead.
@@ -357,9 +361,12 @@ GST_VALIDATE_EXECUTE_ACTION_DONE:
The action can be executed ASYNC
-
+
+ The action can be executed asynchronously but without blocking further
+actions execution.
+ Use #GST_VALIDATE_ACTION_TYPE_NON_BLOCKING instead.
The action will be executed on 'element-added'
@@ -385,7 +392,11 @@ GST_VALIDATE_EXECUTE_ACTION_DONE:
The action can be used in config files even if it is not strictly a config
action (ie. it needs a scenario to run).
-
+
+ The action is checking some state from objects in the pipeline. It means that it can
+be used as 'check' in different action which have a `check` "sub action", such as the 'wait' action type.
+This implies that the action can be executed from any thread and not only from the scenario thread as other
+types.
@@ -799,7 +810,8 @@ GST_VALIDATE_ISSUE_FLAGS_NO_BACKTRACE: Do not generate backtrace for the issue t
-
+
+ Always generate backtrace, even if not a critical issue
@@ -3997,6 +4009,61 @@ target-element field.
+
+ TODO
+
+
+ TODO
+
+
+ TODO
+
+
+ To start monitoring and thus run GstValidate tests on a #GstPipeline, the only thing to
+do is to instantiate a #GstValidateRunner and then attach a #GstValidateMonitor
+to it with #gst_validate_monitor_factory_create
+
+
+ TODO
+
+
+ TODO
+
+
+ TODO
+
+
+ Allows you to test a pipeline within GstValidate. It is the object where
+all issue reporting is done.
+
+In the tools using GstValidate the only minimal code to be able to monitor
+your pipelines is:
+
+|[
+ GstPipeline *pipeline = gst_pipeline_new ("monitored-pipeline");
+ GstValidateRunner *runner = gst_validate_runner_new ();
+ GstValidateMonitor *monitor = gst_validate_monitor_factory_create (
+ GST_OBJECT (pipeline), runner, NULL);
+
+ // Run the pipeline and do whatever you want with it
+
+ // In that same order
+ gst_object_unref (pipeline);
+ gst_object_unref (runner);
+ gst_object_unref (monitor);
+]|
+
+
+ A #GstValidateScenario represents the scenario that will be executed on a #GstPipeline.
+It is basically an ordered list of #GstValidateAction that will be executed during the
+execution of the pipeline.
+
+Possible configurations (see [GST_VALIDATE_CONFIG](gst-validate-environment-variables.md)):
+ * scenario-action-execution-interval: Sets the interval in
+ milliseconds (1/1000ths of a second), between which actions
+ will be executed, setting it to 0 means "execute in idle".
+ The default value is 10ms.
+
diff --git a/girs/GstVideo-1.0.gir b/girs/GstVideo-1.0.gir
index 8ec3a8eef6..1da693e019 100644
--- a/girs/GstVideo-1.0.gir
+++ b/girs/GstVideo-1.0.gir
@@ -1962,36 +1962,32 @@ event.
A navigation command event. Use
gst_navigation_event_parse_command() to extract the details from the event.
-
- A mouse scroll event. Use
-gst_navigation_event_parse_mouse_scroll_event() to extract the details from
-the event. (Since: 1.18)
+
+ A mouse scroll event. Use gst_navigation_event_parse_mouse_scroll_event()
+to extract the details from the event.
-
- An event describing a new touch point,
-which will be assigned an identifier that is unique to it for the duration
-of its movement on the screen. Use gst_navigation_event_parse_touch_event()
-to extract the details from the event. (Since: 1.22)
+
+ An event describing a new touch point, which will be assigned an identifier
+that is unique to it for the duration of its movement on the screen.
+Use gst_navigation_event_parse_touch_event() to extract the details
+from the event.
-
- An event describing the movement of an
-active touch point across the screen. Use
-gst_navigation_event_parse_touch_event() to extract the details from the
-event. (Since: 1.22)
+
+ An event describing the movement of an active touch point across
+the screen. Use gst_navigation_event_parse_touch_event() to extract
+the details from the event.
-
- An event describing a removed touch point.
-After this event, its identifier may be reused for any new touch points. Use
-gst_navigation_event_parse_touch_up_event() to extract the details from the
-event. (Since: 1.22)
+
+ An event describing a removed touch point. After this event,
+its identifier may be reused for any new touch points.
+Use gst_navigation_event_parse_touch_up_event() to extract the details
+from the event.
-
- An event signaling the end of a sequence
-of simultaneous touch events. (Since: 1.22)
+
+ An event signaling the end of a sequence of simultaneous touch events.
-
- An event cancelling all currently active
-touch points. (Since: 1.22)
+
+ An event cancelling all currently active touch points.
@@ -5742,8 +5738,8 @@ will be freed.
should the encoder output stream headers
-
- the buffer data is corrupted (Since: 1.20)
+
+ The buffer data is corrupted.
@@ -5778,14 +5774,12 @@ gst_video_encoder_set_output_state() methods.
negotiation. Since: 1.10
-
- Mastering display color volume information
- (HDR metadata) for the stream. Since: 1.20
+
+ Mastering display color volume information (HDR metadata) for the stream.
-
- Content light level information for the stream.
- Since: 1.20
+
+ Content light level information for the stream.
@@ -10099,116 +10093,128 @@ for details about the layout and packing of these formats in memory.
packed 4:4:4:4 YUV, 12 bits per channel(U-Y-V-A...) (Since: 1.18)
-
- NV12 with 4x4 tiles in linear order (Since: 1.18)
+
+ NV12 with 4x4 tiles in linear order.
-
- NV12 with 32x32 tiles in linear order (Since: 1.18)
+
+ NV12 with 32x32 tiles in linear order.
-
- planar 4:4:4 RGB, 8 bits per channel (Since: 1.20)
+
+ Planar 4:4:4 RGB, R-G-B order
-
- planar 4:4:4 RGB, 8 bits per channel (Since: 1.20)
+
+ Planar 4:4:4 RGB, B-G-R order
-
- Planar 4:2:0 YUV with interleaved UV plane with alpha as 3rd plane (Since: 1.20)
+
+ Planar 4:2:0 YUV with interleaved UV plane with alpha as
+3rd plane.
-
- RGB with alpha channel first, 16 bits per channel
+
+ RGB with alpha channel first, 16 bits (little endian)
+per channel.
-
- RGB with alpha channel first, 16 bits per channel
+
+ RGB with alpha channel first, 16 bits (big endian)
+per channel.
-
- RGB with alpha channel last, 16 bits per channel
+
+ RGB with alpha channel last, 16 bits (little endian)
+per channel.
-
- RGB with alpha channel last, 16 bits per channel
+
+ RGB with alpha channel last, 16 bits (big endian)
+per channel.
-
- reverse RGB with alpha channel last, 16 bits per channel
+
+ Reverse RGB with alpha channel last, 16 bits (little endian)
+per channel.
-
- reverse RGB with alpha channel last, 16 bits per channel
+
+ Reverse RGB with alpha channel last, 16 bits (big endian)
+per channel.
-
- reverse RGB with alpha channel first, 16 bits per channel
+
+ Reverse RGB with alpha channel first, 16 bits (little endian)
+per channel.
-
- reverse RGB with alpha channel first, 16 bits per channel
+
+ Reverse RGB with alpha channel first, 16 bits (big endian)
+per channel.
-
- NV12 with 16x32 Y tiles and 16x16 UV tiles. (Since: 1.22)
+
+ NV12 with 16x32 Y tiles and 16x16 UV tiles.
-
- NV12 with 8x128 tiles in linear order (Since: 1.22)
+
+ NV12 with 8x128 tiles in linear order.
-
- NV12 10bit big endian with 8x128 tiles in linear order (Since: 1.22)
+
+ NV12 10bit big endian with 8x128 tiles in linear order.
-
- @GST_VIDEO_FORMAT_NV12_10LE40 with 4x4 pixels tiles (5 bytes per tile row) (Since: 1.24)
+
+ @GST_VIDEO_FORMAT_NV12_10LE40 with 4x4 pixels tiles (5 bytes
+ per tile row). This format is produced by Verisilicon/Hantro decoders.
-
- DMA DRM special format. It's only used with
- memory:DMABuf #GstCapsFeatures, where an extra
- parameter (drm-format) is required to define the
- image format and its memory layout.
+
+ @GST_VIDEO_FORMAT_DMA_DRM represent the DMA DRM special format. It's
+only used with memory:DMABuf #GstCapsFeatures, where an extra
+parameter (drm-format) is required to define the image format and
+its memory layout.
-
- Mediatek 10bit NV12 little endian with 16x32 tiles in linear order, tiled 2 bits (Since: 1.24)
+
+ Mediatek 10bit NV12 little endian with 16x32 tiles in linear order, tile 2
+bits.
-
- Mediatek 10bit NV12 little endian with 16x32 tiles in linear order, raster 2 bits (Since: 1.24)
+
+ Mediatek 10bit NV12 little endian with 16x32 tiles in linear order, raster
+2 bits.
-
- planar 4:4:2:2 YUV, 8 bits per channel (Since: 1.24)
+
+ planar 4:4:2:2 YUV, 8 bits per channel
-
- planar 4:4:4:4 YUV, 8 bits per channel (Since: 1.24)
+
+ planar 4:4:4:4 YUV, 8 bits per channel
-
- planar 4:4:4:4 YUV, 12 bits per channel (Since: 1.24)
+
+ planar 4:4:4:4 YUV, 12 bits per channel
-
- planar 4:4:4:4 YUV, 12 bits per channel (Since: 1.24)
+
+ planar 4:4:4:4 YUV, 12 bits per channel
-
- planar 4:4:2:2 YUV, 12 bits per channel (Since: 1.24)
+
+ planar 4:4:2:2 YUV, 12 bits per channel
-
- planar 4:4:2:2 YUV, 12 bits per channel (Since: 1.24)
+
+ planar 4:4:2:2 YUV, 12 bits per channel
-
- planar 4:4:2:0 YUV, 12 bits per channel (Since: 1.24)
+
+ planar 4:4:2:0 YUV, 12 bits per channel
-
- planar 4:4:2:0 YUV, 12 bits per channel (Since: 1.24)
+
+ planar 4:4:2:0 YUV, 12 bits per channel
-
- planar 4:4:4:4 YUV, 16 bits per channel (Since: 1.24)
+
+ planar 4:4:4:4 YUV, 16 bits per channel
-
- planar 4:4:4:4 YUV, 16 bits per channel (Since: 1.24)
+
+ planar 4:4:4:4 YUV, 16 bits per channel
-
- planar 4:4:2:2 YUV, 16 bits per channel (Since: 1.24)
+
+ planar 4:4:2:2 YUV, 16 bits per channel
-
- planar 4:4:2:2 YUV, 16 bits per channel (Since: 1.24)
+
+ planar 4:4:2:2 YUV, 16 bits per channel
-
- planar 4:4:2:0 YUV, 16 bits per channel (Since: 1.24)
+
+ planar 4:4:2:0 YUV, 16 bits per channel
-
- planar 4:4:2:0 YUV, 16 bits per channel (Since: 1.24)
+
+ planar 4:4:2:0 YUV, 16 bits per channel
-
- planar 4:4:4 RGB, 16 bits per channel (Since: 1.24)
+
+ planar 4:4:4 RGB, 16 bits per channel
-
- planar 4:4:4 RGB, 16 bits per channel (Since: 1.24)
+
+ planar 4:4:4 RGB, 16 bits per channel
Converts a FOURCC value into the corresponding #GstVideoFormat.
@@ -10385,9 +10391,8 @@ the #GstVideoFormat if there is one, or NULL otherwise.
The format is tiled, there is tiling information
in the last plane.
-
- The tile size varies per plane
- according to the subsampling. (Since: 1.22)
+
+ The tile size varies per plane according to the subsampling.
@@ -10505,8 +10510,8 @@ use tile_info[] array instead.
use tile_info[] array instead.
-
- Per-plane tile information
+
+ Information about the tiles for each of the planes.
@@ -14662,16 +14667,24 @@ frames will only be rendered in PLAYING state.
Description of a tile. This structure allow to describe arbitrary tile
dimensions and sizes.
-
+
+ The width in pixels of a tile. This value can be zero if the number of
+pixels per line is not an integer value.
-
+
+ The stride (in bytes) of a tile line. Regardless if the tile have sub-tiles
+this stride multiplied by the height should be equal to
+#GstVideoTileInfo.size. This value is used to translate into linear stride
+when older APIs are being used to expose this format.
-
+
+ The size in bytes of a tile. This value must be divisible by
+#GstVideoTileInfo.stride.
@@ -14691,8 +14704,8 @@ dimensions and sizes.
in memory in Z or flipped Z order. In case of odd rows, the last row
of blocks is arranged in linear order.
-
- Tiles are in row order. (Since: 1.18)
+
+ Tiles are in row order.
@@ -15450,11 +15463,8 @@ non-linear RGB (R'G'B') and linear RGB
STD-B67 and Rec. ITU-R BT.2100-1 hybrid loggamma (HLG) system
Since: 1.18
-
- also known as SMPTE170M / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC
- Functionally the same as the values
- GST_VIDEO_TRANSFER_BT709, and GST_VIDEO_TRANSFER_BT2020_10.
- Since: 1.18
+
+ also known as SMPTE170M / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC
Convert @val to its gamma decoded value. This is the inverse operation of
@@ -16437,6 +16447,32 @@ in @align
+
+ This library contains some helper functions and includes the
+videosink and videofilter base classes.
+
+
+ A collection of objects and methods to assist with handling Ancillary Data
+present in Vertical Blanking Interval as well as Closed Caption.
+
+
+ The functions gst_video_chroma_from_string() and gst_video_chroma_to_string() convert
+between #GstVideoChromaSite and string descriptions.
+
+#GstVideoChromaResample is a utility object for resampling chroma planes
+and converting between different chroma sampling sitings.
+
+
+ Special GstBufferPool subclass for raw video buffers.
+
+Allows configuration of video-specific requirements such as
+stride alignments or pixel padding, and can also be configured
+to automatically add #GstVideoMeta to the buffers.
+
+
+ A collection of objects and methods to assist with SEI User Data Unregistered
+metadata in H.264 and H.265 streams.
+
Convenience function to check if the given message is a
"prepare-window-handle" message from a #GstVideoOverlay.
@@ -19424,5 +19460,14 @@ and "ITU-T H.273 Table 3".
+
+ This object is used to convert video frames from one format to another.
+The object can perform conversion of:
+
+ * video format
+ * video colorspace
+ * chroma-siting
+ * video size
+
diff --git a/girs/GstVulkan-1.0.gir b/girs/GstVulkan-1.0.gir
index f49839ac39..36c72881e1 100644
--- a/girs/GstVulkan-1.0.gir
+++ b/girs/GstVulkan-1.0.gir
@@ -2428,7 +2428,8 @@ the error
-
+
+ image aspect of this format
@@ -4528,7 +4529,8 @@ called.
number of elements in @queue_family_props
-
+
+ vulkan operations allowed per queue family
@@ -6065,6 +6067,82 @@ Currently intended for subclasses to update internal state.
+
+ #GstVulkanBufferMemory is a #GstMemory subclass providing support for the
+mapping of Vulkan device memory.
+
+
+ a #GstVulkanBufferPool is an object that allocates buffers with #GstVulkanBufferMemory
+
+A #GstVulkanBufferPool is created with gst_vulkan_buffer_pool_new()
+
+#GstVulkanBufferPool implements the VideoMeta buffer pool option
+#GST_BUFFER_POOL_OPTION_VIDEO_META
+
+
+ vulkandescriptorset holds information about a descriptor set.
+
+
+ A #GstVulkanDevice encapsulates a VkDevice
+
+
+ A #GstVulkanDisplay represents a connection to a display server on the platform
+
+
+ A #GstVulkanFence encapsulates a VkFence
+
+
+ A #GstVulkanFullScreenQuad is a helper object for rendering a single input
+image to an output #GstBuffer
+
+
+ #GstVulkanHandle holds information about a vulkan handle.
+
+
+ a #GstVulkanImageBufferPool is an object that allocates buffers with #GstVulkanImageMemory
+
+A #GstVulkanImageBufferPool is created with gst_vulkan_image_buffer_pool_new()
+
+
+ GstVulkanImageMemory is a #GstMemory subclass providing support for the
+mapping of Vulkan device memory.
+
+
+ #GstVulkanImageView is a wrapper around a `VkImageView` mostly for
+usage across element boundaries with #GstVulkanImageMemory
+
+
+ #GstVulkanInstance encapsulates the necessary information for the toplevel
+Vulkan instance object.
+
+If GStreamer is built with debugging support, the default Vulkan API chosen
+can be selected with the environment variable
+`GST_VULKAN_INSTANCE_API_VERSION=1.0`. Any subsequent setting of the
+requested Vulkan API version through the available properties will override
+the environment variable.
+
+
+ GstVulkanMemory is a #GstMemory subclass providing support for the mapping of
+Vulkan device memory.
+
+
+ A #GstVulkanPhysicalDevice encapsulates a VkPhysicalDevice
+
+
+ GstVulkanQueue encapsulates the vulkan command queue.
+
+
+ #GstVulkanSwapper is a helper object for rendering to a surface exposed by
+#GstVulkanWindow.
+
+
+ #GstVulkanTrash is a helper object for freeing resources after a
+#GstVulkanFence is signalled.
+
+
+ GstVulkanWindow represents a window that elements can render into. A window can
+either be a user visible window (onscreen) or hidden (offscreen).
+
Allocate a new #GstVulkanBufferMemory.
@@ -6823,5 +6901,15 @@ associated #GstVulkanFence is signalled
+
+ vulkancommandbuffer holds information about a command buffer.
+
+
+ #GstVulkanHandlePool holds a number of handles that are pooled together.
+
+
+ #GstVulkanVideoFilter is a helper base class for retrieving and holding the
+#GstVulkanInstance, #GstVulkanDevice and #GstVulkanQueue used by an element.
+
diff --git a/girs/GstWebRTC-1.0.gir b/girs/GstWebRTC-1.0.gir
index dd7df6fad5..912c01a386 100644
--- a/girs/GstWebRTC-1.0.gir
+++ b/girs/GstWebRTC-1.0.gir
@@ -643,9 +643,11 @@ for more information.
GStreamer-specific failure, not matching any other value from the specification
-
+
+ invalid-modification (part of WebIDL specification)
-
+
+ type-error (maps to JavaScript TypeError)
@@ -2658,6 +2660,27 @@ answer.
certificate
+
+ <https://www.w3.org/TR/webrtc/#rtcdatachannel>
+
+
+ <https://www.w3.org/TR/webrtc/#rtcdtlstransport>
+
+
+ See the [specification](https://www.w3.org/TR/webrtc/#rtcicetransport)
+
+
+ <https://www.w3.org/TR/webrtc/#rtcrtpreceiver-interface>
+
+
+ <https://www.w3.org/TR/webrtc/#rtcrtpsender-interface>
+
+
+ <https://www.w3.org/TR/webrtc/#rtcsessiondescription-class>
+
+
+ <https://www.w3.org/TR/webrtc/#rtcrtptransceiver-interface>
+
diff --git a/subprojects/gst-devtools/validate/launcher/testsuites/check.py b/subprojects/gst-devtools/validate/launcher/testsuites/check.py
index 2f879eec80..cce51893a3 100644
--- a/subprojects/gst-devtools/validate/launcher/testsuites/check.py
+++ b/subprojects/gst-devtools/validate/launcher/testsuites/check.py
@@ -30,6 +30,7 @@ KNOWN_NOT_LEAKY = r'^check.gst-devtools.*|^check.gstreamer.*|^check-gst-plugins-
LONG_VALGRIND_TESTS = [
(r'check.[a-z-]*.generic_states.test_state_changes_down_seq', 'enough to run one of the sequences'),
(r'check.[a-z-]*.generic_states.test_state_changes_up_seq', 'enough to run one of the sequences',),
+ (r'check.[a-z-]*.generic_states.test_state_changes_up_and_down_seq', 'enough to run the sequences'),
(r'check.gstreamer.gst_gstelement.test_foreach_pad$', '48s'),
(r'check.gstreamer.gst_gstinfo.info_post_gst_init_category_registration$', '21s'),
(r'check.gstreamer.gst_gstsystemclock.test_resolution$', '60s'),
@@ -39,6 +40,10 @@ LONG_VALGRIND_TESTS = [
(r'check.gstreamer.pipelines_simple_launch_lines.test_2_elements$', '58s'),
(r'check.gstreamer.pipelines_stress.test_stress$', '54s'),
(r'check.gstreamer.pipelines_stress.test_stress_preroll$', '27s'),
+ (r'check.gst-plugins-base.elements_appsrc.test_appsrc_limits', '53.37s'),
+ (r'check.gst-plugins-base.elements_appsrc.test_appsrc_send_event_before_buffer', '49.25s'),
+ (r'check.gst-plugins-base.elements_appsrc.test_appsrc_send_event_before_sample', '51.39s'),
+ (r'check.gst-plugins-base.elements_appsrc.test_appsrc_send_event_between_caps_buffer', '56.13s'),
(r'check.gst-plugins-base.elements_appsrc.test_appsrc_block_deadlock$', '265.595s'),
(r'check.gst-plugins-base.elements_audioresample.test_fft$', '91.247s'),
(r'check.gst-plugins-base.elements_audioresample.test_timestamp_drift$', '141.784s'),
@@ -112,9 +117,11 @@ VALGRIND_BLACKLIST = [
(r'check.gst-editing-services.pythontests', 'Need to figure out how to introduce python suppressions'),
(r'check.gst-editing-services.check_keyframes_in_compositor_two_sources', 'Valgrind exit with an exitcode 20 but shows no issue: https://gitlab.freedesktop.org/thiblahute/gst-editing-services/-/jobs/4079972'),
(r'check.gst-plugins-good.elements_splitmuxsrc.test_splitmuxsrc_sparse_streams', 'https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/739'),
- (r'check.gst-plugins-good.elements_udpsrc.test_udpsrc_empty_packet', 'https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/740')
-]
+ (r'check.gst-plugins-good.elements_udpsrc.test_udpsrc_empty_packet', 'https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/740'),
+ (r'check.gst-plugins-good.elements_souphttpsrc2.test_icy_stream', 'flaky in valgrind, leaks in CI but not locally'),
+ (r'check.gst-plugins-bad.elements_svthevc*', 'https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/3011'),
+]
BLACKLIST = [
(r'check.gstreamer.gst_gstsystemclock.test_stress_cleanup_unschedule', 'flaky under high server load'),
@@ -125,6 +132,7 @@ BLACKLIST = [
(r'check.gst-plugins-base.elements_multisocketsink.test_sending_buffers_with_9_gstmemories$', 'https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/779'),
(r'check.gst-plugins-base.elements_multisocketsink.test_client_next_keyframe$', 'https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/779'),
(r'check.gst-plugins-base.elements_multisocketsink.test_add_client$', ''),
+ (r'check.gst-plugins-base.elements_multisocketsink.test_burst_client_bytes$', ''),
(r'check.gst-plugins-base.libs_gstglcolorconvert.test_reorder_buffer$', '?'),
(r'check.gst-plugins-base.elements_audiotestsrc.test_layout$', 'https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/781'),
(r'check.gst-plugins-good.elements_souphttpsrc.test_icy_stream$', 'https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/782'),
@@ -161,8 +169,14 @@ BLACKLIST = [
(r'check.gst-editing-services.nle_simple.test_one_bin_after_other$', 'https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/802'),
(r'check.gstreamer-vaapi.*$', 'only run the tests explicitly'),
(r'check.gst-rtsp-server.gst_rtspserver.test_multiple_transports', 'https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/767'),
+ (r'check.gst-plugins-bad.elements_vkcolorconvert.test_vulkan_color_convert_rgba_reorder$', 'Mesa in the CI image is older, will start passing once we update to llvm16 and mesa 23.1'),
+ (r'check.gst-plugins-bad.libs_vkformat.test_format_from_video_info_2$', 'Mesa in the CI image is older, will start passing once we update to llvm16 and mesa 23.1'),
+ (r'check.gst-plugins-bad.libs_vkimagebufferpool.test_image$', 'Mesa in the CI image is older, will start passing once we update to llvm16 and mesa 23.1'),
+ (r'check.gst-plugins-bad.libs_vkwindow.test_window_new$', 'Mesa in the CI image is older, will start passing once we update to llvm16 and mesa 23.1'),
+ (r'check.gst-plugins-good.elements_souphttpsrc2.test_icy_stream', 'flaky in valgrind, leaks in CI but not locally'),
]
+
KNOWN_ISSUES = {
"https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/773": {
"tests": [
diff --git a/subprojects/gst-libav/docs/gst_plugins_cache.json b/subprojects/gst-libav/docs/gst_plugins_cache.json
index ed612680bd..47c98cbce8 100644
--- a/subprojects/gst-libav/docs/gst_plugins_cache.json
+++ b/subprojects/gst-libav/docs/gst_plugins_cache.json
@@ -1460,7 +1460,7 @@
"presence": "always"
},
"src": {
- "caps": "audio/x-raw:\n format: S16LE\n layout: interleaved\n",
+ "caps": "audio/x-raw:\n format: S16LE\n layout: interleaved\naudio/x-raw:\n format: S16LE\n layout: non-interleaved\n",
"direction": "src",
"presence": "always"
}
@@ -2776,6 +2776,7 @@
"description": "libav argo decoder",
"hierarchy": [
"avdec_argo",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -2791,7 +2792,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -5006,6 +5007,7 @@
"description": "libav cdtoons decoder",
"hierarchy": [
"avdec_cdtoons",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -5021,7 +5023,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -5833,6 +5835,7 @@
"description": "libav cri decoder",
"hierarchy": [
"avdec_cri",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -5848,7 +5851,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -11581,6 +11584,7 @@
"description": "libav imm5 decoder",
"hierarchy": [
"avdec_imm5",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -11596,7 +11600,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -12176,6 +12180,7 @@
"description": "libav ipu decoder",
"hierarchy": [
"avdec_ipu",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -12191,7 +12196,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -13857,6 +13862,7 @@
"description": "libav mobiclip decoder",
"hierarchy": [
"avdec_mobiclip",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -13872,7 +13878,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -15114,6 +15120,7 @@
"description": "libav msp2 decoder",
"hierarchy": [
"avdec_msp2",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -15129,7 +15136,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -15769,6 +15776,7 @@
"description": "libav mv30 decoder",
"hierarchy": [
"avdec_mv30",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -15784,7 +15792,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -16038,6 +16046,7 @@
"description": "libav mvdv decoder",
"hierarchy": [
"avdec_mvdv",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -16053,7 +16062,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -16127,6 +16136,7 @@
"description": "libav mvha decoder",
"hierarchy": [
"avdec_mvha",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -16142,7 +16152,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -16424,6 +16434,7 @@
"description": "libav notchlc decoder",
"hierarchy": [
"avdec_notchlc",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -16439,7 +16450,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -17156,6 +17167,7 @@
"description": "libav pfm decoder",
"hierarchy": [
"avdec_pfm",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -17171,7 +17183,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -17425,6 +17437,7 @@
"description": "libav pgx decoder",
"hierarchy": [
"avdec_pgx",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -17440,7 +17453,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -17514,6 +17527,7 @@
"description": "libav photocd decoder",
"hierarchy": [
"avdec_photocd",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -17529,7 +17543,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -20225,6 +20239,7 @@
"description": "libav sga decoder",
"hierarchy": [
"avdec_sga",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -20240,7 +20255,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -20638,6 +20653,7 @@
"description": "libav simbiosis_imx decoder",
"hierarchy": [
"avdec_simbiosis_imx",
+ "GstFFMpegVidDec",
"GstVideoDecoder",
"GstElement",
"GstObject",
@@ -20653,7 +20669,7 @@
"presence": "always"
},
"src": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n\nvideo/x-raw(format:Interlaced):\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n interlace-mode: alternate\n",
"direction": "src",
"presence": "always"
}
@@ -31654,7 +31670,7 @@
"long-name": "libav ATSC A/52A (AC-3) encoder",
"pad-templates": {
"sink": {
- "caps": "audio/x-raw:\n channels: 1\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 2\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 3\n channel-mask: 0x0000000000000103\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 3\n channel-mask: 0x0000000000000007\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 4\n channel-mask: 0x0000000000000c03\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 4\n channel-mask: 0x0000000000000033\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 4\n channel-mask: 0x0000000000000107\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 5\n channel-mask: 0x0000000000000c07\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 5\n channel-mask: 0x0000000000000037\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 2\n channel-mask: 0x000000000000000c\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 3\n channel-mask: 0x000000000000000b\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 4\n channel-mask: 0x000000000000010b\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 4\n channel-mask: 0x000000000000000f\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 5\n channel-mask: 0x0000000000000c0b\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: 
interleaved\naudio/x-raw:\n channels: 5\n channel-mask: 0x000000000000003b\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 5\n channel-mask: 0x000000000000010f\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 6\n channel-mask: 0x0000000000000c0f\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\naudio/x-raw:\n channels: 6\n channel-mask: 0x000000000000003f\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S16LE\n layout: interleaved\n",
+ "caps": "audio/x-raw:\n channels: 1\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 2\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 3\n channel-mask: 0x0000000000000103\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 3\n channel-mask: 0x0000000000000007\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 4\n channel-mask: 0x0000000000000c03\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 4\n channel-mask: 0x0000000000000033\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 4\n channel-mask: 0x0000000000000107\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 5\n channel-mask: 0x0000000000000c07\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 5\n channel-mask: 0x0000000000000037\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 2\n channel-mask: 0x000000000000000c\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 3\n channel-mask: 0x000000000000000b\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 4\n channel-mask: 0x000000000000010b\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 4\n channel-mask: 0x000000000000000f\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 5\n channel-mask: 0x0000000000000c0b\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: 
interleaved\naudio/x-raw:\n channels: 5\n channel-mask: 0x000000000000003b\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 5\n channel-mask: 0x000000000000010f\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 6\n channel-mask: 0x0000000000000c0f\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\naudio/x-raw:\n channels: 6\n channel-mask: 0x000000000000003f\n rate: { (int)48000, (int)44100, (int)32000 }\n format: S32LE\n layout: interleaved\n",
"direction": "sink",
"presence": "always"
},
@@ -74496,7 +74512,7 @@
"long-name": "libav JPEG 2000 encoder",
"pad-templates": {
"sink": {
- "caps": "video/x-raw:\n format: { RGB, Y444, GRAY8, I420, Y42B, YUV9, Y41B, RGB8P }\n",
+ "caps": "video/x-raw:\n format: { RGB, Y444, GRAY8, I420, Y42B, YUV9, Y41B, RGB8P, GRAY16_LE }\n",
"direction": "sink",
"presence": "always"
},
@@ -139918,7 +139934,7 @@
"presence": "always"
},
"video_%%u": {
- "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n",
+ "caps": "video/x-raw:\n format: { I420, YUY2, RGB, BGR, Y42B, Y444, YUV9, Y41B, GRAY8, RGB8P, I420, Y42B, Y444, UYVY, NV12, NV21, ARGB, RGBA, ABGR, BGRA, GRAY16_BE, GRAY16_LE, A420, RGB16, RGB15, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, GBRA, xRGB, RGBx, xBGR, BGRx, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GBR_12BE, GBR_12LE, P010_10LE, GBRA_12BE, GBRA_12LE, GBRA_10BE, GBRA_10LE }\n",
"direction": "sink",
"presence": "request"
}
diff --git a/subprojects/gst-plugins-bad/docs/plugins/gst_plugins_cache.json b/subprojects/gst-plugins-bad/docs/plugins/gst_plugins_cache.json
index 0d58957d3c..69b7e7d05b 100644
--- a/subprojects/gst-plugins-bad/docs/plugins/gst_plugins_cache.json
+++ b/subprojects/gst-plugins-bad/docs/plugins/gst_plugins_cache.json
@@ -33792,6 +33792,258 @@
},
"rank": "none"
},
+ "ladspa-ladspa-rubberband-so-rubberband-pitchshifter-mono": {
+ "author": "Breakfast Quay, Juan Manuel Borges Caño , Andy Wingo , Steve Baker , Erik Walthinsen , Stefan Sauer , Wim Taymans ",
+ "description": "Rubber Band Mono Pitch Shifter",
+ "hierarchy": [
+ "ladspa-ladspa-rubberband-so-rubberband-pitchshifter-mono",
+ "GstLADSPAFilter",
+ "GstAudioFilter",
+ "GstBaseTransform",
+ "GstElement",
+ "GstObject",
+ "GInitiallyUnowned",
+ "GObject"
+ ],
+ "klass": "Filter/Effect/Audio/LADSPA/Frequency/Pitch shifters",
+ "pad-templates": {
+ "sink": {
+ "caps": "audio/x-raw:\n format: F32LE\n channels: 1\n rate: [ 1, 2147483647 ]\n layout: interleaved\n",
+ "direction": "sink",
+ "presence": "always"
+ },
+ "src": {
+ "caps": "audio/x-raw:\n format: F32LE\n channels: 1\n rate: [ 1, 2147483647 ]\n layout: interleaved\n",
+ "direction": "src",
+ "presence": "always"
+ }
+ },
+ "properties": {
+ "cents": {
+ "blurb": "Cents",
+ "conditionally-available": false,
+ "construct": true,
+ "construct-only": false,
+ "controllable": true,
+ "default": "0",
+ "max": "100",
+ "min": "-100",
+ "mutable": "null",
+ "readable": true,
+ "type": "gfloat",
+ "writable": true
+ },
+ "crispness": {
+ "blurb": "Crispness",
+ "conditionally-available": false,
+ "construct": true,
+ "construct-only": false,
+ "controllable": true,
+ "default": "3",
+ "max": "3",
+ "min": "0",
+ "mutable": "null",
+ "readable": true,
+ "type": "gint",
+ "writable": true
+ },
+ "formant-preserving": {
+ "blurb": "Formant Preserving",
+ "conditionally-available": false,
+ "construct": true,
+ "construct-only": false,
+ "controllable": true,
+ "default": "false",
+ "mutable": "null",
+ "readable": true,
+ "type": "gboolean",
+ "writable": true
+ },
+ "latency": {
+ "blurb": "latency",
+ "conditionally-available": false,
+ "construct": false,
+ "construct-only": false,
+ "controllable": true,
+ "default": "0",
+ "max": "3.40282e+38",
+ "min": "-3.40282e+38",
+ "mutable": "null",
+ "readable": true,
+ "type": "gfloat",
+ "writable": false
+ },
+ "octaves": {
+ "blurb": "Octaves",
+ "conditionally-available": false,
+ "construct": true,
+ "construct-only": false,
+ "controllable": true,
+ "default": "0",
+ "max": "2",
+ "min": "-2",
+ "mutable": "null",
+ "readable": true,
+ "type": "gint",
+ "writable": true
+ },
+ "semitones": {
+ "blurb": "Semitones",
+ "conditionally-available": false,
+ "construct": true,
+ "construct-only": false,
+ "controllable": true,
+ "default": "0",
+ "max": "12",
+ "min": "-12",
+ "mutable": "null",
+ "readable": true,
+ "type": "gint",
+ "writable": true
+ },
+ "wet-dry-mix": {
+ "blurb": "Wet-Dry Mix",
+ "conditionally-available": false,
+ "construct": true,
+ "construct-only": false,
+ "controllable": true,
+ "default": "0",
+ "max": "1",
+ "min": "0",
+ "mutable": "null",
+ "readable": true,
+ "type": "gfloat",
+ "writable": true
+ }
+ },
+ "rank": "none"
+ },
+ "ladspa-ladspa-rubberband-so-rubberband-pitchshifter-stereo": {
+ "author": "Breakfast Quay, Juan Manuel Borges Caño , Andy Wingo , Steve Baker , Erik Walthinsen , Stefan Sauer , Wim Taymans ",
+ "description": "Rubber Band Stereo Pitch Shifter",
+ "hierarchy": [
+ "ladspa-ladspa-rubberband-so-rubberband-pitchshifter-stereo",
+ "GstLADSPAFilter",
+ "GstAudioFilter",
+ "GstBaseTransform",
+ "GstElement",
+ "GstObject",
+ "GInitiallyUnowned",
+ "GObject"
+ ],
+ "klass": "Filter/Effect/Audio/LADSPA/Frequency/Pitch shifters",
+ "pad-templates": {
+ "sink": {
+ "caps": "audio/x-raw:\n format: F32LE\n channels: 2\n rate: [ 1, 2147483647 ]\n layout: interleaved\n",
+ "direction": "sink",
+ "presence": "always"
+ },
+ "src": {
+ "caps": "audio/x-raw:\n format: F32LE\n channels: 2\n rate: [ 1, 2147483647 ]\n layout: interleaved\n",
+ "direction": "src",
+ "presence": "always"
+ }
+ },
+ "properties": {
+ "cents": {
+ "blurb": "Cents",
+ "conditionally-available": false,
+ "construct": true,
+ "construct-only": false,
+ "controllable": true,
+ "default": "0",
+ "max": "100",
+ "min": "-100",
+ "mutable": "null",
+ "readable": true,
+ "type": "gfloat",
+ "writable": true
+ },
+ "crispness": {
+ "blurb": "Crispness",
+ "conditionally-available": false,
+ "construct": true,
+ "construct-only": false,
+ "controllable": true,
+ "default": "3",
+ "max": "3",
+ "min": "0",
+ "mutable": "null",
+ "readable": true,
+ "type": "gint",
+ "writable": true
+ },
+ "formant-preserving": {
+ "blurb": "Formant Preserving",
+ "conditionally-available": false,
+ "construct": true,
+ "construct-only": false,
+ "controllable": true,
+ "default": "false",
+ "mutable": "null",
+ "readable": true,
+ "type": "gboolean",
+ "writable": true
+ },
+ "latency": {
+ "blurb": "latency",
+ "conditionally-available": false,
+ "construct": false,
+ "construct-only": false,
+ "controllable": true,
+ "default": "0",
+ "max": "3.40282e+38",
+ "min": "-3.40282e+38",
+ "mutable": "null",
+ "readable": true,
+ "type": "gfloat",
+ "writable": false
+ },
+ "octaves": {
+ "blurb": "Octaves",
+ "conditionally-available": false,
+ "construct": true,
+ "construct-only": false,
+ "controllable": true,
+ "default": "0",
+ "max": "2",
+ "min": "-2",
+ "mutable": "null",
+ "readable": true,
+ "type": "gint",
+ "writable": true
+ },
+ "semitones": {
+ "blurb": "Semitones",
+ "conditionally-available": false,
+ "construct": true,
+ "construct-only": false,
+ "controllable": true,
+ "default": "0",
+ "max": "12",
+ "min": "-12",
+ "mutable": "null",
+ "readable": true,
+ "type": "gint",
+ "writable": true
+ },
+ "wet-dry-mix": {
+ "blurb": "Wet-Dry Mix",
+ "conditionally-available": false,
+ "construct": true,
+ "construct-only": false,
+ "controllable": true,
+ "default": "0",
+ "max": "1",
+ "min": "0",
+ "mutable": "null",
+ "readable": true,
+ "type": "gfloat",
+ "writable": true
+ }
+ },
+ "rank": "none"
+ },
"ladspa-sine-so-sine-faaa": {
"author": "Richard Furse (LADSPA example plugins), Juan Manuel Borges Caño , Andy Wingo , Steve Baker , Erik Walthinsen , Stefan Sauer , Wim Taymans ",
"description": "Sine Oscillator (Freq:audio, Amp:audio)",
@@ -224786,7 +225038,7 @@
"construct": false,
"construct-only": false,
"controllable": false,
- "default": "/usr/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml",
+ "default": "/usr/share/opencv4/haarcascades/haarcascade_frontalface_default.xml",
"mutable": "null",
"readable": true,
"type": "gchararray",
@@ -224855,7 +225107,7 @@
"construct": false,
"construct-only": false,
"controllable": false,
- "default": "/usr/share/OpenCV/haarcascades/haarcascade_mcs_eyepair_small.xml",
+ "default": "/usr/share/opencv4/haarcascades/haarcascade_mcs_eyepair_small.xml",
"mutable": "null",
"readable": true,
"type": "gchararray",
@@ -224935,7 +225187,7 @@
"construct": false,
"construct-only": false,
"controllable": false,
- "default": "/usr/share/OpenCV/haarcascades/haarcascade_mcs_mouth.xml",
+ "default": "/usr/share/opencv4/haarcascades/haarcascade_mcs_mouth.xml",
"mutable": "null",
"readable": true,
"type": "gchararray",
@@ -224947,7 +225199,7 @@
"construct": false,
"construct-only": false,
"controllable": false,
- "default": "/usr/share/OpenCV/haarcascades/haarcascade_mcs_nose.xml",
+ "default": "/usr/share/opencv4/haarcascades/haarcascade_mcs_nose.xml",
"mutable": "null",
"readable": true,
"type": "gchararray",
@@ -224959,7 +225211,7 @@
"construct": false,
"construct-only": false,
"controllable": false,
- "default": "/usr/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml",
+ "default": "/usr/share/opencv4/haarcascades/haarcascade_frontalface_default.xml",
"mutable": "null",
"readable": true,
"type": "gchararray",
@@ -233304,6 +233556,30 @@
}
},
"properties": {
+ "authentication": {
+ "blurb": "Authenticate a connection",
+ "conditionally-available": false,
+ "construct": false,
+ "construct-only": false,
+ "controllable": false,
+ "default": "true",
+ "mutable": "null",
+ "readable": true,
+ "type": "gboolean",
+ "writable": true
+ },
+ "auto-reconnect": {
+ "blurb": "Automatically reconnect when connection fails",
+ "conditionally-available": false,
+ "construct": false,
+ "construct-only": false,
+ "controllable": false,
+ "default": "true",
+ "mutable": "null",
+ "readable": true,
+ "type": "gboolean",
+ "writable": true
+ },
"latency": {
"blurb": "Minimum latency (milliseconds)",
"conditionally-available": false,
@@ -233386,7 +233662,7 @@
"construct": false,
"construct-only": false,
"controllable": false,
- "default": "-1",
+ "default": "1000",
"max": "2147483647",
"min": "-1",
"mutable": "ready",
@@ -233459,6 +233735,34 @@
"return-type": "void",
"when": "last"
},
+ "caller-connecting": {
+ "args": [
+ {
+ "name": "arg0",
+ "type": "GSocketAddress"
+ },
+ {
+ "name": "arg1",
+ "type": "gchararray"
+ }
+ ],
+ "return-type": "gboolean",
+ "when": "last"
+ },
+ "caller-rejected": {
+ "args": [
+ {
+ "name": "arg0",
+ "type": "GSocketAddress"
+ },
+ {
+ "name": "arg1",
+ "type": "gchararray"
+ }
+ ],
+ "return-type": "void",
+ "when": "last"
+ },
"caller-removed": {
"args": [
{
@@ -233500,6 +233804,42 @@
}
},
"properties": {
+ "authentication": {
+ "blurb": "Authenticate a connection",
+ "conditionally-available": false,
+ "construct": false,
+ "construct-only": false,
+ "controllable": false,
+ "default": "true",
+ "mutable": "null",
+ "readable": true,
+ "type": "gboolean",
+ "writable": true
+ },
+ "auto-reconnect": {
+ "blurb": "Automatically reconnect when connection fails",
+ "conditionally-available": false,
+ "construct": false,
+ "construct-only": false,
+ "controllable": false,
+ "default": "true",
+ "mutable": "null",
+ "readable": true,
+ "type": "gboolean",
+ "writable": true
+ },
+ "keep-listening": {
+ "blurb": "Toggle keep-listening for connection reuse",
+ "conditionally-available": false,
+ "construct": false,
+ "construct-only": false,
+ "controllable": false,
+ "default": "false",
+ "mutable": "null",
+ "readable": true,
+ "type": "gboolean",
+ "writable": true
+ },
"latency": {
"blurb": "Minimum latency (milliseconds)",
"conditionally-available": false,
@@ -233582,7 +233922,7 @@
"construct": false,
"construct-only": false,
"controllable": false,
- "default": "-1",
+ "default": "1000",
"max": "2147483647",
"min": "-1",
"mutable": "ready",
@@ -233655,6 +233995,34 @@
"return-type": "void",
"when": "last"
},
+ "caller-connecting": {
+ "args": [
+ {
+ "name": "arg0",
+ "type": "GSocketAddress"
+ },
+ {
+ "name": "arg1",
+ "type": "gchararray"
+ }
+ ],
+ "return-type": "gboolean",
+ "when": "last"
+ },
+ "caller-rejected": {
+ "args": [
+ {
+ "name": "arg0",
+ "type": "GSocketAddress"
+ },
+ {
+ "name": "arg1",
+ "type": "gchararray"
+ }
+ ],
+ "return-type": "void",
+ "when": "last"
+ },
"caller-removed": {
"args": [
{