diff --git a/Cargo.toml b/Cargo.toml
index 6caab4027..110be8b8a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,3 +1,10 @@
[workspace]
-members = ["gstreamer-sys"]
+members = [
+ "gstreamer-sys",
+ "gstreamer-base-sys",
+ "gstreamer-tag-sys",
+ "gstreamer-audio-sys",
+ "gstreamer-video-sys",
+ "gstreamer-pbutils-sys",
+]
diff --git a/Gir_GstAudio.toml b/Gir_GstAudio.toml
new file mode 100644
index 000000000..134fff11f
--- /dev/null
+++ b/Gir_GstAudio.toml
@@ -0,0 +1,15 @@
+[options]
+girs_dir = "gir-files"
+library = "GstAudio"
+version = "1.0"
+min_cfg_version = "1.0"
+target_path = "gstreamer-audio-sys"
+work_mode = "sys"
+
+external_libraries = [
+ "GLib",
+ "GObject",
+ "Gst",
+ "GstBase",
+ "GstTag",
+]
diff --git a/Gir_GstBase.toml b/Gir_GstBase.toml
new file mode 100644
index 000000000..bb2d2389b
--- /dev/null
+++ b/Gir_GstBase.toml
@@ -0,0 +1,13 @@
+[options]
+girs_dir = "gir-files"
+library = "GstBase"
+version = "1.0"
+min_cfg_version = "1.0"
+target_path = "gstreamer-base-sys"
+work_mode = "sys"
+
+external_libraries = [
+ "GLib",
+ "GObject",
+ "Gst",
+]
diff --git a/Gir_GstPbutils.toml b/Gir_GstPbutils.toml
new file mode 100644
index 000000000..d354ff4c0
--- /dev/null
+++ b/Gir_GstPbutils.toml
@@ -0,0 +1,16 @@
+[options]
+girs_dir = "gir-files"
+library = "GstPbutils"
+version = "1.0"
+min_cfg_version = "1.0"
+target_path = "gstreamer-pbutils-sys"
+work_mode = "sys"
+
+external_libraries = [
+ "GLib",
+ "GObject",
+ "Gst",
+ "GstTag",
+ "GstAudio",
+ "GstVideo",
+]
diff --git a/Gir_GstTag.toml b/Gir_GstTag.toml
new file mode 100644
index 000000000..d9df5df79
--- /dev/null
+++ b/Gir_GstTag.toml
@@ -0,0 +1,14 @@
+[options]
+girs_dir = "gir-files"
+library = "GstTag"
+version = "1.0"
+min_cfg_version = "1.0"
+target_path = "gstreamer-tag-sys"
+work_mode = "sys"
+
+external_libraries = [
+ "GLib",
+ "GObject",
+ "Gst",
+ "GstBase",
+]
diff --git a/Gir_GstVideo.toml b/Gir_GstVideo.toml
new file mode 100644
index 000000000..a1142df1e
--- /dev/null
+++ b/Gir_GstVideo.toml
@@ -0,0 +1,14 @@
+[options]
+girs_dir = "gir-files"
+library = "GstVideo"
+version = "1.0"
+min_cfg_version = "1.0"
+target_path = "gstreamer-video-sys"
+work_mode = "sys"
+
+external_libraries = [
+ "GLib",
+ "GObject",
+ "Gst",
+ "GstBase",
+]
diff --git a/gir-files/GstAudio-1.0.gir b/gir-files/GstAudio-1.0.gir
new file mode 100644
index 000000000..e432547c3
--- /dev/null
+++ b/gir-files/GstAudio-1.0.gir
@@ -0,0 +1,8930 @@
+
+
+
+
+
+
+
+
+
+
+ Maximum range of allowed channels, for use in template caps strings.
+
+
+
+ #GST_TYPE_AUDIO_DITHER_METHOD, The dither method to use when
+changing bit depth.
+Default is #GST_AUDIO_DITHER_NONE.
+
+
+
+ #GST_TYPE_AUDIO_NOISE_SHAPING_METHOD, The noise shaping method to use
+to mask noise from quantization errors.
+Default is #GST_AUDIO_NOISE_SHAPING_NONE.
+
+
+
+ #G_TYPE_UINT, The quantization amount. Components will be
+quantized to multiples of this value.
+Default is 1
+
+
+
+ #GST_TYPE_AUDIO_RESAMPLER_METHOD, The resampler method to use when
+changing sample rates.
+Default is #GST_AUDIO_RESAMPLER_METHOD_BLACKMAN_NUTTALL.
+
+
+
+ Default maximum number of errors tolerated before signaling error.
+
+
+
+ The name of the templates for the sink pad.
+
+
+
+ The name of the templates for the source pad.
+
+
+
+ Standard number of channels used in consumer audio.
+
+
+
+ Standard format used in consumer audio.
+
+
+
+ Standard sampling rate used in consumer audio.
+
+
+
+ the name of the templates for the sink pad
+
+
+
+ the name of the templates for the source pad
+
+
+
+ List of all audio formats, for use in template caps strings.
+
+
+
+ Maximum range of allowed sample rates, for use in template caps strings.
+
+
+
+ G_TYPE_DOUBLE, B parameter of the cubic filter.
+Values between 0.0 and 2.0 are accepted. 1.0 is the default.
+
+Below are some values of popular filters:
+ B C
+Hermite 0.0 0.0
+Spline 1.0 0.0
+Catmull-Rom 0.0 1/2
+
+
+
+ G_TYPE_DOUBLE, C parameter of the cubic filter.
+Values between 0.0 and 2.0 are accepted. 0.0 is the default.
+
+See #GST_AUDIO_RESAMPLER_OPT_CUBIC_B for some more common values
+
+
+
+ G_TYPE_DOUBLE, Cutoff parameter for the filter. 0.940 is the default.
+
+
+
+ GST_TYPE_AUDIO_RESAMPLER_INTERPOLATION: how the filter coefficients should be
+ interpolated.
+GST_AUDIO_RESAMPLER_FILTER_INTERPOLATION_CUBIC is default.
+
+
+
+ GST_TYPE_AUDIO_RESAMPLER_FILTER_MODE: how the filter tables should be
+constructed.
+GST_AUDIO_RESAMPLER_FILTER_MODE_AUTO is the default.
+
+
+
+ G_TYPE_UINT: the amount of memory to use for full filter tables before
+switching to interpolated filter tables.
+1048576 is the default.
+
+
+
+ G_TYPE_UINT, oversampling to use when interpolating filters
+8 is the default.
+
+
+
+ G_TYPE_DOUBLE: The maximum allowed phase error when switching sample
+rates.
+0.1 is the default.
+
+
+
+ G_TYPE_INT: the number of taps to use for the filter.
+0 is the default and selects the taps automatically.
+
+
+
+
+
+
+ G_TYPE_DOUBLE, transition bandwidth. The width of the
+transition band for the kaiser window. 0.087 is the default.
+
+
+
+
+
+
+
+
+
+
+
+
+ This is the base class for audio sinks. Subclasses need to implement the
+::create_ringbuffer vmethod. This base class will then take care of
+writing samples to the ringbuffer, synchronisation, clipping and flushing.
+
+ Create and return the #GstAudioRingBuffer for @sink. This function will
+call the ::create_ringbuffer vmethod and will set @sink as the parent of
+the returned buffer (see gst_object_set_parent()).
+
+ The new ringbuffer of @sink.
+
+
+
+
+ a #GstAudioBaseSink.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Create and return the #GstAudioRingBuffer for @sink. This function will
+call the ::create_ringbuffer vmethod and will set @sink as the parent of
+the returned buffer (see gst_object_set_parent()).
+
+ The new ringbuffer of @sink.
+
+
+
+
+ a #GstAudioBaseSink.
+
+
+
+
+
+ Get the current alignment threshold, in nanoseconds, used by @sink.
+
+ The current alignment threshold used by @sink.
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+
+
+ Get the current discont wait, in nanoseconds, used by @sink.
+
+ The current discont wait used by @sink.
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+
+
+ Get the current drift tolerance, in microseconds, used by @sink.
+
+ The current drift tolerance used by @sink.
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+
+
+ Queries whether @sink will provide a clock or not. See also
+gst_audio_base_sink_set_provide_clock.
+
+ %TRUE if @sink will provide a clock.
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+
+
+ Get the current slave method used by @sink.
+
+ The current slave method used by @sink.
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+
+
+ Informs this base class that the audio output device has failed for
+some reason, causing a discontinuity (for example, because the device
+recovered from the error, but lost all contents of its ring buffer).
+This function is typically called by derived classes, and is useful
+for the custom slave method.
+
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+
+
+ Controls the sink's alignment threshold.
+
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+ the new alignment threshold in nanoseconds
+
+
+
+
+
+ Sets the custom slaving callback. This callback will
+be invoked if the slave-method property is set to
+GST_AUDIO_BASE_SINK_SLAVE_CUSTOM and the audio sink
+receives and plays samples.
+
+Setting the callback to NULL causes the sink to
+behave as if the GST_AUDIO_BASE_SINK_SLAVE_NONE
+method were used.
+
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+ a #GstAudioBaseSinkCustomSlavingCallback
+
+
+
+ user data passed to the callback
+
+
+
+ called when user_data becomes unused
+
+
+
+
+
+ Controls how long the sink will wait before creating a discontinuity.
+
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+ the new discont wait in nanoseconds
+
+
+
+
+
+ Controls the sink's drift tolerance.
+
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+ the new drift tolerance in microseconds
+
+
+
+
+
+ Controls whether @sink will provide a clock or not. If @provide is %TRUE,
+gst_element_provide_clock() will return a clock that reflects the datarate
+of @sink. If @provide is %FALSE, gst_element_provide_clock() will return
+NULL.
+
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+ new state
+
+
+
+
+
+ Controls how clock slaving will be performed in @sink.
+
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+ the new slave method
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ A window of time in nanoseconds to wait before creating a discontinuity as
+a result of breaching the drift-tolerance.
+
+
+
+ Controls the amount of time in microseconds that clocks are allowed
+to drift before resynchronisation happens.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #GstAudioBaseSink class. Override the vmethod to implement
+functionality.
+
+ the parent class.
+
+
+
+
+
+ The new ringbuffer of @sink.
+
+
+
+
+ a #GstAudioBaseSink.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This function is set with gst_audio_base_sink_set_custom_slaving_callback()
+and is called during playback. It receives the current time of external and
+internal clocks, which the callback can then use to apply any custom
+slaving/synchronization schemes.
+
+The external clock is the sink's element clock, the internal one is the
+internal audio clock. The internal audio clock's calibration is applied to
+the timestamps before they are passed to the callback. The difference between
+etime and itime is the skew; how much internal and external clock lie apart
+from each other. A skew of 0 means both clocks are perfectly in sync.
+itime > etime means the external clock is going slower, while itime < etime
+means it is going faster than the internal clock. etime and itime are always
+valid timestamps, except for when a discontinuity happens.
+
+requested_skew is an output value the callback can write to. It informs the
+sink of whether or not it should move the playout pointer, and if so, by how
+much. This pointer is only NULL if a discontinuity occurs; otherwise, it is
+safe to write to *requested_skew. The default skew is 0.
+
+The sink may experience discontinuities. If one happens, discont is TRUE,
+itime, etime are set to GST_CLOCK_TIME_NONE, and requested_skew is NULL.
+This makes it possible to reset custom clock slaving algorithms when a
+discontinuity happens.
+
+
+
+
+
+ a #GstAudioBaseSink
+
+
+
+ external clock time
+
+
+
+ internal clock time
+
+
+
+ skew amount requested by the callback
+
+
+
+ reason for discontinuity (if any)
+
+
+
+ user data
+
+
+
+
+
+ Different possible reasons for discontinuities. This enum is useful for the custom
+slave method.
+
+ No discontinuity occurred
+
+
+ New caps are set, causing renegotiation
+
+
+ Samples have been flushed
+
+
+ Sink was synchronized to the estimated latency (occurs during initialization)
+
+
+ Aligning buffers failed because the timestamps are too discontinuous
+
+
+ Audio output device experienced and recovered from an error but introduced latency in the process (see also @gst_audio_base_sink_report_device_failure())
+
+
+
+
+
+ Different possible clock slaving algorithms used when the internal audio
+clock is not selected as the pipeline master clock.
+
+ Resample to match the master clock
+
+
+ Adjust playout pointer when master clock
+drifts too much.
+
+
+ No adjustment is done.
+
+
+ Use custom clock slaving algorithm (Since: 1.6)
+
+
+
+ This is the base class for audio sources. Subclasses need to implement the
+::create_ringbuffer vmethod. This base class will then take care of
+reading samples from the ringbuffer, synchronisation and flushing.
+
+ Create and return the #GstAudioRingBuffer for @src. This function will call
+the ::create_ringbuffer vmethod and will set @src as the parent of the
+returned buffer (see gst_object_set_parent()).
+
+ The new ringbuffer of @src.
+
+
+
+
+ a #GstAudioBaseSrc.
+
+
+
+
+
+ Create and return the #GstAudioRingBuffer for @src. This function will call
+the ::create_ringbuffer vmethod and will set @src as the parent of the
+returned buffer (see gst_object_set_parent()).
+
+ The new ringbuffer of @src.
+
+
+
+
+ a #GstAudioBaseSrc.
+
+
+
+
+
+ Queries whether @src will provide a clock or not. See also
+gst_audio_base_src_set_provide_clock.
+
+ %TRUE if @src will provide a clock.
+
+
+
+
+ a #GstAudioBaseSrc
+
+
+
+
+
+ Get the current slave method used by @src.
+
+ The current slave method used by @src.
+
+
+
+
+ a #GstAudioBaseSrc
+
+
+
+
+
+ Controls whether @src will provide a clock or not. If @provide is %TRUE,
+gst_element_provide_clock() will return a clock that reflects the datarate
+of @src. If @provide is %FALSE, gst_element_provide_clock() will return NULL.
+
+
+
+
+
+ a #GstAudioBaseSrc
+
+
+
+ new state
+
+
+
+
+
+ Controls how clock slaving will be performed in @src.
+
+
+
+
+
+ a #GstAudioBaseSrc
+
+
+
+ the new slave method
+
+
+
+
+
+ Actual configured size of audio buffer in microseconds.
+
+
+
+ Actual configured audio latency in microseconds.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #GstAudioBaseSrc class. Override the vmethod to implement
+functionality.
+
+ the parent class.
+
+
+
+
+
+ The new ringbuffer of @src.
+
+
+
+
+ a #GstAudioBaseSrc.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Different possible clock slaving algorithms when the internal audio clock was
+not selected as the pipeline clock.
+
+ Resample to match the master clock.
+
+
+ Retimestamp output buffers with master
+clock time.
+
+
+ Adjust capture pointer when master clock
+drifts too much.
+
+
+ No adjustment is done.
+
+
+
+ Provides a base class for CD digital audio (CDDA) sources, which handles
+things like seeking, querying, discid calculation, tags, and buffer
+timestamping.
+
+## Using GstAudioCdSrc-based elements in applications
+
+GstAudioCdSrc registers two #GstFormat<!-- -->s of its own, namely
+the "track" format and the "sector" format. Applications will usually
+only find the "track" format interesting. You can retrieve that #GstFormat
+for use in seek events or queries with gst_format_get_by_nick("track").
+
+In order to query the number of tracks, for example, an application would
+set the CDDA source element to READY or PAUSED state and then query the
+the number of tracks via gst_element_query_duration() using the track
+format acquired above. Applications can query the currently playing track
+in the same way.
+
+Alternatively, applications may retrieve the currently playing track and
+the total number of tracks from the taglist that will posted on the bus
+whenever the CD is opened or the currently playing track changes. The
+taglist will contain GST_TAG_TRACK_NUMBER and GST_TAG_TRACK_COUNT tags.
+
+Applications playing back CD audio using playbin and cdda://n URIs should
+issue a seek command in track format to change between tracks, rather than
+setting a new cdda://n+1 URI on playbin (as setting a new URI on playbin
+involves closing and re-opening the CD device, which is much much slower).
+
+## Tags and meta-information
+
+CDDA sources will automatically emit a number of tags, details about which
+can be found in the libgsttag documentation. Those tags are:
+#GST_TAG_CDDA_CDDB_DISCID, #GST_TAG_CDDA_CDDB_DISCID_FULL,
+#GST_TAG_CDDA_MUSICBRAINZ_DISCID, #GST_TAG_CDDA_MUSICBRAINZ_DISCID_FULL,
+among others.
+
+## Tracks and Table of Contents (TOC)
+
+Applications will be informed of the available tracks via a TOC message
+on the pipeline's #GstBus. The #GstToc will contain a #GstTocEntry for
+each track, with information about each track. The duration for each
+track can be retrieved via the #GST_TAG_DURATION tag from each entry's
+tag list, or calculated via gst_toc_entry_get_start_stop_times().
+The track entries in the TOC will be sorted by track number.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ CDDA sources use this function from their start vfunc to announce the
+available data and audio tracks to the base source class. The caller
+should allocate @track on the stack, the base source will do a shallow
+copy of the structure (and take ownership of the taglist if there is one).
+
+ FALSE on error, otherwise TRUE.
+
+
+
+
+ a #GstAudioCdSrc
+
+
+
+ address of #GstAudioCdSrcTrack to add
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Audio CD source base class.
+
+ the parent class
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Mode in which the CD audio source operates. Influences timestamping,
+EOS handling and seeking.
+
+ each single track is a stream
+
+
+ the entire disc is a single stream
+
+
+
+
+
+ CD track abstraction to communicate TOC entries to the base class.
+
+This structure is only for use by sub-classed in connection with
+gst_audio_cd_src_add_track().
+
+Applications will be informed of the available tracks via a TOC message
+on the pipeline's #GstBus instead.
+
+ Whether this is an audio track
+
+
+
+ Track number in TOC (usually starts from 1, but not always)
+
+
+
+ The first sector of this track (LBA)
+
+
+
+ The last sector of this track (LBA)
+
+
+
+ Track-specific tags (e.g. from cd-text information), or NULL
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Free memory allocated by @mix.
+
+
+
+
+
+ a #GstAudioChannelMixer
+
+
+
+
+
+ Check if @mix is in passthrough.
+
+ %TRUE if @mix is passthrough.
+
+
+
+
+ a #GstAudioChannelMixer
+
+
+
+
+
+ In case the samples are interleaved, @in and @out must point to an
+array with a single element pointing to a block of interleaved samples.
+
+If non-interleaved samples are used, @in and @out must point to an
+array with pointers to memory blocks, one for each channel.
+
+Perform channel mixing on @in_data and write the result to @out_data.
+@in_data and @out_data need to be in @format and @layout.
+
+
+
+
+
+ a #GstAudioChannelMixer
+
+
+
+ input samples
+
+
+
+ output samples
+
+
+
+ number of samples
+
+
+
+
+
+ Create a new channel mixer object for the given parameters.
+
+ a new #GstAudioChannelMixer object. Free with gst_audio_channel_mixer_free()
+after usage.
+
+
+
+
+ #GstAudioChannelMixerFlags
+
+
+
+
+
+
+ number of input channels
+
+
+
+ positions of input channels
+
+
+
+ number of output channels
+
+
+
+ positions of output channels
+
+
+
+
+
+
+ Flags passed to gst_audio_channel_mixer_new()
+
+ no flag
+
+
+ input channels are not interleaved
+
+
+ output channels are not interleaved
+
+
+ input channels are explicitly unpositioned
+
+
+ output channels are explicitly unpositioned
+
+
+
+ Audio channel positions.
+
+These are the channels defined in SMPTE 2036-2-2008
+Table 1 for 22.2 audio systems with the Surround and Wide channels from
+DTS Coherent Acoustics (v.1.3.1) and 10.2 and 7.1 layouts. In the caps the
+actual channel layout is expressed with a channel count and a channel mask,
+which describes the existing channels. The positions in the bit mask correspond
+to the enum values.
+For negotiation it is allowed to have more bits set in the channel mask than
+the number of channels to specify the allowed channel positions but this is
+not allowed in negotiated caps. It is not allowed in any situation other
+than the one mentioned below to have less bits set in the channel mask than
+the number of channels.
+
+@GST_AUDIO_CHANNEL_POSITION_MONO can only be used with a single mono channel that
+has no direction information and would be mixed into all directional channels.
+This is expressed in caps by having a single channel and no channel mask.
+
+@GST_AUDIO_CHANNEL_POSITION_NONE can only be used if all channels have this position.
+This is expressed in caps by having a channel mask with no bits set.
+
+As another special case it is allowed to have two channels without a channel mask.
+This implicitly means that this is a stereo stream with a front left and front right
+channel.
+
+ used for position-less channels, e.g.
+ from a sound card that records 1024 channels; mutually exclusive with
+ any other channel position
+
+
+ Mono without direction;
+ can only be used with 1 channel
+
+
+ invalid position
+
+
+ Front left
+
+
+ Front right
+
+
+ Front center
+
+
+ Low-frequency effects 1 (subwoofer)
+
+
+ Rear left
+
+
+ Rear right
+
+
+ Front left of center
+
+
+ Front right of center
+
+
+ Rear center
+
+
+ Low-frequency effects 2 (subwoofer)
+
+
+ Side left
+
+
+ Side right
+
+
+ Top front left
+
+
+ Top front right
+
+
+ Top front center
+
+
+ Top center
+
+
+ Top rear left
+
+
+ Top rear right
+
+
+ Top side left
+
+
+ Top side right
+
+
+ Top rear center
+
+
+ Bottom front center
+
+
+ Bottom front left
+
+
+ Bottom front right
+
+
+ Wide left (between front left and side left)
+
+
+ Wide right (between front right and side right)
+
+
+ Surround left (between rear left and side left)
+
+
+ Surround right (between rear right and side right)
+
+
+
+ Extra buffer metadata describing how much audio has to be clipped from
+the start or end of a buffer. This is used for compressed formats, where
+the first frame usually has some additional samples due to encoder and
+decoder delays, and the last frame usually has some additional samples to
+be able to fill the complete last frame.
+
+This is used to ensure that decoded data in the end has the same amount of
+samples, and multiple decoded streams can be gaplessly concatenated.
+
+Note: If clipping of the start is done by adjusting the segment, this meta
+has to be dropped from buffers as otherwise clipping could happen twice.
+
+ parent #GstMeta
+
+
+
+ GstFormat of @start and @stop, GST_FORMAT_DEFAULT is samples
+
+
+
+ Amount of audio to clip from start of buffer
+
+
+
+ Amount of audio to clip from end of buffer
+
+
+
+
+
+
+
+
+
+ #GstAudioClock makes it easy for elements to implement a #GstClock, they
+simply need to provide a function that returns the current clock time.
+
+This object is internally used to implement the clock in #GstAudioBaseSink.
+
+ Create a new #GstAudioClock instance. Whenever the clock time should be
+calculated it will call @func with @user_data. When @func returns
+#GST_CLOCK_TIME_NONE, the clock will return the last reported time.
+
+ a new #GstAudioClock casted to a #GstClock.
+
+
+
+
+ the name of the clock
+
+
+
+ a function
+
+
+
+ user data
+
+
+
+ #GDestroyNotify for @user_data
+
+
+
+
+
+ Adjust @time with the internal offset of the audio clock.
+
+ @time adjusted with the internal offset.
+
+
+
+
+ a #GstAudioClock
+
+
+
+ a #GstClockTime
+
+
+
+
+
+ Report the time as returned by the #GstAudioClockGetTimeFunc without applying
+any offsets.
+
+ the time as reported by the time function of the audio clock
+
+
+
+
+ a #GstAudioClock
+
+
+
+
+
+ Invalidate the clock function. Call this function when the provided
+#GstAudioClockGetTimeFunc cannot be called anymore, for example, when the
+user_data becomes invalid.
+
+After calling this function, @clock will return the last returned time for
+the rest of its lifetime.
+
+
+
+
+
+ a #GstAudioClock
+
+
+
+
+
+ Inform @clock that future calls to #GstAudioClockGetTimeFunc will return values
+starting from @time. The clock will update an internal offset to make sure that
+future calls to internal_time will return an increasing result as required by
+the #GstClock object.
+
+
+
+
+
+ a #GstAudioClock
+
+
+
+ a #GstClockTime
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This function will be called whenever the current clock time needs to be
+calculated. If this function returns #GST_CLOCK_TIME_NONE, the last reported
+time will be returned by the clock.
+
+ the current time or #GST_CLOCK_TIME_NONE if the previous time should
+be used.
+
+
+
+
+ the #GstAudioClock
+
+
+
+ user data
+
+
+
+
+
+
+ Free a previously allocated @convert instance.
+
+
+
+
+
+ a #GstAudioConverter
+
+
+
+
+
+ Get the current configuration of @convert.
+
+ a #GstStructure that remains valid for as long as @convert is valid
+ or until gst_audio_converter_update_config() is called.
+
+
+
+
+ a #GstAudioConverter
+
+
+
+ result input rate
+
+
+
+ result output rate
+
+
+
+
+
+ Calculate how many input frames are currently needed by @convert to produce
+@out_frames of output frames.
+
+ the number of input frames
+
+
+
+
+ a #GstAudioConverter
+
+
+
+ number of output frames
+
+
+
+
+
+ Get the maximum number of input frames that the converter would
+need before producing output.
+
+ the latency of @convert as expressed in the number of
+frames.
+
+
+
+
+ a #GstAudioConverter
+
+
+
+
+
+ Calculate how many output frames can be produced when @in_frames input
+frames are given to @convert.
+
+ the number of output frames
+
+
+
+
+ a #GstAudioConverter
+
+
+
+ number of input frames
+
+
+
+
+
+ Reset @convert to the state it was when it was first created, clearing
+any history it might currently have.
+
+
+
+
+
+ a #GstAudioConverter
+
+
+
+
+
+ Perform the conversion with @in_frames in @in to @out_frames in @out
+using @convert.
+
+In case the samples are interleaved, @in and @out must point to an
+array with a single element pointing to a block of interleaved samples.
+
+If non-interleaved samples are used, @in and @out must point to an
+array with pointers to memory blocks, one for each channel.
+
+@in may be %NULL, in which case @in_frames of silence samples are processed
+by the converter.
+
+This function always produces @out_frames of output and consumes @in_frames of
+input. Use gst_audio_converter_get_out_frames() and
+gst_audio_converter_get_in_frames() to make sure @in_frames and @out_frames
+are matching and @in and @out point to enough memory.
+
+ %TRUE if the conversion could be performed.
+
+
+
+
+ a #GstAudioConverter
+
+
+
+ extra #GstAudioConverterFlags
+
+
+
+ input frames
+
+
+
+ number of input frames
+
+
+
+ output frames
+
+
+
+ number of output frames
+
+
+
+
+
+ Returns whether the audio converter can perform the conversion in-place.
+The return value would be typically input to gst_base_transform_set_in_place()
+
+ %TRUE when the conversion can be done in place.
+
+
+
+
+ a #GstAudioConverter
+
+
+
+
+
+ Set @in_rate, @out_rate and @config as extra configuration for @convert.
+
+@in_rate and @out_rate specify the new sample rates of input and output
+formats. A value of 0 leaves the sample rate unchanged.
+
+@config can be %NULL, in which case, the current configuration is not
+changed.
+
+If the parameters in @config can not be set exactly, this function returns
+%FALSE and will try to update as much state as possible. The new state can
+then be retrieved and refined with gst_audio_converter_get_config().
+
+Look at the #GST_AUDIO_CONVERTER_OPT_* fields to check valid configuration
+option and values.
+
+ %TRUE when the new parameters could be set
+
+
+
+
+ a #GstAudioConverter
+
+
+
+ input rate
+
+
+
+ output rate
+
+
+
+ a #GstStructure or %NULL
+
+
+
+
+
+ Create a new #GstAudioConverter that is able to convert between @in and @out
+audio formats.
+
+@config contains extra configuration options, see #GST_VIDEO_CONVERTER_OPT_*
+parameters for details about the options and values.
+
+ a #GstAudioConverter or %NULL if conversion is not possible.
+
+
+
+
+ extra #GstAudioConverterFlags
+
+
+
+ a source #GstAudioInfo
+
+
+
+ a destination #GstAudioInfo
+
+
+
+ a #GstStructure with configuration options
+
+
+
+
+
+
+ Extra flags passed to gst_audio_converter_new() and gst_audio_converter_samples().
+
+ no flag
+
+
+ the input sample arrays are writable and can be
+ used as temporary storage during conversion.
+
+
+ allow arbitrary rate updates with
+ gst_audio_converter_update_config().
+
+
+
+ This base class is for audio decoders turning encoded data into
+raw audio samples.
+
+GstAudioDecoder and subclass should cooperate as follows.
+
+## Configuration
+
+ * Initially, GstAudioDecoder calls @start when the decoder element
+ is activated, which allows subclass to perform any global setup.
+ Base class (context) parameters can already be set according to subclass
+ capabilities (or possibly upon receive more information in subsequent
+ @set_format).
+ * GstAudioDecoder calls @set_format to inform subclass of the format
+ of input audio data that it is about to receive.
+ While unlikely, it might be called more than once, if changing input
+ parameters require reconfiguration.
+ * GstAudioDecoder calls @stop at end of all processing.
+
+As of configuration stage, and throughout processing, GstAudioDecoder
+provides various (context) parameters, e.g. describing the format of
+output audio data (valid when output caps have been set) or current parsing state.
+Conversely, subclass can and should configure context to inform
+base class of its expectation w.r.t. buffer handling.
+
+## Data processing
+ * Base class gathers input data, and optionally allows subclass
+ to parse this into subsequently manageable (as defined by subclass)
+ chunks. Such chunks are subsequently referred to as 'frames',
+ though they may or may not correspond to 1 (or more) audio format frame.
+ * Input frame is provided to subclass' @handle_frame.
+ * If codec processing results in decoded data, subclass should call
+ @gst_audio_decoder_finish_frame to have decoded data pushed
+ downstream.
+ * Just prior to actually pushing a buffer downstream,
+ it is passed to @pre_push. Subclass should either use this callback
+ to arrange for additional downstream pushing or otherwise ensure such
+ custom pushing occurs after at least a method call has finished since
+ setting src pad caps.
+ * During the parsing process GstAudioDecoderClass will handle both
+ srcpad and sinkpad events. Sink events will be passed to subclass
+ if @event callback has been provided.
+
+## Shutdown phase
+
+ * GstAudioDecoder class calls @stop to inform the subclass that data
+ parsing will be stopped.
+
+Subclass is responsible for providing pad template caps for
+source and sink pads. The pads need to be named "sink" and "src". It also
+needs to set the fixed caps on srcpad, when the format is ensured. This
+is typically when base class calls subclass' @set_format function, though
+it might be delayed until calling @gst_audio_decoder_finish_frame.
+
+In summary, above process should have subclass concentrating on
+codec data processing while leaving other matters to base class,
+such as most notably timestamp handling. While it may exert more control
+in this area (see e.g. @pre_push), it is very much not recommended.
+
+In particular, base class will try to arrange for perfect output timestamps
+as much as possible while tracking upstream timestamps.
+To this end, if deviation between the next ideal expected perfect timestamp
+and upstream exceeds #GstAudioDecoder:tolerance, then resync to upstream
+occurs (which would happen always if the tolerance mechanism is disabled).
+
+In non-live pipelines, baseclass can also (configurably) arrange for
+output buffer aggregation which may help to reduce large(r) numbers of
+small(er) buffers being pushed and processed downstream.
+
+On the other hand, it should be noted that baseclass only provides limited
+seeking support (upon explicit subclass request), as full-fledged support
+should rather be left to upstream demuxer, parser or alike. This simple
+approach caters for seeking and duration reporting using estimated input
+bitrates.
+
+Things that subclass need to take care of:
+
+ * Provide pad templates
+ * Set source pad caps when appropriate
+ * Set user-configurable properties to sane defaults for format and
+ implementing codec at hand, and convey some subclass capabilities and
+ expectations in context.
+
+ * Accept data in @handle_frame and provide encoded results to
+ @gst_audio_decoder_finish_frame. If it is prepared to perform
+ PLC, it should also accept NULL data in @handle_frame and provide for
+ data for indicated duration.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Negotiate with downstream elements to currently configured #GstAudioInfo.
+Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
+negotiate fails.
+
+ #TRUE if the negotiation succeeded, else #FALSE.
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Helper function that allocates a buffer to hold an audio frame
+for @dec's current output format.
+
+ allocated buffer
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ size of the buffer
+
+
+
+
+
+ Collects decoded data and pushes it downstream.
+
+@buf may be NULL in which case the indicated number of frames
+are discarded and considered to have produced no output
+(e.g. lead-in or setup frames).
+Otherwise, source pad caps must be set when it is called with valid
+data in @buf.
+
+Note that a frame received in gst_audio_decoder_handle_frame() may be
+invalidated by a call to this function.
+
+ a #GstFlowReturn that should be escalated to caller (of caller)
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ decoded data
+
+
+
+ number of decoded frames represented by decoded data
+
+
+
+
+
+ Lets #GstAudioDecoder sub-classes know the memory @allocator
+used by the base class and its @params.
+
+Unref the @allocator after use.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ the #GstAllocator
+used
+
+
+
+ the
+#GstAllocatorParams of @allocator
+
+
+
+
+
+
+ a #GstAudioInfo describing the input audio format
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+
+ currently configured decoder delay
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+ Queries decoder drain handling.
+
+ TRUE if drainable handling is enabled.
+
+MT safe.
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+
+ currently configured byte to time conversion setting
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+ Sets the variables pointed to by @min and @max to the currently configured
+latency.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ a pointer to storage to hold minimum latency
+
+
+
+ a pointer to storage to hold maximum latency
+
+
+
+
+
+
+ currently configured decoder tolerated error count.
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+ Queries decoder's latency aggregation.
+
+ aggregation latency.
+
+MT safe.
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+ Queries decoder required format handling.
+
+ TRUE if required format handling is enabled.
+
+MT safe.
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+ Return current parsing (sync and eos) state.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ a pointer to a variable to hold the current sync state
+
+
+
+ a pointer to a variable to hold the current eos state
+
+
+
+
+
+ Queries decoder packet loss concealment handling.
+
+ TRUE if packet loss concealment is enabled.
+
+MT safe.
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+
+ currently configured plc handling
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+ Queries current audio jitter tolerance threshold.
+
+ decoder audio jitter tolerance threshold.
+
+MT safe.
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+ Sets the audio decoder tags and how they should be merged with any
+upstream stream tags. This will override any tags previously-set
+with gst_audio_decoder_merge_tags().
+
+Note that this is provided for convenience, and the subclass is
+not required to use this and can still do tag handling on its own.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ a #GstTagList to merge, or NULL
+
+
+
+ the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE
+
+
+
+
+
+ Negotiate with downstream elements to currently configured #GstAudioInfo.
+Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
+negotiate fails.
+
+ #TRUE if the negotiation succeeded, else #FALSE.
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+ Returns caps that express @caps (or sink template caps if @caps == NULL)
+restricted to rate/channels/... combinations supported by downstream
+elements.
+
+ a #GstCaps owned by caller
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ initial caps
+
+
+
+ filter caps
+
+
+
+
+
+ Sets a caps in allocation query which are different from the set
+pad's caps. Use this function before calling
+gst_audio_decoder_negotiate(). Setting to %NULL the allocation
+query will use the caps from the pad.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ a #GstCaps or %NULL
+
+
+
+
+
+ Configures decoder drain handling. If drainable, subclass might
+be handed a NULL buffer to have it return any leftover decoded data.
+Otherwise, it is not considered so capable and will only ever be passed
+real data.
+
+MT safe.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ new state
+
+
+
+
+
+ Allows baseclass to perform byte to time estimated conversion.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ whether to enable byte to time conversion
+
+
+
+
+
+ Sets decoder latency.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ minimum latency
+
+
+
+ maximum latency
+
+
+
+
+
+ Sets numbers of tolerated decoder errors, where a tolerated one is then only
+warned about, but more than tolerated will lead to fatal error. You can set
+-1 for never returning fatal errors. Default is set to
+GST_AUDIO_DECODER_MAX_ERRORS.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ max tolerated errors
+
+
+
+
+
+ Sets decoder minimum aggregation latency.
+
+MT safe.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ new minimum latency
+
+
+
+
+
+ Configures decoder format needs. If enabled, subclass needs to be
+negotiated with format caps before it can process any data. It will then
+never be handed any data before it has been configured.
+Otherwise, it might be handed data without having been configured and
+is then expected to be able to do so either by default
+or based on the input data.
+
+MT safe.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ new state
+
+
+
+
+
+ Configure output info on the srcpad of @dec.
+
+ %TRUE on success.
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ #GstAudioInfo
+
+
+
+
+
+ Enable or disable decoder packet loss concealment, provided subclass
+and codec are capable and allow handling plc.
+
+MT safe.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ new state
+
+
+
+
+
+ Indicates whether or not subclass handles packet loss concealment (plc).
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ new plc state
+
+
+
+
+
+ Configures decoder audio jitter tolerance threshold.
+
+MT safe.
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ new tolerance
+
+
+
+
+
+ Lets #GstAudioDecoder sub-classes decide if they want the sink pad
+to use the default pad query handler to reply to accept-caps queries.
+
+By setting this to true it is possible to further customize the default
+handler with %GST_PAD_SET_ACCEPT_INTERSECT and
+%GST_PAD_SET_ACCEPT_TEMPLATE
+
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+ if the default pad accept-caps query handling should be used
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Subclasses can override any of the available virtual methods or not, as
+needed. At minimum @handle_frame (and likely @set_format) needs to be
+overridden.
+
+ The parent class structure
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #TRUE if the negotiation succeeded, else #FALSE.
+
+
+
+
+ a #GstAudioDecoder
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Set of available dithering methods.
+
+ No dithering
+
+
+ Rectangular dithering
+
+
+ Triangular dithering (default)
+
+
+ High frequency triangular dithering
+
+
+
+ Extra buffer metadata describing audio downmixing matrix. This metadata is
+attached to audio buffers and contains a matrix to downmix the buffer number
+of channels to @channels.
+
+@matrix is a two-dimensional array of @to_channels times @from_channels
+coefficients, i.e. the i-th output channel is constructed by multiplying
+the input channels with the coefficients in @matrix[i] and taking the sum
+of the results.
+
+ parent #GstMeta
+
+
+
+ the channel positions of the source
+
+
+
+ the channel positions of the destination
+
+
+
+ the number of channels of the source
+
+
+
+ the number of channels of the destination
+
+
+
+ the matrix coefficients.
+
+
+
+
+
+
+
+
+
+ This base class is for audio encoders turning raw audio samples into
+encoded audio data.
+
+GstAudioEncoder and subclass should cooperate as follows.
+
+## Configuration
+
+ * Initially, GstAudioEncoder calls @start when the encoder element
+ is activated, which allows subclass to perform any global setup.
+
+ * GstAudioEncoder calls @set_format to inform subclass of the format
+ of input audio data that it is about to receive. Subclass should
+ setup for encoding and configure various base class parameters
+ appropriately, notably those directing desired input data handling.
+ While unlikely, it might be called more than once, if changing input
+ parameters require reconfiguration.
+
+ * GstAudioEncoder calls @stop at end of all processing.
+
+As of configuration stage, and throughout processing, GstAudioEncoder
+maintains various parameters that provide required context,
+e.g. describing the format of input audio data.
+Conversely, subclass can and should configure these context parameters
+to inform base class of its expectation w.r.t. buffer handling.
+
+## Data processing
+
+ * Base class gathers input sample data (as directed by the context's
+ frame_samples and frame_max) and provides this to subclass' @handle_frame.
+ * If codec processing results in encoded data, subclass should call
+ gst_audio_encoder_finish_frame() to have encoded data pushed
+ downstream. Alternatively, it might also call
+ gst_audio_encoder_finish_frame() (with a NULL buffer and some number of
+ dropped samples) to indicate dropped (non-encoded) samples.
+ * Just prior to actually pushing a buffer downstream,
+ it is passed to @pre_push.
+ * During the parsing process GstAudioEncoderClass will handle both
+ srcpad and sinkpad events. Sink events will be passed to subclass
+ if @event callback has been provided.
+
+## Shutdown phase
+
+ * GstAudioEncoder class calls @stop to inform the subclass that data
+ parsing will be stopped.
+
+Subclass is responsible for providing pad template caps for
+source and sink pads. The pads need to be named "sink" and "src". It also
+needs to set the fixed caps on srcpad, when the format is ensured. This
+is typically when base class calls subclass' @set_format function, though
+it might be delayed until calling @gst_audio_encoder_finish_frame.
+
+In summary, above process should have subclass concentrating on
+codec data processing while leaving other matters to base class,
+such as most notably timestamp handling. While it may exert more control
+in this area (see e.g. @pre_push), it is very much not recommended.
+
+In particular, base class will either favor tracking upstream timestamps
+(at the possible expense of jitter) or aim to arrange for a perfect stream of
+output timestamps, depending on #GstAudioEncoder:perfect-timestamp.
+However, in the latter case, the input may not be so perfect or ideal, which
+is handled as follows. An input timestamp is compared with the expected
+timestamp as dictated by input sample stream and if the deviation is less
+than #GstAudioEncoder:tolerance, the deviation is discarded.
+Otherwise, it is considered a discontinuity and subsequent output timestamp
+is resynced to the new position after performing configured discontinuity
+processing. In the non-perfect-timestamp case, an upstream variation
+exceeding tolerance only leads to marking DISCONT on subsequent outgoing
+(while timestamps are adjusted to upstream regardless of variation).
+While DISCONT is also marked in the perfect-timestamp case, this one
+optionally (see #GstAudioEncoder:hard-resync)
+performs some additional steps, such as clipping of (early) input samples
+or draining all currently remaining input data, depending on the direction
+of the discontinuity.
+
+If perfect timestamps are arranged, it is also possible to request baseclass
+(usually set by subclass) to provide additional buffer metadata (in OFFSET
+and OFFSET_END) fields according to granule defined semantics currently
+needed by oggmux. Specifically, OFFSET is set to granulepos (= sample count
+including buffer) and OFFSET_END to corresponding timestamp (as determined
+by same sample count and sample rate).
+
+Things that subclass need to take care of:
+
+ * Provide pad templates
+ * Set source pad caps when appropriate
+ * Inform base class of buffer processing needs using context's
+ frame_samples and frame_bytes.
+ * Set user-configurable properties to sane defaults for format and
+ implementing codec at hand, e.g. those controlling timestamp behaviour
+ and discontinuity processing.
+ * Accept data in @handle_frame and provide encoded results to
+ gst_audio_encoder_finish_frame().
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Negotiate with downstream elements to currently configured #GstCaps.
+Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
+negotiate fails.
+
+ #TRUE if the negotiation succeeded, else #FALSE.
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Helper function that allocates a buffer to hold an encoded audio frame
+for @enc's current output format.
+
+ allocated buffer
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ size of the buffer
+
+
+
+
+
+ Collects encoded data and pushes encoded data downstream.
+Source pad caps must be set when this is called.
+
+If @samples < 0, then best estimate is all samples provided to encoder
+(subclass) so far. @buf may be NULL, in which case next number of @samples
+are considered discarded, e.g. as a result of discontinuous transmission,
+and a discontinuity is marked.
+
+Note that samples received in gst_audio_encoder_handle_frame()
+may be invalidated by a call to this function.
+
+ a #GstFlowReturn that should be escalated to caller (of caller)
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ encoded data
+
+
+
+ number of samples (per channel) represented by encoded data
+
+
+
+
+
+ Lets #GstAudioEncoder sub-classes know the memory @allocator
+used by the base class and its @params.
+
+Unref the @allocator after use.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ the #GstAllocator
+used
+
+
+
+ the
+#GstAllocatorParams of @allocator
+
+
+
+
+
+
+ a #GstAudioInfo describing the input audio format
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+ Queries encoder drain handling.
+
+ TRUE if drainable handling is enabled.
+
+MT safe.
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+
+ currently configured maximum handled frames
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+
+ currently maximum requested samples per frame
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+
+ currently minimum requested samples per frame
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+ Queries encoder hard minimum handling.
+
+ TRUE if hard minimum handling is enabled.
+
+MT safe.
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Sets the variables pointed to by @min and @max to the currently configured
+latency.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ a pointer to storage to hold minimum latency
+
+
+
+ a pointer to storage to hold maximum latency
+
+
+
+
+
+
+ currently configured encoder lookahead
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+ Queries if the encoder will handle granule marking.
+
+ TRUE if granule marking is enabled.
+
+MT safe.
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+ Queries encoder perfect timestamp behaviour.
+
+ TRUE if perfect timestamp setting enabled.
+
+MT safe.
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+ Queries current audio jitter tolerance threshold.
+
+ encoder audio jitter tolerance threshold.
+
+MT safe.
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+ Sets the audio encoder tags and how they should be merged with any
+upstream stream tags. This will override any tags previously-set
+with gst_audio_encoder_merge_tags().
+
+Note that this is provided for convenience, and the subclass is
+not required to use this and can still do tag handling on its own.
+
+MT safe.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ a #GstTagList to merge, or NULL to unset
+ previously-set tags
+
+
+
+ the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE
+
+
+
+
+
+ Negotiate with downstream elements to currently configured #GstCaps.
+Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
+negotiate fails.
+
+ #TRUE if the negotiation succeeded, else #FALSE.
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+ Returns caps that express @caps (or sink template caps if @caps == NULL)
+restricted to channel/rate combinations supported by downstream elements
+(e.g. muxers).
+
+ a #GstCaps owned by caller
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ initial caps
+
+
+
+ filter caps
+
+
+
+
+
+ Sets a caps in allocation query which are different from the set
+pad's caps. Use this function before calling
+gst_audio_encoder_negotiate(). Setting to %NULL the allocation
+query will use the caps from the pad.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ a #GstCaps or %NULL
+
+
+
+
+
+ Configures encoder drain handling. If drainable, subclass might
+be handed a NULL buffer to have it return any leftover encoded data.
+Otherwise, it is not considered so capable and will only ever be passed
+real data.
+
+MT safe.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ new state
+
+
+
+
+
+ Sets max number of frames accepted at once (assumed minimally 1).
+Requires @frame_samples_min and @frame_samples_max to be equal.
+
+Note: This value will be reset to 0 every time before
+GstAudioEncoder::set_format() is called.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ number of frames
+
+
+
+
+
+ Sets number of samples (per channel) subclass needs to be handed,
+at most or will be handed all available if 0.
+
+If an exact number of samples is required, gst_audio_encoder_set_frame_samples_min()
+must be called with the same number.
+
+Note: This value will be reset to 0 every time before
+GstAudioEncoder::set_format() is called.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ number of samples per frame
+
+
+
+
+
+ Sets number of samples (per channel) subclass needs to be handed,
+at least or will be handed all available if 0.
+
+If an exact number of samples is required, gst_audio_encoder_set_frame_samples_max()
+must be called with the same number.
+
+Note: This value will be reset to 0 every time before
+GstAudioEncoder::set_format() is called.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ number of samples per frame
+
+
+
+
+
+ Configures encoder hard minimum handling. If enabled, subclass
+will never be handed less samples than it configured, which otherwise
+might occur near end-of-data handling. Instead, the leftover samples
+will simply be discarded.
+
+MT safe.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ new state
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Set the codec headers to be sent downstream whenever requested.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ a list of
+ #GstBuffer containing the codec header
+
+
+
+
+
+
+
+ Sets encoder latency.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ minimum latency
+
+
+
+ maximum latency
+
+
+
+
+
+ Sets encoder lookahead (in units of input rate samples)
+
+Note: This value will be reset to 0 every time before
+GstAudioEncoder::set_format() is called.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ lookahead
+
+
+
+
+
+ Enable or disable encoder granule handling.
+
+MT safe.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ new state
+
+
+
+
+
+ Configure output caps on the srcpad of @enc.
+
+ %TRUE on success.
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ #GstCaps
+
+
+
+
+
+ Enable or disable encoder perfect output timestamp preference.
+
+MT safe.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ new state
+
+
+
+
+
+ Configures encoder audio jitter tolerance threshold.
+
+MT safe.
+
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+ new tolerance
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Subclasses can override any of the available virtual methods or not, as
+needed. At minimum @set_format and @handle_frame needs to be overridden.
+
+ The parent class structure
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #TRUE if the negotiation succeeded, else #FALSE.
+
+
+
+
+ a #GstAudioEncoder
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #GstAudioFilter is a #GstBaseTransform<!-- -->-derived base class for simple audio
+filters, ie. those that output the same format that they get as input.
+
+#GstAudioFilter will parse the input format for you (with error checking)
+before calling your setup function. Also, elements deriving from
+#GstAudioFilter may use gst_audio_filter_class_add_pad_templates() from
+their class_init function to easily configure the set of caps/formats that
+the element is able to handle.
+
+Derived classes should override the #GstAudioFilterClass.setup() and
+#GstBaseTransformClass.transform_ip() and/or
+#GstBaseTransformClass.transform()
+virtual functions in their class_init function.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ In addition to the @setup virtual function, you should also override the
+GstBaseTransform::transform and/or GstBaseTransform::transform_ip virtual
+function.
+
+ parent class
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Convenience function to add pad templates to this element class, with
+@allowed_caps as the caps that can be handled.
+
+This function is usually used from within a GObject class_init function.
+
+
+
+
+
+ an #GstAudioFilterClass
+
+
+
+ what formats the filter can handle, as #GstCaps
+
+
+
+
+
+
+ Extra audio flags
+
+ no valid flag
+
+
+ the position array explicitly
+ contains unpositioned channels.
+
+
+
+ Enum value describing the most common audio formats.
+
+ unknown or unset audio format
+
+
+ encoded audio format
+
+
+ 8 bits in 8 bits, signed
+
+
+ 8 bits in 8 bits, unsigned
+
+
+ 16 bits in 16 bits, signed, little endian
+
+
+ 16 bits in 16 bits, signed, big endian
+
+
+ 16 bits in 16 bits, unsigned, little endian
+
+
+ 16 bits in 16 bits, unsigned, big endian
+
+
+ 24 bits in 32 bits, signed, little endian
+
+
+ 24 bits in 32 bits, signed, big endian
+
+
+ 24 bits in 32 bits, unsigned, little endian
+
+
+ 24 bits in 32 bits, unsigned, big endian
+
+
+ 32 bits in 32 bits, signed, little endian
+
+
+ 32 bits in 32 bits, signed, big endian
+
+
+ 32 bits in 32 bits, unsigned, little endian
+
+
+ 32 bits in 32 bits, unsigned, big endian
+
+
+ 24 bits in 24 bits, signed, little endian
+
+
+ 24 bits in 24 bits, signed, big endian
+
+
+ 24 bits in 24 bits, unsigned, little endian
+
+
+ 24 bits in 24 bits, unsigned, big endian
+
+
+ 20 bits in 24 bits, signed, little endian
+
+
+ 20 bits in 24 bits, signed, big endian
+
+
+ 20 bits in 24 bits, unsigned, little endian
+
+
+ 20 bits in 24 bits, unsigned, big endian
+
+
+ 18 bits in 24 bits, signed, little endian
+
+
+ 18 bits in 24 bits, signed, big endian
+
+
+ 18 bits in 24 bits, unsigned, little endian
+
+
+ 18 bits in 24 bits, unsigned, big endian
+
+
+ 32-bit floating point samples, little endian
+
+
+ 32-bit floating point samples, big endian
+
+
+ 64-bit floating point samples, little endian
+
+
+ 64-bit floating point samples, big endian
+
+
+ 16 bits in 16 bits, signed, native endianness
+
+
+ 16 bits in 16 bits, unsigned, native endianness
+
+
+ 24 bits in 32 bits, signed, native endianness
+
+
+ 24 bits in 32 bits, unsigned, native endianness
+
+
+ 32 bits in 32 bits, signed, native endianness
+
+
+ 32 bits in 32 bits, unsigned, native endianness
+
+
+ 24 bits in 24 bits, signed, native endianness
+
+
+ 24 bits in 24 bits, unsigned, native endianness
+
+
+ 20 bits in 24 bits, signed, native endianness
+
+
+ 20 bits in 24 bits, unsigned, native endianness
+
+
+ 18 bits in 24 bits, signed, native endianness
+
+
+ 18 bits in 24 bits, unsigned, native endianness
+
+
+ 32-bit floating point samples, native endianness
+
+
+ 64-bit floating point samples, native endianness
+
+
+ Construct a #GstAudioFormat with given parameters.
+
+ a #GstAudioFormat or GST_AUDIO_FORMAT_UNKNOWN when no audio format
+exists with the given parameters.
+
+
+
+
+ signed or unsigned format
+
+
+
+ G_LITTLE_ENDIAN or G_BIG_ENDIAN
+
+
+
+ amount of bits used per sample
+
+
+
+ amount of used bits in @width
+
+
+
+
+
+ Fill @length bytes in @dest with silence samples for @info.
+
+
+
+
+
+ a #GstAudioFormatInfo
+
+
+
+ a destination
+ to fill
+
+
+
+
+
+ the length to fill
+
+
+
+
+
+ Convert the @format string to its #GstAudioFormat.
+
+ the #GstAudioFormat for @format or GST_AUDIO_FORMAT_UNKNOWN when the
+string is not a known format.
+
+
+
+
+ a format string
+
+
+
+
+
+ Get the #GstAudioFormatInfo for @format
+
+ The #GstAudioFormatInfo for @format.
+
+
+
+
+ a #GstAudioFormat
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ The different audio flags that a format info can have.
+
+ integer samples
+
+
+ float samples
+
+
+ signed samples
+
+
+ complex layout
+
+
+ the format can be used in
+#GstAudioFormatUnpack and #GstAudioFormatPack functions
+
+
+
+ Information for an audio format.
+
+ #GstAudioFormat
+
+
+
+ string representation of the format
+
+
+
+ user readable description of the format
+
+
+
+ #GstAudioFormatFlags
+
+
+
+ the endianness
+
+
+
+ amount of bits used for one sample
+
+
+
+ amount of valid bits in @width
+
+
+
+ @width/8 bytes with 1 silent sample
+
+
+
+
+
+ the format of the unpacked samples
+
+
+
+ function to unpack samples
+
+
+
+ function to pack samples
+
+
+
+
+
+
+
+
+
+ Packs @length samples from @src to the data array in format @info.
+The samples from source have each channel interleaved
+and will be packed into @data.
+
+
+
+
+
+ a #GstAudioFormatInfo
+
+
+
+
+
+
+ a source array
+
+
+
+
+
+ pointer to the destination
+ data
+
+
+
+
+
+ the amount of samples to pack.
+
+
+
+
+
+ Unpacks @length samples from the given data of format @info.
+The samples will be unpacked into @dest with each channel
+interleaved. @dest should at least be big enough to hold @length *
+channels * size(unpack_format) bytes.
+
+
+
+
+
+ a #GstAudioFormatInfo
+
+
+
+
+
+
+ a destination array
+
+
+
+
+
+ pointer to the audio data
+
+
+
+
+
+ the amount of samples to unpack.
+
+
+
+
+
+ Information describing audio properties. This information can be filled
+in from GstCaps with gst_audio_info_from_caps().
+
+Use the provided macros to access the info in this structure.
+
+ the format info of the audio
+
+
+
+ additional audio flags
+
+
+
+ audio layout
+
+
+
+ the audio sample rate
+
+
+
+ the number of channels
+
+
+
+ the number of bytes for one frame, this is the size of one
+ sample * @channels
+
+
+
+ the positions for each channel
+
+
+
+
+
+
+
+
+
+
+ Allocate a new #GstAudioInfo that is also initialized with
+gst_audio_info_init().
+
+ a new #GstAudioInfo. free with gst_audio_info_free().
+
+
+
+
+ Converts among various #GstFormat types. This function handles
+GST_FORMAT_BYTES, GST_FORMAT_TIME, and GST_FORMAT_DEFAULT. For
+raw audio, GST_FORMAT_DEFAULT corresponds to audio frames. This
+function can be used to handle pad queries of the type GST_QUERY_CONVERT.
+
+ TRUE if the conversion was successful.
+
+
+
+
+ a #GstAudioInfo
+
+
+
+ #GstFormat of the @src_val
+
+
+
+ value to convert
+
+
+
+ #GstFormat of the @dest_val
+
+
+
+ pointer to destination value
+
+
+
+
+
+ Copy a GstAudioInfo structure.
+
+ a new #GstAudioInfo. free with gst_audio_info_free.
+
+
+
+
+ a #GstAudioInfo
+
+
+
+
+
+ Free a GstAudioInfo structure previously allocated with gst_audio_info_new()
+or gst_audio_info_copy().
+
+
+
+
+
+ a #GstAudioInfo
+
+
+
+
+
+ Parse @caps and update @info.
+
+ TRUE if @caps could be parsed
+
+
+
+
+ a #GstAudioInfo
+
+
+
+ a #GstCaps
+
+
+
+
+
+ Initialize @info with default values.
+
+
+
+
+
+ a #GstAudioInfo
+
+
+
+
+
+ Compares two #GstAudioInfo and returns whether they are equal or not
+
+ %TRUE if @info and @other are equal, else %FALSE.
+
+
+
+
+ a #GstAudioInfo
+
+
+
+ a #GstAudioInfo
+
+
+
+
+
+ Set the default info for the audio info of @format and @rate and @channels.
+
+Note: This initializes @info first, no values are preserved.
+
+
+
+
+
+ a #GstAudioInfo
+
+
+
+ the format
+
+
+
+ the samplerate
+
+
+
+ the number of channels
+
+
+
+ the channel positions
+
+
+
+
+
+ Convert the values of @info into a #GstCaps.
+
+ the new #GstCaps containing the
+ info of @info.
+
+
+
+
+ a #GstAudioInfo
+
+
+
+
+
+
+ Layout of the audio samples for the different channels.
+
+ interleaved audio
+
+
+ non-interleaved audio
+
+
+
+ Set of available noise shaping methods
+
+ No noise shaping (default)
+
+
+ Error feedback
+
+
+ Simple 2-pole noise shaping
+
+
+ Medium 5-pole noise shaping
+
+
+ High 8-pole noise shaping
+
+
+
+ The different flags that can be used when packing and unpacking.
+
+ No flag
+
+
+ When the source has a smaller depth
+ than the target format, set the least significant bits of the target
+ to 0. This is likely slightly faster but less accurate. When this flag
+ is not specified, the most significant bits of the source are duplicated
+ in the least significant bits of the destination.
+
+
+
+
+ Free a #GstAudioQuantize.
+
+
+
+
+
+ a #GstAudioQuantize
+
+
+
+
+
+ Reset @quant to the state is was when created, clearing any
+history it might have.
+
+
+
+
+
+ a #GstAudioQuantize
+
+
+
+
+
+ Perform quantization on @samples in @in and write the result to @out.
+
+In case the samples are interleaved, @in and @out must point to an
+array with a single element pointing to a block of interleaved samples.
+
+If non-interleaved samples are used, @in and @out must point to an
+array with pointers to memory blocks, one for each channel.
+
+@in and @out may point to the same memory location, in which case samples will be
+modified in-place.
+
+
+
+
+
+ a #GstAudioQuantize
+
+
+
+ input samples
+
+
+
+ output samples
+
+
+
+ number of samples
+
+
+
+
+
+ Create a new quantizer object with the given parameters.
+
+Output samples will be quantized to a multiple of @quantizer. Better
+performance is achieved when @quantizer is a power of 2.
+
+Dithering and noise-shaping can be performed during quantization with
+the @dither and @ns parameters.
+
+ a new #GstAudioQuantize. Free with gst_audio_quantize_free().
+
+
+
+
+ a #GstAudioDitherMethod
+
+
+
+ a #GstAudioNoiseShapingMethod
+
+
+
+ #GstAudioQuantizeFlags
+
+
+
+ the #GstAudioFormat of the samples
+
+
+
+ the amount of channels in the samples
+
+
+
+ the quantizer to use
+
+
+
+
+
+
+ Extra flags that can be passed to gst_audio_quantize_new()
+
+ no flags
+
+
+ samples are non-interleaved
+
+
+
+ #GstAudioResampler is a structure which holds the information
+required to perform various kinds of resampling filtering.
+
+ Free a previously allocated #GstAudioResampler @resampler.
+
+
+
+
+
+ a #GstAudioResampler
+
+
+
+
+
+ Get the number of input frames that would currently be needed
+to produce @out_frames from @resampler.
+
+ The number of input frames needed for producing
+@out_frames of data from @resampler.
+
+
+
+
+ a #GstAudioResampler
+
+
+
+ number of input frames
+
+
+
+
+
+ Get the maximum number of input samples that the resampler would
+need before producing output.
+
+ the latency of @resampler as expressed in the number of
+frames.
+
+
+
+
+ a #GstAudioResampler
+
+
+
+
+
+ Get the number of output frames that would be currently available when
+@in_frames are given to @resampler.
+
+ The number of frames that would be available after giving
+@in_frames as input to @resampler.
+
+
+
+
+ a #GstAudioResampler
+
+
+
+ number of input frames
+
+
+
+
+
+ Perform resampling on @in_frames frames in @in and write @out_frames to @out.
+
+In case the samples are interleaved, @in and @out must point to an
+array with a single element pointing to a block of interleaved samples.
+
+If non-interleaved samples are used, @in and @out must point to an
+array with pointers to memory blocks, one for each channel.
+
+@in may be %NULL, in which case @in_frames of silence samples are pushed
+into the resampler.
+
+This function always produces @out_frames of output and consumes @in_frames of
+input. Use gst_audio_resampler_get_out_frames() and
+gst_audio_resampler_get_in_frames() to make sure @in_frames and @out_frames
+are matching and @in and @out point to enough memory.
+
+
+
+
+
+ a #GstAudioResampler
+
+
+
+ input samples
+
+
+
+ number of input frames
+
+
+
+ output samples
+
+
+
+ number of output frames
+
+
+
+
+
+ Reset @resampler to the state it was when it was first created, discarding
+all sample history.
+
+
+
+
+
+ a #GstAudioResampler
+
+
+
+
+
+ Update the resampler parameters for @resampler. This function should
+not be called concurrently with any other function on @resampler.
+
+When @in_rate or @out_rate is 0, its value is unchanged.
+
+When @options is %NULL, the previously configured options are reused.
+
+ %TRUE if the new parameters could be set
+
+
+
+
+ a #GstAudioResampler
+
+
+
+ new input rate
+
+
+
+ new output rate
+
+
+
+ new options or %NULL
+
+
+
+
+
+ Make a new resampler.
+
+ %TRUE on success
+
+
+
+
+ a #GstAudioResamplerMethod
+
+
+
+ #GstAudioResamplerFlags
+
+
+
+
+
+
+
+
+
+ input rate
+
+
+
+ output rate
+
+
+
+ extra options
+
+
+
+
+
+ Set the parameters for resampling from @in_rate to @out_rate using @method
+for @quality in @options.
+
+
+
+
+
+ a #GstAudioResamplerMethod
+
+
+
+ the quality
+
+
+
+ the input rate
+
+
+
+ the output rate
+
+
+
+ a #GstStructure
+
+
+
+
+
+
+ The different filter interpolation methods.
+
+ no interpolation
+
+
+ linear interpolation of the
+ filter coefficients.
+
+
+ cubic interpolation of the
+ filter coefficients.
+
+
+
+ Select how the filter tables should be set up.
+
+ Use interpolated filter tables. This
+ uses less memory but more CPU and is slightly less accurate but it allows for more
+ efficient variable rate resampling with gst_audio_resampler_update().
+
+
+ Use full filter table. This uses more memory
+ but less CPU.
+
+
+ Automatically choose between interpolated
+ and full filter tables.
+
+
+
+ Different resampler flags.
+
+ no flags
+
+
+ input samples are non-interleaved.
+ an array of blocks of samples, one for each channel, should be passed to the
+ resample function.
+
+
+ output samples are non-interleaved.
+ an array of blocks of samples, one for each channel, should be passed to the
+ resample function.
+
+
+ optimize for dynamic updates of the sample
+ rates with gst_audio_resampler_update(). This will select an interpolating filter
+ when #GST_AUDIO_RESAMPLER_FILTER_MODE_AUTO is configured.
+
+
+
+ Different subsampling and upsampling methods
+
+ Duplicates the samples when
+ upsampling and drops when downsampling
+
+
+ Uses linear interpolation to reconstruct
+ missing samples and averaging to downsample
+
+
+ Uses cubic interpolation
+
+
+ Uses Blackman-Nuttall windowed sinc interpolation
+
+
+ Uses Kaiser windowed sinc interpolation
+
+
+
+ This object is the base class for audio ringbuffers used by the base
+audio source and sink classes.
+
+The ringbuffer abstracts a circular buffer of data. One reader and
+one writer can operate on the data from different threads in a lockfree
+manner. The base class is sufficiently flexible to be used as an
+abstraction for DMA based ringbuffers as well as pure software
+implementations.
+
+ Print debug info about the buffer sizes in @spec to the debug log.
+
+
+
+
+
+ the spec to debug
+
+
+
+
+
+ Print debug info about the parsed caps in @spec to the debug log.
+
+
+
+
+
+ the spec to debug
+
+
+
+
+
+ Parse @caps into @spec.
+
+ TRUE if the caps could be parsed.
+
+
+
+
+ a spec
+
+
+
+ a #GstCaps
+
+
+
+
+
+ Allocate the resources for the ringbuffer. This function fills
+in the data pointer of the ring buffer with a valid #GstBuffer
+to which samples can be written.
+
+ TRUE if the device could be acquired, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to acquire
+
+
+
+ the specs of the buffer
+
+
+
+
+
+ Activate @buf to start or stop pulling data.
+
+MT safe.
+
+ TRUE if the device could be activated in the requested mode,
+FALSE on error.
+
+
+
+
+ the #GstAudioRingBuffer to activate
+
+
+
+ the new mode
+
+
+
+
+
+ Fill the ringbuffer with silence.
+
+MT safe.
+
+
+
+
+
+ the #GstAudioRingBuffer to clear
+
+
+
+
+
+ Close the audio device associated with the ring buffer. The ring buffer
+should already have been released via gst_audio_ring_buffer_release().
+
+ TRUE if the device could be closed, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer
+
+
+
+
+
+ Commit @in_samples samples pointed to by @data to the ringbuffer @buf.
+
+@in_samples and @out_samples define the rate conversion to perform on the
+samples in @data. For negative rates, @out_samples must be negative and
+@in_samples positive.
+
+When @out_samples is positive, the first sample will be written at position @sample
+in the ringbuffer. When @out_samples is negative, the last sample will be written to
+@sample in reverse order.
+
+@out_samples does not need to be a multiple of the segment size of the ringbuffer
+although it is recommended for optimal performance.
+
+@accum will hold a temporary accumulator used in rate conversion and should be
+set to 0 when this function is first called. In case the commit operation is
+interrupted, one can resume the processing by passing the previously returned
+@accum value back to this function.
+
+MT safe.
+
+ The number of samples written to the ringbuffer or -1 on error. The
+number of samples written can be less than @out_samples when @buf was interrupted
+with a flush or stop.
+
+
+
+
+ the #GstAudioRingBuffer to commit
+
+
+
+ the sample position of the data
+
+
+
+ the data to commit
+
+
+
+ the number of samples in the data to commit
+
+
+
+ the number of samples to write to the ringbuffer
+
+
+
+ accumulator for rate conversion.
+
+
+
+
+
+ Get the number of samples queued in the audio device. This is
+usually less than the segment size but can be bigger when the
+implementation uses another internal buffer between the audio
+device.
+
+For playback ringbuffers this is the amount of samples transferred from the
+ringbuffer to the device but still not played.
+
+For capture ringbuffers this is the amount of samples in the device that are
+not yet transferred to the ringbuffer.
+
+ The number of samples queued in the audio device.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to query
+
+
+
+
+
+ Open the audio device associated with the ring buffer. Does not perform any
+setup on the device. You must open the device before acquiring the ring
+buffer.
+
+ TRUE if the device could be opened, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer
+
+
+
+
+
+ Pause processing samples from the ringbuffer.
+
+ TRUE if the device could be paused, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to pause
+
+
+
+
+
+ Free the resources of the ringbuffer.
+
+ TRUE if the device could be released, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to release
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Start processing samples from the ringbuffer.
+
+ TRUE if the device could be started, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to start
+
+
+
+
+
+ Stop processing samples from the ringbuffer.
+
+ TRUE if the device could be stopped, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to stop
+
+
+
+
+
+ Allocate the resources for the ringbuffer. This function fills
+in the data pointer of the ring buffer with a valid #GstBuffer
+to which samples can be written.
+
+ TRUE if the device could be acquired, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to acquire
+
+
+
+ the specs of the buffer
+
+
+
+
+
+ Activate @buf to start or stop pulling data.
+
+MT safe.
+
+ TRUE if the device could be activated in the requested mode,
+FALSE on error.
+
+
+
+
+ the #GstAudioRingBuffer to activate
+
+
+
+ the new mode
+
+
+
+
+
+ Subclasses should call this function to notify the fact that
+@advance segments are now processed by the device.
+
+MT safe.
+
+
+
+
+
+ the #GstAudioRingBuffer to advance
+
+
+
+ the number of segments written
+
+
+
+
+
+ Clear the given segment of the buffer with silence samples.
+This function is used by subclasses.
+
+MT safe.
+
+
+
+
+
+ the #GstAudioRingBuffer to clear
+
+
+
+ the segment to clear
+
+
+
+
+
+ Fill the ringbuffer with silence.
+
+MT safe.
+
+
+
+
+
+ the #GstAudioRingBuffer to clear
+
+
+
+
+
+ Close the audio device associated with the ring buffer. The ring buffer
+should already have been released via gst_audio_ring_buffer_release().
+
+ TRUE if the device could be closed, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer
+
+
+
+
+
+ Commit @in_samples samples pointed to by @data to the ringbuffer @buf.
+
+@in_samples and @out_samples define the rate conversion to perform on the
+samples in @data. For negative rates, @out_samples must be negative and
+@in_samples positive.
+
+When @out_samples is positive, the first sample will be written at position @sample
+in the ringbuffer. When @out_samples is negative, the last sample will be written to
+@sample in reverse order.
+
+@out_samples does not need to be a multiple of the segment size of the ringbuffer
+although it is recommended for optimal performance.
+
+@accum will hold a temporary accumulator used in rate conversion and should be
+set to 0 when this function is first called. In case the commit operation is
+interrupted, one can resume the processing by passing the previously returned
+@accum value back to this function.
+
+MT safe.
+
+ The number of samples written to the ringbuffer or -1 on error. The
+number of samples written can be less than @out_samples when @buf was interrupted
+with a flush or stop.
+
+
+
+
+ the #GstAudioRingBuffer to commit
+
+
+
+ the sample position of the data
+
+
+
+ the data to commit
+
+
+
+ the number of samples in the data to commit
+
+
+
+ the number of samples to write to the ringbuffer
+
+
+
+ accumulator for rate conversion.
+
+
+
+
+
+ Convert @src_val in @src_fmt to the equivalent value in @dest_fmt. The result
+will be put in @dest_val.
+
+ TRUE if the conversion succeeded.
+
+
+
+
+ the #GstAudioRingBuffer
+
+
+
+ the source format
+
+
+
+ the source value
+
+
+
+ the destination format
+
+
+
+ a location to store the converted value
+
+
+
+
+
+ Get the number of samples queued in the audio device. This is
+usually less than the segment size but can be bigger when the
+implementation uses another internal buffer between the audio
+device.
+
+For playback ringbuffers this is the amount of samples transferred from the
+ringbuffer to the device but still not played.
+
+For capture ringbuffers this is the amount of samples in the device that are
+not yet transferred to the ringbuffer.
+
+ The number of samples queued in the audio device.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to query
+
+
+
+
+
+ Checks the status of the device associated with the ring buffer.
+
+ TRUE if the device was open, FALSE if it was closed.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer
+
+
+
+
+
+ Check if the ringbuffer is acquired and ready to use.
+
+ TRUE if the ringbuffer is acquired, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to check
+
+
+
+
+
+ Check if @buf is activated.
+
+MT safe.
+
+ TRUE if the device is active.
+
+
+
+
+ the #GstAudioRingBuffer
+
+
+
+
+
+ Check if @buf is flushing.
+
+MT safe.
+
+ TRUE if the device is flushing.
+
+
+
+
+ the #GstAudioRingBuffer
+
+
+
+
+
+ Tell the ringbuffer that it is allowed to start playback when
+the ringbuffer is filled with samples.
+
+MT safe.
+
+
+
+
+
+ the #GstAudioRingBuffer
+
+
+
+ the new value
+
+
+
+
+
+ Open the audio device associated with the ring buffer. Does not perform any
+setup on the device. You must open the device before acquiring the ring
+buffer.
+
+ TRUE if the device could be opened, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer
+
+
+
+
+
+ Pause processing samples from the ringbuffer.
+
+ TRUE if the device could be paused, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to pause
+
+
+
+
+
+ Returns a pointer to memory where the data from segment @segment
+can be found. This function is mostly used by subclasses.
+
+ FALSE if the buffer is not started.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to read from
+
+
+
+ the segment to read
+
+
+
+ the pointer to the memory where samples can be read
+
+
+
+ the number of bytes to read
+
+
+
+
+
+ Read @len samples from the ringbuffer into the memory pointed
+to by @data.
+The first sample should be read from position @sample in
+the ringbuffer.
+
+@len need not be a multiple of the segment size of the ringbuffer
+although it is recommended.
+
+@timestamp will return the timestamp associated with the data returned.
+
+ The number of samples read from the ringbuffer or -1 on
+error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to read from
+
+
+
+ the sample position of the data
+
+
+
+ where the data should be read
+
+
+
+ the number of samples in data to read
+
+
+
+ where the timestamp is returned
+
+
+
+
+
+ Free the resources of the ringbuffer.
+
+ TRUE if the device could be released, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to release
+
+
+
+
+
+ Get the number of samples that were processed by the ringbuffer
+since it was last started. This does not include the number of samples not
+yet processed (see gst_audio_ring_buffer_delay()).
+
+ The number of samples processed by the ringbuffer.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to query
+
+
+
+
+
+ Sets the given callback function on the buffer. This function
+will be called every time a segment has been written to a device.
+
+MT safe.
+
+
+
+
+
+ the #GstAudioRingBuffer to set the callback on
+
+
+
+ the callback to set
+
+
+
+ user data passed to the callback
+
+
+
+
+
+ Sets the given callback function on the buffer. This function
+will be called every time a segment has been written to a device.
+
+MT safe.
+
+
+
+
+
+ the #GstAudioRingBuffer to set the callback on
+
+
+
+ the callback to set
+
+
+
+ user data passed to the callback
+
+
+
+ function to be called when @user_data is no longer needed
+
+
+
+
+
+ Tell the ringbuffer about the device's channel positions. This must
+be called when the ringbuffer is acquired.
+
+
+
+
+
+ the #GstAudioRingBuffer
+
+
+
+ the device channel positions
+
+
+
+
+
+ Set the ringbuffer to flushing mode or normal mode.
+
+MT safe.
+
+
+
+
+
+ the #GstAudioRingBuffer to flush
+
+
+
+ the new mode
+
+
+
+
+
+ Make sure that the next sample written to the device is
+accounted for as being the @sample sample written to the
+device. This value will be used in reporting the current
+sample position of the ringbuffer.
+
+This function will also clear the buffer with silence.
+
+MT safe.
+
+
+
+
+
+ the #GstAudioRingBuffer to use
+
+
+
+ the sample number to set
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Start processing samples from the ringbuffer.
+
+ TRUE if the device could be started, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to start
+
+
+
+
+
+ Stop processing samples from the ringbuffer.
+
+ TRUE if the device could be stopped, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to stop
+
+
+
+
+
+
+
+
+ used to signal start/stop/pause/resume actions
+
+
+
+ boolean indicating that the ringbuffer is open
+
+
+
+ boolean indicating that the ringbuffer is acquired
+
+
+
+ data in the ringbuffer
+
+
+
+ size of data in the ringbuffer
+
+
+
+
+
+
+ format and layout of the ringbuffer data
+
+
+
+ number of samples in one segment
+
+
+
+ pointer to memory holding one segment of silence samples
+
+
+
+ state of the buffer
+
+
+
+ readpointer in the ringbuffer
+
+
+
+ segment corresponding to segment 0 (unused)
+
+
+
+ is a reader or writer waiting for a free segment
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This function is set with gst_audio_ring_buffer_set_callback() and is
+called to fill the memory at @data with @len bytes of samples.
+
+
+
+
+
+ a #GstAudioRingBuffer
+
+
+
+ target to fill
+
+
+
+
+
+ amount to fill
+
+
+
+ user data
+
+
+
+
+
+ The vmethods that subclasses can override to implement the ringbuffer.
+
+ parent class
+
+
+
+
+
+ TRUE if the device could be opened, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer
+
+
+
+
+
+
+
+
+ TRUE if the device could be acquired, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to acquire
+
+
+
+ the specs of the buffer
+
+
+
+
+
+
+
+
+ TRUE if the device could be released, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to release
+
+
+
+
+
+
+
+
+ TRUE if the device could be closed, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer
+
+
+
+
+
+
+
+
+ TRUE if the device could be started, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to start
+
+
+
+
+
+
+
+
+ TRUE if the device could be paused, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to pause
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ TRUE if the device could be stopped, FALSE on error.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to stop
+
+
+
+
+
+
+
+
+ The number of samples queued in the audio device.
+
+MT safe.
+
+
+
+
+ the #GstAudioRingBuffer to query
+
+
+
+
+
+
+
+
+ TRUE if the device could be activated in the requested mode,
+FALSE on error.
+
+
+
+
+ the #GstAudioRingBuffer to activate
+
+
+
+ the new mode
+
+
+
+
+
+
+
+
+ The number of samples written to the ringbuffer or -1 on error. The
+number of samples written can be less than @out_samples when @buf was interrupted
+with a flush or stop.
+
+
+
+
+ the #GstAudioRingBuffer to commit
+
+
+
+ the sample position of the data
+
+
+
+ the data to commit
+
+
+
+ the number of samples in the data to commit
+
+
+
+ the number of samples to write to the ringbuffer
+
+
+
+ accumulator for rate conversion.
+
+
+
+
+
+
+
+
+
+
+
+
+ the #GstAudioRingBuffer to clear
+
+
+
+
+
+
+
+
+
+
+
+
+ The format of the samples in the ringbuffer.
+
+ samples in linear or float
+
+
+ samples in mulaw
+
+
+ samples in alaw
+
+
+ samples in ima adpcm
+
+
+ samples in mpeg audio (but not AAC) format
+
+
+ samples in gsm format
+
+
+ samples in IEC958 frames (e.g. AC3)
+
+
+ samples in AC3 format
+
+
+ samples in EAC3 format
+
+
+ samples in DTS format
+
+
+ samples in MPEG-2 AAC ADTS format
+
+
+ samples in MPEG-4 AAC ADTS format
+
+
+ samples in MPEG-2 AAC raw format (Since 1.12)
+
+
+ samples in MPEG-4 AAC raw format (Since 1.12)
+
+
+ samples in FLAC format (Since 1.12)
+
+
+
+ The structure containing the format specification of the ringbuffer.
+
+ The caps that generated the Spec.
+
+
+
+ the sample type
+
+
+
+ the #GstAudioInfo
+
+
+
+ the latency in microseconds
+
+
+
+ the total buffer size in microseconds
+
+
+
+ the size of one segment in bytes
+
+
+
+ the total number of segments
+
+
+
+ number of segments queued in the lower level device,
+ defaults to segtotal
+
+
+
+
+
+
+
+
+
+ The state of the ringbuffer.
+
+ The ringbuffer is stopped
+
+
+ The ringbuffer is paused
+
+
+ The ringbuffer is started
+
+
+ The ringbuffer has encountered an
+ error after it has been started, e.g. because the device was
+ disconnected (Since 1.2)
+
+
+
+ This is the most simple base class for audio sinks that only requires
+subclasses to implement a set of simple functions:
+
+* `open()` :Open the device.
+
+* `prepare()` :Configure the device with the specified format.
+
+* `write()` :Write samples to the device.
+
+* `reset()` :Unblock writes and flush the device.
+
+* `delay()` :Get the number of samples written but not yet played
+by the device.
+
+* `unprepare()` :Undo operations done by prepare.
+
+* `close()` :Close the device.
+
+All scheduling of samples and timestamps is done in this base class
+together with #GstAudioBaseSink using a default implementation of a
+#GstAudioRingBuffer that uses threads.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #GstAudioSink class. Override the vmethods to implement functionality.
+
+ the parent class structure.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This is the most simple base class for audio sources that only requires
+subclasses to implement a set of simple functions:
+
+* `open()` :Open the device.
+* `prepare()` :Configure the device with the specified format.
+* `read()` :Read samples from the device.
+* `reset()` :Unblock reads and flush the device.
+* `delay()` :Get the number of samples in the device but not yet read.
+* `unprepare()` :Undo operations done by prepare.
+* `close()` :Close the device.
+
+All scheduling of samples and timestamps is done in this base class
+together with #GstAudioBaseSrc using a default implementation of a
+#GstAudioRingBuffer that uses threads.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #GstAudioSrc class. Override the vmethod to implement
+functionality.
+
+ the parent class.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This metadata stays relevant as long as channels are unchanged.
+
+
+
+ This metadata stays relevant as long as sample rate is unchanged.
+
+
+
+ This metadata is relevant for audio streams.
+
+
+
+ This interface is implemented by elements that provide a stream volume. Examples for
+such elements are #volume and #playbin.
+
+Applications can use this interface to get or set the current stream volume. For this
+the "volume" #GObject property can be used or the helper functions gst_stream_volume_set_volume()
+and gst_stream_volume_get_volume(). This volume is always a linear factor, i.e. 0.0 is muted
+and 1.0 is 100%. For showing the volume in a GUI it might make sense to convert it to
+a different format by using gst_stream_volume_convert_volume(). Volume sliders should usually
+use a cubic volume.
+
+Separate from the volume the stream can also be muted by the "mute" #GObject property or
+gst_stream_volume_set_mute() and gst_stream_volume_get_mute().
+
+Elements that provide some kind of stream volume should implement the "volume" and
+"mute" #GObject properties and handle setting and getting of them properly.
+The volume property is defined to be a linear volume factor.
+
+
+ the converted volume
+
+
+
+
+ #GstStreamVolumeFormat to convert from
+
+
+
+ #GstStreamVolumeFormat to convert to
+
+
+
+ Volume in @from format that should be converted
+
+
+
+
+
+
+ Returns %TRUE if the stream is muted
+
+
+
+
+ #GstStreamVolume that should be used
+
+
+
+
+
+
+ The current stream volume as linear factor
+
+
+
+
+ #GstStreamVolume that should be used
+
+
+
+ #GstStreamVolumeFormat which should be returned
+
+
+
+
+
+
+
+
+
+
+ #GstStreamVolume that should be used
+
+
+
+ Mute state that should be set
+
+
+
+
+
+
+
+
+
+
+ #GstStreamVolume that should be used
+
+
+
+ #GstStreamVolumeFormat of @val
+
+
+
+ Linear volume factor that should be set
+
+
+
+
+
+
+
+
+
+
+
+
+ Different representations of a stream volume. gst_stream_volume_convert_volume()
+allows to convert between the different representations.
+
+Formulas to convert from a linear to a cubic or dB volume are
+cbrt(val) and 20 * log10 (val).
+
+ Linear scale factor, 1.0 = 100%
+
+
+ Cubic volume scale
+
+
+ Logarithmic volume scale (dB, amplitude not power)
+
+
+
+
+
+
+
+
+ Clip the buffer to the given %GstSegment.
+
+After calling this function the caller does not own a reference to
+@buffer anymore.
+
+ %NULL if the buffer is completely outside the configured segment,
+otherwise the clipped buffer is returned.
+
+If the buffer has no timestamp, it is assumed to be inside the segment and
+is not clipped
+
+
+
+
+ The buffer to clip.
+
+
+
+ Segment in %GST_FORMAT_TIME or %GST_FORMAT_DEFAULT to which
+ the buffer should be clipped.
+
+
+
+ sample rate.
+
+
+
+ size of one audio frame in bytes. This is the size of one sample *
+number of channels.
+
+
+
+
+
+ Reorders @buffer from the channel positions @from to the channel
+positions @to. @from and @to must contain the same number of
+positions and the same positions, only in a different order.
+@buffer must be writable.
+
+ %TRUE if the reordering was possible.
+
+
+
+
+ The buffer to reorder.
+
+
+
+ The %GstAudioFormat of the buffer.
+
+
+
+ The number of channels.
+
+
+
+ The channel positions in the buffer.
+
+
+
+
+
+ The channel positions to convert to.
+
+
+
+
+
+
+
+ Get the fallback channel-mask for the given number of channels.
+
+This function returns a reasonable fallback channel-mask and should be
+called as a last resort when the specific channel map is unknown.
+
+ a fallback channel-mask for @channels or 0 when there is no
+mask and mono.
+
+
+
+
+ the number of channels
+
+
+
+
+
+ Create a new channel mixer object for the given parameters.
+
+ a new #GstAudioChannelMixer object. Free with gst_audio_channel_mixer_free()
+after usage.
+
+
+
+
+ #GstAudioChannelMixerFlags
+
+
+
+
+
+
+ number of input channels
+
+
+
+ positions of input channels
+
+
+
+ number of output channels
+
+
+
+ positions of output channels
+
+
+
+
+
+ Convert the @channels present in @channel_mask to a @position array
+(which should have at least @channels entries ensured by caller).
+If @channel_mask is set to 0, it is considered as 'not present' for the purpose
+of conversion.
+A partially valid @channel_mask with less bits set than the number
+of channels is considered valid.
+
+ %TRUE if channel and channel mask are valid and could be converted
+
+
+
+
+ The number of channels
+
+
+
+ The input channel_mask
+
+
+
+ The
+ %GstAudioChannelPosition<!-- -->s
+
+
+
+
+
+
+
+ Convert the @position array of @channels channels to a bitmask.
+
+If @force_order is %TRUE it additionally checks if the channels are
+in the order required by GStreamer.
+
+ %TRUE if the channel positions are valid and could be converted.
+
+
+
+
+ The %GstAudioChannelPositions
+
+
+
+
+
+ The number of channels.
+
+
+
+ Only consider the GStreamer channel order.
+
+
+
+ the output channel mask
+
+
+
+
+
+ Converts @position to a human-readable string representation for
+debugging purposes.
+
+ a newly allocated string representing
+@position
+
+Since 1.10
+
+
+
+
+ The %GstAudioChannelPositions
+ to convert.
+
+
+
+
+
+ The number of channels.
+
+
+
+
+
+ Reorders the channel positions in @position from any order to
+the GStreamer channel order.
+
+ %TRUE if the channel positions are valid and reordering
+was successful.
+
+
+
+
+ The channel positions to
+ reorder to.
+
+
+
+
+
+ The number of channels.
+
+
+
+
+
+ Checks if @position contains valid channel positions for
+@channels channels. If @force_order is %TRUE it additionally
+checks if the channels are in the order required by GStreamer.
+
+ %TRUE if the channel positions are valid.
+
+
+
+
+ The %GstAudioChannelPositions
+ to check.
+
+
+
+
+
+ The number of channels.
+
+
+
+ Only consider the GStreamer channel order.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Create a new #GstAudioConverter that is able to convert between @in and @out
+audio formats.
+
+@config contains extra configuration options, see #GST_VIDEO_CONVERTER_OPT_*
+parameters for details about the options and values.
+
+ a #GstAudioConverter or %NULL if conversion is not possible.
+
+
+
+
+ extra #GstAudioConverterFlags
+
+
+
+ a source #GstAudioInfo
+
+
+
+ a destination #GstAudioInfo
+
+
+
+ a #GstStructure with configuration options
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Construct a #GstAudioFormat with given parameters.
+
+ a #GstAudioFormat or GST_AUDIO_FORMAT_UNKNOWN when no audio format
+exists with the given parameters.
+
+
+
+
+ signed or unsigned format
+
+
+
+ G_LITTLE_ENDIAN or G_BIG_ENDIAN
+
+
+
+ amount of bits used per sample
+
+
+
+ amount of used bits in @width
+
+
+
+
+
+ Fill @length bytes in @dest with silence samples for @info.
+
+
+
+
+
+ a #GstAudioFormatInfo
+
+
+
+ a destination
+ to fill
+
+
+
+
+
+ the length to fill
+
+
+
+
+
+ Convert the @format string to its #GstAudioFormat.
+
+ the #GstAudioFormat for @format or GST_AUDIO_FORMAT_UNKNOWN when the
+string is not a known format.
+
+
+
+
+ a format string
+
+
+
+
+
+ Get the #GstAudioFormatInfo for @format
+
+ The #GstAudioFormatInfo for @format.
+
+
+
+
+ a #GstAudioFormat
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Returns a reorder map for @from to @to that can be used in
+custom channel reordering code, e.g. to convert from or to the
+GStreamer channel order. @from and @to must contain the same
+number of positions and the same positions, only in a
+different order.
+
+The resulting @reorder_map can be used for reordering by assigning
+channel i of the input to channel reorder_map[i] of the output.
+
+ %TRUE if the channel positions are valid and reordering
+is possible.
+
+
+
+
+ The number of channels.
+
+
+
+ The channel positions to reorder from.
+
+
+
+
+
+ The channel positions to reorder to.
+
+
+
+
+
+ Pointer to the reorder map.
+
+
+
+
+
+
+
+ Calculate the size of the buffer expected by gst_audio_iec61937_payload() for
+payloading type from @spec.
+
+ the size or 0 if the given @type is not supported or cannot be
+payloaded.
+
+
+
+
+ the ringbuffer spec
+
+
+
+
+
+ Payloads @src in the form specified by IEC 61937 for the type from @spec and
+stores the result in @dst. @src must contain exactly one frame of data and
+the frame is not checked for errors.
+
+ transfer-full: %TRUE if the payloading was successful, %FALSE
+otherwise.
+
+
+
+
+ a buffer containing the data to payload
+
+
+
+
+
+ size of @src in bytes
+
+
+
+ the destination buffer to store the
+ payloaded contents in. Should not overlap with @src
+
+
+
+
+
+ size of @dst in bytes
+
+
+
+ the ringbuffer spec for @src
+
+
+
+ the expected byte order of the payloaded data
+
+
+
+
+
+ Create a new quantizer object with the given parameters.
+
+Output samples will be quantized to a multiple of @quantizer. Better
+performance is achieved when @quantizer is a power of 2.
+
+Dithering and noise-shaping can be performed during quantization with
+the @dither and @ns parameters.
+
+ a new #GstAudioQuantize. Free with gst_audio_quantize_free().
+
+
+
+
+ a #GstAudioDitherMethod
+
+
+
+ a #GstAudioNoiseShapingMethod
+
+
+
+ #GstAudioQuantizeFlags
+
+
+
+ the #GstAudioFormat of the samples
+
+
+
+ the amount of channels in the samples
+
+
+
+ the quantizer to use
+
+
+
+
+
+ Reorders @data from the channel positions @from to the channel
+positions @to. @from and @to must contain the same number of
+positions and the same positions, only in a different order.
+
+ %TRUE if the reordering was possible.
+
+
+
+
+ The pointer to
+ the memory.
+
+
+
+
+
+ The size of the memory.
+
+
+
+ The %GstAudioFormat of the buffer.
+
+
+
+ The number of channels.
+
+
+
+ The channel positions in the buffer.
+
+
+
+
+
+ The channel positions to convert to.
+
+
+
+
+
+
+
+ Make a new resampler.
+
+ %TRUE on success
+
+
+
+
+ a #GstAudioResamplerMethod
+
+
+
+ #GstAudioResamplerFlags
+
+
+
+
+
+
+
+
+
+ input rate
+
+
+
+ output rate
+
+
+
+ extra options
+
+
+
+
+
+ Set the parameters for resampling from @in_rate to @out_rate using @method
+for @quality in @options.
+
+
+
+
+
+ a #GstAudioResamplerMethod
+
+
+
+ the quality
+
+
+
+ the input rate
+
+
+
+ the output rate
+
+
+
+ a #GstStructure
+
+
+
+
+
+ Attaches #GstAudioClippingMeta metadata to @buffer with the given parameters.
+
+ the #GstAudioClippingMeta on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+ GstFormat of @start and @stop, GST_FORMAT_DEFAULT is samples
+
+
+
+ Amount of audio to clip from start of buffer
+
+
+
+ Amount of to clip from end of buffer
+
+
+
+
+
+ Attaches #GstAudioDownmixMeta metadata to @buffer with the given parameters.
+
+@matrix is a two-dimensional array of @to_channels times @from_channels
+coefficients, i.e. the i-th output channel is constructed by multiplying
+the input channels with the coefficients in @matrix[i] and taking the sum
+of the results.
+
+ the #GstAudioDownmixMeta on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+ the channel positions
+ of the source
+
+
+
+
+
+ The number of channels of the source
+
+
+
+ the channel positions of
+ the destination
+
+
+
+
+
+ The number of channels of the destination
+
+
+
+ The matrix coefficients.
+
+
+
+
+
+ Find the #GstAudioDownmixMeta on @buffer for the given destination
+channel positions.
+
+ the #GstAudioDownmixMeta on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+ the channel positions of
+ the destination
+
+
+
+
+
+ The number of channels of the destination
+
+
+
+
+
+
+ the converted volume
+
+
+
+
+ #GstStreamVolumeFormat to convert from
+
+
+
+ #GstStreamVolumeFormat to convert to
+
+
+
+ Volume in @from format that should be converted
+
+
+
+
+
+
diff --git a/gir-files/GstBase-1.0.gir b/gir-files/GstBase-1.0.gir
new file mode 100644
index 000000000..5c49a74cb
--- /dev/null
+++ b/gir-files/GstBase-1.0.gir
@@ -0,0 +1,11120 @@
+
+
+
+
+
+
+
+
+ This class is for elements that receive buffers in an undesired size.
+While for example raw video contains one image per buffer, the same is not
+true for a lot of other formats, especially those that come directly from
+a file. So if you have undefined buffer sizes and require a specific size,
+this object is for you.
+
+An adapter is created with gst_adapter_new(). It can be freed again with
+g_object_unref().
+
+The theory of operation is like this: All buffers received are put
+into the adapter using gst_adapter_push() and the data is then read back
+in chunks of the desired size using gst_adapter_map()/gst_adapter_unmap()
+and/or gst_adapter_copy(). After the data has been processed, it is freed
+using gst_adapter_unmap().
+
+Other methods such as gst_adapter_take() and gst_adapter_take_buffer()
+combine gst_adapter_map() and gst_adapter_unmap() in one method and are
+potentially more convenient for some use cases.
+
+For example, a sink pad's chain function that needs to pass data to a library
+in 512-byte chunks could be implemented like this:
+|[<!-- language="C" -->
+static GstFlowReturn
+sink_pad_chain (GstPad *pad, GstObject *parent, GstBuffer *buffer)
+{
+ MyElement *this;
+ GstAdapter *adapter;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ this = MY_ELEMENT (parent);
+
+ adapter = this->adapter;
+
+ // put buffer into adapter
+ gst_adapter_push (adapter, buffer);
+
+ // while we can read out 512 bytes, process them
+ while (gst_adapter_available (adapter) >= 512 && ret == GST_FLOW_OK) {
+ const guint8 *data = gst_adapter_map (adapter, 512);
+ // use flowreturn as an error value
+ ret = my_library_foo (data);
+ gst_adapter_unmap (adapter);
+ gst_adapter_flush (adapter, 512);
+ }
+ return ret;
+}
+]|
+
+For another example, a simple element inside GStreamer that uses #GstAdapter
+is the libvisual element.
+
+An element using #GstAdapter in its sink pad chain function should ensure that
+when the FLUSH_STOP event is received, that any queued data is cleared using
+gst_adapter_clear(). Data should also be cleared or processed on EOS and
+when changing state from %GST_STATE_PAUSED to %GST_STATE_READY.
+
+Also check the GST_BUFFER_FLAG_DISCONT flag on the buffer. Some elements might
+need to clear the adapter after a discontinuity.
+
+The adapter will keep track of the timestamps of the buffers
+that were pushed. The last seen timestamp before the current position
+can be queried with gst_adapter_prev_pts(). This function can
+optionally return the number of bytes between the start of the buffer that
+carried the timestamp and the current adapter position. The distance is
+useful when dealing with, for example, raw audio samples because it allows
+you to calculate the timestamp of the current adapter position by using the
+last seen timestamp and the amount of bytes since. Additionally, the
+gst_adapter_prev_pts_at_offset() can be used to determine the last
+seen timestamp at a particular offset in the adapter.
+
+The adapter will also keep track of the offset of the buffers
+(#GST_BUFFER_OFFSET) that were pushed. The last seen offset before the
+current position can be queried with gst_adapter_prev_offset(). This function
+can optionally return the number of bytes between the start of the buffer
+that carried the offset and the current adapter position.
+
+Additionally the adapter also keeps track of the PTS, DTS and buffer offset
+at the last discontinuity, which can be retrieved with
+gst_adapter_pts_at_discont(), gst_adapter_dts_at_discont() and
+gst_adapter_offset_at_discont(). The number of bytes that were consumed
+since then can be queried with gst_adapter_distance_from_discont().
+
+A last thing to note is that while #GstAdapter is pretty optimized,
+merging buffers still might be an operation that requires a malloc() and
+memcpy() operation, and these operations are not the fastest. Because of
+this, some functions like gst_adapter_available_fast() are provided to help
+speed up such cases should you want to. To avoid repeated memory allocations,
+gst_adapter_copy() can be used to copy data into a (statically allocated)
+user provided buffer.
+
+#GstAdapter is not MT safe. All operations on an adapter must be serialized by
+the caller. This is not normally a problem, however, as the normal use case
+of #GstAdapter is inside one pad's chain function, in which case access is
+serialized via the pad's STREAM_LOCK.
+
+Note that gst_adapter_push() takes ownership of the buffer passed. Use
+gst_buffer_ref() before pushing it into the adapter if you still want to
+access the buffer later. The adapter will never modify the data in the
+buffer pushed in it.
+
+ Creates a new #GstAdapter. Free with g_object_unref().
+
+ a new #GstAdapter
+
+
+
+
+ Gets the maximum amount of bytes available, that is it returns the maximum
+value that can be supplied to gst_adapter_map() without that function
+returning %NULL.
+
+ number of bytes available in @adapter
+
+
+
+
+ a #GstAdapter
+
+
+
+
+
+ Gets the maximum number of bytes that are immediately available without
+requiring any expensive operations (like copying the data into a
+temporary buffer).
+
+ number of bytes that are available in @adapter without expensive
+operations
+
+
+
+
+ a #GstAdapter
+
+
+
+
+
+ Removes all buffers from @adapter.
+
+
+
+
+
+ a #GstAdapter
+
+
+
+
+
+ Copies @size bytes of data starting at @offset out of the buffers
+contained in #GstAdapter into an array @dest provided by the caller.
+
+The array @dest should be large enough to contain @size bytes.
+The user should check that the adapter has (@offset + @size) bytes
+available before calling this function.
+
+
+
+
+
+ a #GstAdapter
+
+
+
+
+ the memory to copy into
+
+
+
+
+
+ the bytes offset in the adapter to start from
+
+
+
+ the number of bytes to copy
+
+
+
+
+
+ Similar to gst_adapter_copy(), but more suitable for language bindings. @size
+bytes of data starting at @offset will be copied out of the buffers contained
+in @adapter and into a new #GBytes structure which is returned. Depending on
+the value of the @size argument an empty #GBytes structure may be returned.
+
+ A new #GBytes structure containing the copied data.
+
+
+
+
+ a #GstAdapter
+
+
+
+ the bytes offset in the adapter to start from
+
+
+
+ the number of bytes to copy
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Get the DTS that was on the last buffer with the GST_BUFFER_FLAG_DISCONT
+flag, or GST_CLOCK_TIME_NONE.
+
+ The DTS at the last discont or GST_CLOCK_TIME_NONE.
+
+
+
+
+ a #GstAdapter
+
+
+
+
+
+ Flushes the first @flush bytes in the @adapter. The caller must ensure that
+at least this many bytes are available.
+
+See also: gst_adapter_map(), gst_adapter_unmap()
+
+
+
+
+
+ a #GstAdapter
+
+
+
+ the number of bytes to flush
+
+
+
+
+
+ Returns a #GstBuffer containing the first @nbytes of the @adapter, but
+does not flush them from the adapter. See gst_adapter_take_buffer()
+for details.
+
+Caller owns a reference to the returned buffer. gst_buffer_unref() after
+usage.
+
+Free-function: gst_buffer_unref
+
+ a #GstBuffer containing the first
+ @nbytes of the adapter, or %NULL if @nbytes bytes are not available.
+ gst_buffer_unref() when no longer needed.
+
+
+
+
+ a #GstAdapter
+
+
+
+ the number of bytes to get
+
+
+
+
+
+ Returns a #GstBuffer containing the first @nbytes of the @adapter, but
+does not flush them from the adapter. See gst_adapter_take_buffer_fast()
+for details.
+
+Caller owns a reference to the returned buffer. gst_buffer_unref() after
+usage.
+
+Free-function: gst_buffer_unref
+
+ a #GstBuffer containing the first
+ @nbytes of the adapter, or %NULL if @nbytes bytes are not available.
+ gst_buffer_unref() when no longer needed.
+
+
+
+
+ a #GstAdapter
+
+
+
+ the number of bytes to get
+
+
+
+
+
+ Returns a #GstBufferList of buffers containing the first @nbytes bytes of
+the @adapter but does not flush them from the adapter. See
+gst_adapter_take_buffer_list() for details.
+
+Caller owns the returned list. Call gst_buffer_list_unref() to free
+the list after usage.
+
+ a #GstBufferList of buffers containing
+ the first @nbytes of the adapter, or %NULL if @nbytes bytes are not
+ available
+
+
+
+
+ a #GstAdapter
+
+
+
+ the number of bytes to get
+
+
+
+
+
+ Returns a #GList of buffers containing the first @nbytes bytes of the
+@adapter, but does not flush them from the adapter. See
+gst_adapter_take_list() for details.
+
+Caller owns returned list and contained buffers. gst_buffer_unref() each
+buffer in the list before freeing the list after usage.
+
+ a #GList of
+ buffers containing the first @nbytes of the adapter, or %NULL if @nbytes
+ bytes are not available
+
+
+
+
+
+
+ a #GstAdapter
+
+
+
+ the number of bytes to get
+
+
+
+
+
+ Gets the first @size bytes stored in the @adapter. The returned pointer is
+valid until the next function is called on the adapter.
+
+Note that setting the returned pointer as the data of a #GstBuffer is
+incorrect for general-purpose plugins. The reason is that if a downstream
+element stores the buffer so that it has access to it outside of the bounds
+of its chain function, the buffer will have an invalid data pointer after
+your element flushes the bytes. In that case you should use
+gst_adapter_take(), which returns a freshly-allocated buffer that you can set
+as #GstBuffer memory or the potentially more performant
+gst_adapter_take_buffer().
+
+Returns %NULL if @size bytes are not available.
+
+
+ a pointer to the first @size bytes of data, or %NULL
+
+
+
+
+
+
+ a #GstAdapter
+
+
+
+ the number of bytes to map/peek
+
+
+
+
+
+ Scan for pattern @pattern with applied mask @mask in the adapter data,
+starting from offset @offset.
+
+The bytes in @pattern and @mask are interpreted left-to-right, regardless
+of endianness. All four bytes of the pattern must be present in the
+adapter for it to match, even if the first or last bytes are masked out.
+
+It is an error to call this function without making sure that there is
+enough data (offset+size bytes) in the adapter.
+
+This function calls gst_adapter_masked_scan_uint32_peek() passing %NULL
+for value.
+
+ offset of the first match, or -1 if no match was found.
+
+Example:
+|[
+// Assume the adapter contains 0x00 0x01 0x02 ... 0xfe 0xff
+
+gst_adapter_masked_scan_uint32 (adapter, 0xffffffff, 0x00010203, 0, 256);
+// -> returns 0
+gst_adapter_masked_scan_uint32 (adapter, 0xffffffff, 0x00010203, 1, 255);
+// -> returns -1
+gst_adapter_masked_scan_uint32 (adapter, 0xffffffff, 0x01020304, 1, 255);
+// -> returns 1
+gst_adapter_masked_scan_uint32 (adapter, 0xffff, 0x0001, 0, 256);
+// -> returns -1
+gst_adapter_masked_scan_uint32 (adapter, 0xffff, 0x0203, 0, 256);
+// -> returns 0
+gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0x02030000, 0, 256);
+// -> returns 2
+gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0x02030000, 0, 4);
+// -> returns -1
+]|
+
+
+
+
+ a #GstAdapter
+
+
+
+ mask to apply to data before matching against @pattern
+
+
+
+ pattern to match (after mask is applied)
+
+
+
+ offset into the adapter data from which to start scanning, returns
+ the last scanned position.
+
+
+
+ number of bytes to scan from offset
+
+
+
+
+
+ Scan for pattern @pattern with applied mask @mask in the adapter data,
+starting from offset @offset. If a match is found, the value that matched
+is returned through @value, otherwise @value is left untouched.
+
+The bytes in @pattern and @mask are interpreted left-to-right, regardless
+of endianness. All four bytes of the pattern must be present in the
+adapter for it to match, even if the first or last bytes are masked out.
+
+It is an error to call this function without making sure that there is
+enough data (offset+size bytes) in the adapter.
+
+ offset of the first match, or -1 if no match was found.
+
+
+
+
+ a #GstAdapter
+
+
+
+ mask to apply to data before matching against @pattern
+
+
+
+ pattern to match (after mask is applied)
+
+
+
+ offset into the adapter data from which to start scanning, returns
+ the last scanned position.
+
+
+
+ number of bytes to scan from offset
+
+
+
+ pointer to uint32 to return matching data
+
+
+
+
+
+ Get the offset that was on the last buffer with the GST_BUFFER_FLAG_DISCONT
+flag, or GST_BUFFER_OFFSET_NONE.
+
+ The offset at the last discont or GST_BUFFER_OFFSET_NONE.
+
+
+
+
+ a #GstAdapter
+
+
+
+
+
+ Get the dts that was before the current byte in the adapter. When
+@distance is given, the amount of bytes between the dts and the current
+position is returned.
+
+The dts is reset to GST_CLOCK_TIME_NONE and the distance is set to 0 when
+the adapter is first created or when it is cleared. This also means that before
+the first byte with a dts is removed from the adapter, the dts
+and distance returned are GST_CLOCK_TIME_NONE and 0 respectively.
+
+ The previously seen dts.
+
+
+
+
+ a #GstAdapter
+
+
+
+ pointer to location for distance, or %NULL
+
+
+
+
+
+ Get the dts that was before the byte at offset @offset in the adapter. When
+@distance is given, the amount of bytes between the dts and the current
+position is returned.
+
+The dts is reset to GST_CLOCK_TIME_NONE and the distance is set to 0 when
+the adapter is first created or when it is cleared. This also means that before
+the first byte with a dts is removed from the adapter, the dts
+and distance returned are GST_CLOCK_TIME_NONE and 0 respectively.
+
+ The previously seen dts at given offset.
+
+
+
+
+ a #GstAdapter
+
+
+
+ the offset in the adapter at which to get timestamp
+
+
+
+ pointer to location for distance, or %NULL
+
+
+
+
+
+ Get the offset that was before the current byte in the adapter. When
+@distance is given, the amount of bytes between the offset and the current
+position is returned.
+
+The offset is reset to GST_BUFFER_OFFSET_NONE and the distance is set to 0
+when the adapter is first created or when it is cleared. This also means that
+before the first byte with an offset is removed from the adapter, the offset
+and distance returned are GST_BUFFER_OFFSET_NONE and 0 respectively.
+
+ The previously seen offset.
+
+
+
+
+ a #GstAdapter
+
+
+
+ pointer to a location for distance, or %NULL
+
+
+
+
+
+ Get the pts that was before the current byte in the adapter. When
+@distance is given, the amount of bytes between the pts and the current
+position is returned.
+
+The pts is reset to GST_CLOCK_TIME_NONE and the distance is set to 0 when
+the adapter is first created or when it is cleared. This also means that before
+the first byte with a pts is removed from the adapter, the pts
+and distance returned are GST_CLOCK_TIME_NONE and 0 respectively.
+
+ The previously seen pts.
+
+
+
+
+ a #GstAdapter
+
+
+
+ pointer to location for distance, or %NULL
+
+
+
+
+
+ Get the pts that was before the byte at offset @offset in the adapter. When
+@distance is given, the amount of bytes between the pts and the current
+position is returned.
+
+The pts is reset to GST_CLOCK_TIME_NONE and the distance is set to 0 when
+the adapter is first created or when it is cleared. This also means that before
+the first byte with a pts is removed from the adapter, the pts
+and distance returned are GST_CLOCK_TIME_NONE and 0 respectively.
+
+ The previously seen pts at given offset.
+
+
+
+
+ a #GstAdapter
+
+
+
+ the offset in the adapter at which to get timestamp
+
+
+
+ pointer to location for distance, or %NULL
+
+
+
+
+
+ Get the PTS that was on the last buffer with the GST_BUFFER_FLAG_DISCONT
+flag, or GST_CLOCK_TIME_NONE.
+
+ The PTS at the last discont or GST_CLOCK_TIME_NONE.
+
+
+
+
+ a #GstAdapter
+
+
+
+
+
+ Adds the data from @buf to the data stored inside @adapter and takes
+ownership of the buffer.
+
+
+
+
+
+ a #GstAdapter
+
+
+
+ a #GstBuffer to add to queue in the adapter
+
+
+
+
+
+ Returns a freshly allocated buffer containing the first @nbytes bytes of the
+@adapter. The returned bytes will be flushed from the adapter.
+
+Caller owns returned value. g_free after usage.
+
+Free-function: g_free
+
+
+ oven-fresh hot data, or %NULL if @nbytes bytes are not available
+
+
+
+
+
+
+ a #GstAdapter
+
+
+
+ the number of bytes to take
+
+
+
+
+
+ Returns a #GstBuffer containing the first @nbytes bytes of the
+@adapter. The returned bytes will be flushed from the adapter.
+This function is potentially more performant than
+gst_adapter_take() since it can reuse the memory in pushed buffers
+by subbuffering or merging. This function will always return a
+buffer with a single memory region.
+
+Note that no assumptions should be made as to whether certain buffer
+flags such as the DISCONT flag are set on the returned buffer, or not.
+The caller needs to explicitly set or unset flags that should be set or
+unset.
+
+Since 1.6 this will also copy over all GstMeta of the input buffers except
+for meta with the %GST_META_FLAG_POOLED flag or with the "memory" tag.
+
+Caller owns a reference to the returned buffer. gst_buffer_unref() after
+usage.
+
+Free-function: gst_buffer_unref
+
+ a #GstBuffer containing the first
+ @nbytes of the adapter, or %NULL if @nbytes bytes are not available.
+ gst_buffer_unref() when no longer needed.
+
+
+
+
+ a #GstAdapter
+
+
+
+ the number of bytes to take
+
+
+
+
+
+ Returns a #GstBuffer containing the first @nbytes of the @adapter.
+The returned bytes will be flushed from the adapter. This function
+is potentially more performant than gst_adapter_take_buffer() since
+it can reuse the memory in pushed buffers by subbuffering or
+merging. Unlike gst_adapter_take_buffer(), the returned buffer may
+be composed of multiple non-contiguous #GstMemory objects, no
+copies are made.
+
+Note that no assumptions should be made as to whether certain buffer
+flags such as the DISCONT flag are set on the returned buffer, or not.
+The caller needs to explicitly set or unset flags that should be set or
+unset.
+
+This will also copy over all GstMeta of the input buffers except
+for meta with the %GST_META_FLAG_POOLED flag or with the "memory" tag.
+
+This function can return a buffer up to the return value of
+gst_adapter_available() without making copies if possible.
+
+Caller owns a reference to the returned buffer. gst_buffer_unref() after
+usage.
+
+Free-function: gst_buffer_unref
+
+ a #GstBuffer containing the first
+ @nbytes of the adapter, or %NULL if @nbytes bytes are not available.
+ gst_buffer_unref() when no longer needed.
+
+
+
+
+ a #GstAdapter
+
+
+
+ the number of bytes to take
+
+
+
+
+
+ Returns a #GstBufferList of buffers containing the first @nbytes bytes of
+the @adapter. The returned bytes will be flushed from the adapter.
+When the caller can deal with individual buffers, this function is more
+performant because no memory should be copied.
+
+Caller owns the returned list. Call gst_buffer_list_unref() to free
+the list after usage.
+
+ a #GstBufferList of buffers containing
+ the first @nbytes of the adapter, or %NULL if @nbytes bytes are not
+ available
+
+
+
+
+ a #GstAdapter
+
+
+
+ the number of bytes to take
+
+
+
+
+
+ Returns a #GList of buffers containing the first @nbytes bytes of the
+@adapter. The returned bytes will be flushed from the adapter.
+When the caller can deal with individual buffers, this function is more
+performant because no memory should be copied.
+
+Caller owns returned list and contained buffers. gst_buffer_unref() each
+buffer in the list before freeing the list after usage.
+
+ a #GList of
+ buffers containing the first @nbytes of the adapter, or %NULL if @nbytes
+ bytes are not available
+
+
+
+
+
+
+ a #GstAdapter
+
+
+
+ the number of bytes to take
+
+
+
+
+
+ Releases the memory obtained with the last gst_adapter_map().
+
+
+
+
+
+ a #GstAdapter
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ The name of the template for the sink pad.
+
+
+
+ The name of the template for the source pad.
+
+
+
+ This base class is for parser elements that process data and splits it
+into separate audio/video/whatever frames.
+
+It provides for:
+
+ * provides one sink pad and one source pad
+ * handles state changes
+ * can operate in pull mode or push mode
+ * handles seeking in both modes
+ * handles events (SEGMENT/EOS/FLUSH)
+ * handles queries (POSITION/DURATION/SEEKING/FORMAT/CONVERT)
+ * handles flushing
+
+The purpose of this base class is to provide the basic functionality of
+a parser and share a lot of rather complex code.
+
+# Description of the parsing mechanism:
+
+## Set-up phase
+
+ * #GstBaseParse calls @start to inform subclass that data processing is
+ about to start now.
+
+ * #GstBaseParse class calls @set_sink_caps to inform the subclass about
+ incoming sinkpad caps. Subclass could already set the srcpad caps
+ accordingly, but this might be delayed until calling
+ gst_base_parse_finish_frame() with a non-queued frame.
+
+ * At least at this point subclass needs to tell the #GstBaseParse class
+ how big data chunks it wants to receive (min_frame_size). It can do
+ this with gst_base_parse_set_min_frame_size().
+
+ * #GstBaseParse class sets up appropriate data passing mode (pull/push)
+ and starts to process the data.
+
+## Parsing phase
+
+ * #GstBaseParse gathers at least min_frame_size bytes of data either
+ by pulling it from upstream or collecting buffers in an internal
+ #GstAdapter.
+
+ * A buffer of (at least) min_frame_size bytes is passed to subclass with
+ @handle_frame. Subclass checks the contents and can optionally
+ return GST_FLOW_OK along with an amount of data to be skipped to find
+ a valid frame (which will result in a subsequent DISCONT).
+ If, otherwise, the buffer does not hold a complete frame,
+ @handle_frame can merely return and will be called again when additional
+ data is available. In push mode this amounts to an
+ additional input buffer (thus minimal additional latency), in pull mode
+ this amounts to some arbitrary reasonable buffer size increase.
+ Of course, gst_base_parse_set_min_frame_size() could also be used if a
+ very specific known amount of additional data is required.
+ If, however, the buffer holds a complete valid frame, it can pass
+ the size of this frame to gst_base_parse_finish_frame().
+ If acting as a converter, it can also merely indicate consumed input data
+ while simultaneously providing custom output data.
+ Note that baseclass performs some processing (such as tracking
+ overall consumed data rate versus duration) for each finished frame,
+ but other state is only updated upon each call to @handle_frame
+ (such as tracking upstream input timestamp).
+
+ Subclass is also responsible for setting the buffer metadata
+ (e.g. buffer timestamp and duration, or keyframe if applicable).
+ (although the latter can also be done by #GstBaseParse if it is
+ appropriately configured, see below). Frame is provided with
+ timestamp derived from upstream (as much as generally possible),
+ duration obtained from configuration (see below), and offset
+ if meaningful (in pull mode).
+
+ Note that @check_valid_frame might receive any small
+ amount of input data when leftover data is being drained (e.g. at EOS).
+
+ * As part of finish frame processing,
+ just prior to actually pushing the buffer in question,
+ it is passed to @pre_push_frame which gives subclass yet one
+ last chance to examine buffer metadata, or to send some custom (tag)
+ events, or to perform custom (segment) filtering.
+
+ * During the parsing process #GstBaseParseClass will handle both srcpad
+ and sinkpad events. They will be passed to subclass if @event or
+ @src_event callbacks have been provided.
+
+## Shutdown phase
+
+* #GstBaseParse class calls @stop to inform the subclass that data
+ parsing will be stopped.
+
+Subclass is responsible for providing pad template caps for
+source and sink pads. The pads need to be named "sink" and "src". It also
+needs to set the fixed caps on srcpad, when the format is ensured (e.g.
+when base class calls subclass' @set_sink_caps function).
+
+This base class uses %GST_FORMAT_DEFAULT as a meaning of frames. So,
+subclass conversion routine needs to know that conversion from
+%GST_FORMAT_TIME to %GST_FORMAT_DEFAULT must return the
+frame number that can be found from the given byte position.
+
+#GstBaseParse uses the subclass's conversion methods also for seeking (or
+otherwise uses its own default one, see also below).
+
+Subclass @start and @stop functions will be called to inform the beginning
+and end of data processing.
+
+Things that the subclass needs to take care of:
+
+* Provide pad templates
+* Fixate the source pad caps when appropriate
+* Inform base class how big data chunks should be retrieved. This is
+ done with gst_base_parse_set_min_frame_size() function.
+* Examine data chunks passed to subclass with @handle_frame and pass
+ proper frame(s) to gst_base_parse_finish_frame(), and setting src pad
+ caps and timestamps on frame.
+* Provide conversion functions
+* Update the duration information with gst_base_parse_set_duration()
+* Optionally passthrough using gst_base_parse_set_passthrough()
+* Configure various baseparse parameters using
+ gst_base_parse_set_average_bitrate(), gst_base_parse_set_syncable()
+ and gst_base_parse_set_frame_rate().
+
+* In particular, if subclass is unable to determine a duration, but
+ parsing (or specs) yields a frames per seconds rate, then this can be
+ provided to #GstBaseParse to enable it to cater for
+ buffer time metadata (which will be taken from upstream as much as
+ possible). Internally keeping track of frame durations and respective
+ sizes that have been pushed provides #GstBaseParse with an estimated
+ bitrate. A default @convert (used if not overridden) will then use these
+ rates to perform obvious conversions. These rates are also used to
+ update (estimated) duration at regular frame intervals.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Adds an entry to the index associating @offset to @ts. It is recommended
+to only add keyframe entries. @force allows bypassing checks, such as
+whether the stream is (upstream) seekable, another entry is already "close"
+to the new entry, etc.
+
+ #gboolean indicating whether entry was added
+
+
+
+
+ #GstBaseParse.
+
+
+
+ offset of entry
+
+
+
+ timestamp associated with offset
+
+
+
+ whether entry refers to keyframe
+
+
+
+ add entry disregarding sanity checks
+
+
+
+
+
+ Default implementation of "convert" vmethod in #GstBaseParse class.
+
+ %TRUE if conversion was successful.
+
+
+
+
+ #GstBaseParse.
+
+
+
+ #GstFormat describing the source format.
+
+
+
+ Source value to be converted.
+
+
+
+ #GstFormat defining the converted format.
+
+
+
+ Pointer where the conversion result will be put.
+
+
+
+
+
+ Drains the adapter until it is empty. It decreases the min_frame_size to
+match the current adapter size and calls chain method until the adapter
+is emptied or chain returns with error.
+
+
+
+
+
+ a #GstBaseParse
+
+
+
+
+
+ Collects parsed data and pushes this downstream.
+Source pad caps must be set when this is called.
+
+If @frame's out_buffer is set, that will be used as subsequent frame data.
+Otherwise, @size samples will be taken from the input and used for output,
+and the output's metadata (timestamps etc) will be taken as (optionally)
+set by the subclass on @frame's (input) buffer (which is otherwise
+ignored for any but the above purpose/information).
+
+Note that the latter buffer is invalidated by this call, whereas the
+caller retains ownership of @frame.
+
+ a #GstFlowReturn that should be escalated to caller (of caller)
+
+
+
+
+ a #GstBaseParse
+
+
+
+ a #GstBaseParseFrame
+
+
+
+ consumed input data represented by frame
+
+
+
+
+
+ Sets the parser subclass's tags and how they should be merged with any
+upstream stream tags. This will override any tags previously-set
+with gst_base_parse_merge_tags().
+
+Note that this is provided for convenience, and the subclass is
+not required to use this and can still do tag handling on its own.
+
+
+
+
+
+ a #GstBaseParse
+
+
+
+ a #GstTagList to merge, or NULL to unset
+ previously-set tags
+
+
+
+ the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE
+
+
+
+
+
+ Pushes the frame's buffer downstream, sends any pending events and
+does some timestamp and segment handling. Takes ownership of
+frame's buffer, though caller retains ownership of @frame.
+
+This must be called with sinkpad STREAM_LOCK held.
+
+ #GstFlowReturn
+
+
+
+
+ #GstBaseParse.
+
+
+
+ a #GstBaseParseFrame
+
+
+
+
+
+ Optionally sets the average bitrate detected in media (if non-zero),
+e.g. based on metadata, as it will be posted to the application.
+
+By default, announced average bitrate is estimated. The average bitrate
+is used to estimate the total duration of the stream and to estimate
+a seek position, if there's no index and the format is syncable
+(see gst_base_parse_set_syncable()).
+
+
+
+
+
+ #GstBaseParse.
+
+
+
+ average bitrate in bits/second
+
+
+
+
+
+ Sets the duration of the currently playing media. Subclass can use this
+when it is able to determine duration and/or notices a change in the media
+duration. Alternatively, if @interval is non-zero (default), then stream
+duration is determined based on estimated bitrate, and updated every @interval
+frames.
+
+
+
+
+
+ #GstBaseParse.
+
+
+
+ #GstFormat.
+
+
+
+ duration value.
+
+
+
+ how often to update the duration estimate based on bitrate, or 0.
+
+
+
+
+
+ If frames per second is configured, parser can take care of buffer duration
+and timestamping. When performing segment clipping, or seeking to a specific
+location, a corresponding decoder might need an initial @lead_in and a
+following @lead_out number of frames to ensure the desired segment is
+entirely filled upon decoding.
+
+
+
+
+
+ the #GstBaseParse to set
+
+
+
+ frames per second (numerator).
+
+
+
+ frames per second (denominator).
+
+
+
+ frames needed before a segment for subsequent decode
+
+
+
+ frames needed after a segment
+
+
+
+
+
+ Set if frames carry timing information which the subclass can (generally)
+parse and provide. In particular, intrinsic (rather than estimated) time
+can be obtained following a seek.
+
+
+
+
+
+ a #GstBaseParse
+
+
+
+ whether frames carry timing information
+
+
+
+
+
+ By default, the base class might try to infer PTS from DTS and vice
+versa. While this is generally correct for audio data, it may not
+be otherwise. Sub-classes implementing such formats should disable
+timestamp inferring.
+
+
+
+
+
+ a #GstBaseParse
+
+
+
+ %TRUE if parser should infer DTS/PTS from each other
+
+
+
+
+
+ Sets the minimum and maximum (which may likely be equal) latency introduced
+by the parsing process. If there is such a latency, which depends on the
+particular parsing of the format, it typically corresponds to 1 frame duration.
+
+
+
+
+
+ a #GstBaseParse
+
+
+
+ minimum parse latency
+
+
+
+ maximum parse latency
+
+
+
+
+
+ Subclass can use this function to tell the base class that it needs to
+give at least @min_size buffers.
+
+
+
+
+
+ #GstBaseParse.
+
+
+
+ Minimum size of the data that this base class should give to
+ subclass.
+
+
+
+
+
+ Set if the nature of the format or configuration does not allow (much)
+parsing, and the parser should operate in passthrough mode (which only
+applies when operating in push mode). That is, incoming buffers are
+pushed through unmodified, i.e. no @check_valid_frame or @parse_frame
+callbacks will be invoked, but @pre_push_frame will still be invoked,
+so subclass can perform as much or as little is appropriate for
+passthrough semantics in @pre_push_frame.
+
+
+
+
+
+ a #GstBaseParse
+
+
+
+ %TRUE if parser should run in passthrough mode
+
+
+
+
+
+ By default, the base class will guess PTS timestamps using a simple
+interpolation (previous timestamp + duration), which is incorrect for
+data streams with reordering, where PTS can go backward. Sub-classes
+implementing such formats should disable PTS interpolation.
+
+
+
+
+
+ a #GstBaseParse
+
+
+
+ %TRUE if parser should interpolate PTS timestamps
+
+
+
+
+
+ Set if frame starts can be identified. This is set by default and
+determines whether seeking based on bitrate averages
+is possible for a format/stream.
+
+
+
+
+
+ a #GstBaseParse
+
+
+
+ set if frame starts can be identified
+
+
+
+
+
+ This function should only be called from a @handle_frame implementation.
+
+#GstBaseParse creates initial timestamps for frames by using the last
+timestamp seen in the stream before the frame starts. In certain
+cases, the correct timestamps will occur in the stream after the
+start of the frame, but before the start of the actual picture data.
+This function can be used to set the timestamps based on the offset
+into the frame data that the picture starts.
+
+
+
+
+
+ a #GstBaseParse
+
+
+
+ offset into current buffer
+
+
+
+
+
+ If set to %TRUE, baseparse will unconditionally force parsing of the
+incoming data. This can be required in the rare cases where the incoming
+side-data (caps, pts, dts, ...) is not trusted by the user, who wants to
+force validation and parsing of the incoming data.
+If set to %FALSE, the decision of whether to parse the data or not is up to
+the implementation (standard behaviour).
+
+
+
+ the parent element.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Subclasses can override any of the available virtual methods or not, as
+needed. At minimum @handle_frame needs to be overridden.
+
+ the parent class
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Frame (context) data passed to each frame parsing virtual methods. In
+addition to providing the data to be checked for a valid frame or an already
+identified frame, it conveys additional metadata or control information
+from and to the subclass w.r.t. the particular frame in question (rather
+than global parameters). Some of these may apply to each parsing stage, others
+only to some a particular one. These parameters are effectively zeroed at start
+of each frame's processing, i.e. parsing virtual method invocation sequence.
+
+ input data to be parsed for frames.
+
+
+
+ output data.
+
+
+
+ a combination of input and output #GstBaseParseFrameFlags that
+ convey additional context to subclass or allow subclass to tune
+ subsequent #GstBaseParse actions.
+
+
+
+ media specific offset of input frame
+ Note that a converter may have a different one on the frame's buffer.
+
+
+
+ subclass can set this to indicate the metadata overhead
+ for the given frame, which is then used to enable more accurate bitrate
+ computations. If this is -1, it is assumed that this frame should be
+ skipped in bitrate calculation.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Allocates a new #GstBaseParseFrame. This function is mainly for bindings,
+elements written in C should usually allocate the frame on the stack and
+then use gst_base_parse_frame_init() to initialise it.
+
+ a newly-allocated #GstBaseParseFrame. Free with
+ gst_base_parse_frame_free() when no longer needed.
+
+
+
+
+ a #GstBuffer
+
+
+
+ the flags
+
+
+
+ number of bytes in this frame which should be counted as
+ metadata overhead, ie. not used to calculate the average bitrate.
+ Set to -1 to mark the entire frame as metadata. If in doubt, set to 0.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Sets a #GstBaseParseFrame to initial state. Currently this means
+all public fields are zero-ed and a private flag is set to make
+sure gst_base_parse_frame_free() only frees the contents but not
+the actual frame. Use this function to initialise a #GstBaseParseFrame
+allocated on the stack.
+
+
+
+
+
+ #GstBaseParseFrame.
+
+
+
+
+
+
+ Flags to be used in a #GstBaseParseFrame.
+
+ no flag
+
+
+ set by baseclass if current frame
+ is passed for processing to the subclass for the first time
+ (and not set on subsequent calls with same data).
+
+
+ set to indicate this buffer should not be
+ counted as frame, e.g. if this frame is dependent on a previous one.
+ As it is not counted as a frame, bitrate increases but frame to time
+ conversions are maintained.
+
+
+ @pre_push_frame can set this to indicate
+ that regular segment clipping can still be performed (as opposed to
+ any custom one having been done).
+
+
+ indicates to @finish_frame that the
+ frame should be dropped (and might be handled internally by subclass)
+
+
+ indicates to @finish_frame that the
+ frame should be queued for now and processed fully later
+ when the first non-queued frame is finished
+
+
+
+
+
+ #GstBaseSink is the base class for sink elements in GStreamer, such as
+xvimagesink or filesink. It is a layer on top of #GstElement that provides a
+simplified interface to plugin writers. #GstBaseSink handles many details
+for you, for example: preroll, clock synchronization, state changes,
+activation in push or pull mode, and queries.
+
+In most cases, when writing sink elements, there is no need to implement
+class methods from #GstElement or to set functions on pads, because the
+#GstBaseSink infrastructure should be sufficient.
+
+#GstBaseSink provides support for exactly one sink pad, which should be
+named "sink". A sink implementation (subclass of #GstBaseSink) should
+install a pad template in its class_init function, like so:
+|[<!-- language="C" -->
+static void
+my_element_class_init (GstMyElementClass *klass)
+{
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ // sinktemplate should be a #GstStaticPadTemplate with direction
+ // %GST_PAD_SINK and name "sink"
+ gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Sink name",
+ "Sink",
+ "My Sink element",
+ "The author <my.sink@my.email>");
+}
+]|
+
+#GstBaseSink will handle the prerolling correctly. This means that it will
+return %GST_STATE_CHANGE_ASYNC from a state change to PAUSED until the first
+buffer arrives in this element. The base class will call the
+#GstBaseSinkClass.preroll() vmethod with this preroll buffer and will then
+commit the state change to the next asynchronously pending state.
+
+When the element is set to PLAYING, #GstBaseSink will synchronise on the
+clock using the times returned from #GstBaseSinkClass.get_times(). If this
+function returns %GST_CLOCK_TIME_NONE for the start time, no synchronisation
+will be done. Synchronisation can be disabled entirely by setting the object
+#GstBaseSink:sync property to %FALSE.
+
+After synchronisation the virtual method #GstBaseSinkClass.render() will be
+called. Subclasses should minimally implement this method.
+
+Subclasses that synchronise on the clock in the #GstBaseSinkClass.render()
+method are supported as well. These classes typically receive a buffer in
+the render method and can then potentially block on the clock while
+rendering. A typical example is an audiosink.
+These subclasses can use gst_base_sink_wait_preroll() to perform the
+blocking wait.
+
+Upon receiving the EOS event in the PLAYING state, #GstBaseSink will wait
+for the clock to reach the time indicated by the stop time of the last
+#GstBaseSinkClass.get_times() call before posting an EOS message. When the
+element receives EOS in PAUSED, preroll completes, the event is queued and an
+EOS message is posted when going to PLAYING.
+
+#GstBaseSink will internally use the %GST_EVENT_SEGMENT events to schedule
+synchronisation and clipping of buffers. Buffers that fall completely outside
+of the current segment are dropped. Buffers that fall partially in the
+segment are rendered (and prerolled). Subclasses should do any subbuffer
+clipping themselves when needed.
+
+#GstBaseSink will by default report the current playback position in
+%GST_FORMAT_TIME based on the current clock time and segment information.
+If no clock has been set on the element, the query will be forwarded
+upstream.
+
+The #GstBaseSinkClass.set_caps() function will be called when the subclass
+should configure itself to process a specific media type.
+
+The #GstBaseSinkClass.start() and #GstBaseSinkClass.stop() virtual methods
+will be called when resources should be allocated. Any
+#GstBaseSinkClass.preroll(), #GstBaseSinkClass.render() and
+#GstBaseSinkClass.set_caps() function will be called between the
+#GstBaseSinkClass.start() and #GstBaseSinkClass.stop() calls.
+
+The #GstBaseSinkClass.event() virtual method will be called when an event is
+received by #GstBaseSink. Normally this method should only be overridden by
+very specific elements (such as file sinks) which need to handle the
+newsegment event specially.
+
+The #GstBaseSinkClass.unlock() method is called when the elements should
+unblock any blocking operations they perform in the
+#GstBaseSinkClass.render() method. This is mostly useful when the
+#GstBaseSinkClass.render() method performs a blocking write on a file
+descriptor, for example.
+
+The #GstBaseSink:max-lateness property affects how the sink deals with
+buffers that arrive too late in the sink. A buffer arrives too late in the
+sink when the presentation time (as a combination of the last segment, buffer
+timestamp and element base_time) plus the duration is before the current
+time of the clock.
+If the frame is later than max-lateness, the sink will drop the buffer
+without calling the render method.
+This feature is disabled if sync is disabled, the
+#GstBaseSinkClass.get_times() method does not return a valid start time or
+max-lateness is set to -1 (the default).
+Subclasses can use gst_base_sink_set_max_lateness() to configure the
+max-lateness value.
+
+The #GstBaseSink:qos property will enable the quality-of-service features of
+the basesink which gather statistics about the real-time performance of the
+clock synchronisation. For each buffer received in the sink, statistics are
+gathered and a QOS event is sent upstream with these numbers. This
+information can then be used by upstream elements to reduce their processing
+rate, for example.
+
+The #GstBaseSink:async property can be used to instruct the sink to never
+perform an ASYNC state change. This feature is mostly usable when dealing
+with non-synchronized streams or sparse streams.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ If the @sink spawns its own thread for pulling buffers from upstream it
+should call this method after it has pulled a buffer. If the element needed
+to preroll, this function will perform the preroll and will then block
+until the element state is changed.
+
+This function should be called with the PREROLL_LOCK held.
+
+ %GST_FLOW_OK if the preroll completed and processing can
+continue. Any other return value should be returned from the render vmethod.
+
+
+
+
+ the sink
+
+
+
+ the mini object that caused the preroll
+
+
+
+
+
+ Get the number of bytes that the sink will pull when it is operating in pull
+mode.
+
+ the number of bytes @sink will pull in pull mode.
+
+
+
+
+ a #GstBaseSink
+
+
+
+
+
+ Checks if @sink is currently configured to drop buffers which are outside
+the current segment
+
+ %TRUE if the sink is configured to drop buffers outside the
+current segment.
+
+
+
+
+ the sink
+
+
+
+
+
+ Get the last sample that arrived in the sink and was used for preroll or for
+rendering. This property can be used to generate thumbnails.
+
+The #GstCaps on the sample can be used to determine the type of the buffer.
+
+Free-function: gst_sample_unref
+
+ a #GstSample. gst_sample_unref() after
+ usage. This function returns %NULL when no buffer has arrived in the
+ sink yet or when the sink is not in PAUSED or PLAYING.
+
+
+
+
+ the sink
+
+
+
+
+
+ Get the currently configured latency.
+
+ The configured latency.
+
+
+
+
+ the sink
+
+
+
+
+
+ Get the maximum amount of bits per second that the sink will render.
+
+ the maximum number of bits per second @sink will render.
+
+
+
+
+ a #GstBaseSink
+
+
+
+
+
+ Gets the max lateness value. See gst_base_sink_set_max_lateness() for
+more details.
+
+ The maximum time in nanoseconds that a buffer can be late
+before it is dropped and not rendered. A value of -1 means an
+unlimited time.
+
+
+
+
+ the sink
+
+
+
+
+
+ Get the render delay of @sink. see gst_base_sink_set_render_delay() for more
+information about the render delay.
+
+ the render delay of @sink.
+
+
+
+
+ a #GstBaseSink
+
+
+
+
+
+ Checks if @sink is currently configured to synchronize against the
+clock.
+
+ %TRUE if the sink is configured to synchronize against the clock.
+
+
+
+
+ the sink
+
+
+
+
+
+ Get the time that will be inserted between frames to control the
+maximum buffers per second.
+
+ the number of nanoseconds @sink will put between frames.
+
+
+
+
+ a #GstBaseSink
+
+
+
+
+
+ Get the synchronisation offset of @sink.
+
+ The synchronisation offset.
+
+
+
+
+ the sink
+
+
+
+
+
+ Checks if @sink is currently configured to perform asynchronous state
+changes to PAUSED.
+
+ %TRUE if the sink is configured to perform asynchronous state
+changes.
+
+
+
+
+ the sink
+
+
+
+
+
+ Checks if @sink is currently configured to store the last received sample in
+the last-sample property.
+
+ %TRUE if the sink is configured to store the last received sample.
+
+
+
+
+ the sink
+
+
+
+
+
+ Checks if @sink is currently configured to send Quality-of-Service events
+upstream.
+
+ %TRUE if the sink is configured to perform Quality-of-Service.
+
+
+
+
+ the sink
+
+
+
+
+
+ Query the sink for the latency parameters. The latency will be queried from
+the upstream elements. @live will be %TRUE if @sink is configured to
+synchronize against the clock. @upstream_live will be %TRUE if an upstream
+element is live.
+
+If both @live and @upstream_live are %TRUE, the sink will want to compensate
+for the latency introduced by the upstream elements by setting the
+@min_latency to a strictly positive value.
+
+This function is mostly used by subclasses.
+
+ %TRUE if the query succeeded.
+
+
+
+
+ the sink
+
+
+
+ if the sink is live
+
+
+
+ if an upstream element is live
+
+
+
+ the min latency of the upstream elements
+
+
+
+ the max latency of the upstream elements
+
+
+
+
+
+ Configures @sink to perform all state changes asynchronously. When async is
+disabled, the sink will immediately go to PAUSED instead of waiting for a
+preroll buffer. This feature is useful if the sink does not synchronize
+against the clock or when it is dealing with sparse streams.
+
+
+
+
+
+ the sink
+
+
+
+ the new async value.
+
+
+
+
+
+ Set the number of bytes that the sink will pull when it is operating in pull
+mode.
+
+
+
+
+
+ a #GstBaseSink
+
+
+
+ the blocksize in bytes
+
+
+
+
+
+ Configure @sink to drop buffers which are outside the current segment
+
+
+
+
+
+ the sink
+
+
+
+ drop buffers outside the segment
+
+
+
+
+
+ Configures @sink to store the last received sample in the last-sample
+property.
+
+
+
+
+
+ the sink
+
+
+
+ the new enable-last-sample value.
+
+
+
+
+
+ Set the maximum amount of bits per second that the sink will render.
+
+
+
+
+
+ a #GstBaseSink
+
+
+
+ the max_bitrate in bits per second
+
+
+
+
+
+ Sets the new max lateness value to @max_lateness. This value is
+used to decide if a buffer should be dropped or not based on the
+buffer timestamp and the current clock time. A value of -1 means
+an unlimited time.
+
+
+
+
+
+ the sink
+
+
+
+ the new max lateness value.
+
+
+
+
+
+ Configures @sink to send Quality-of-Service events upstream.
+
+
+
+
+
+ the sink
+
+
+
+ the new qos value.
+
+
+
+
+
+ Set the render delay in @sink to @delay. The render delay is the time
+between actual rendering of a buffer and its synchronisation time. Some
+devices might delay media rendering which can be compensated for with this
+function.
+
+After calling this function, this sink will report additional latency and
+other sinks will adjust their latency to delay the rendering of their media.
+
+This function is usually called by subclasses.
+
+
+
+
+
+ a #GstBaseSink
+
+
+
+ the new delay
+
+
+
+
+
+ Configures @sink to synchronize on the clock or not. When
+@sync is %FALSE, incoming samples will be played as fast as
+possible. If @sync is %TRUE, the timestamps of the incoming
+buffers will be used to schedule the exact render time of its
+contents.
+
+
+
+
+
+ the sink
+
+
+
+ the new sync value.
+
+
+
+
+
+ Set the time that will be inserted between rendered buffers. This
+can be used to control the maximum buffers per second that the sink
+will render.
+
+
+
+
+
+ a #GstBaseSink
+
+
+
+ the throttle time in nanoseconds
+
+
+
+
+
+ Adjust the synchronisation of @sink with @offset. A negative value will
+render buffers earlier than their timestamp. A positive value will delay
+rendering. This function can be used to fix playback of badly timestamped
+buffers.
+
+
+
+
+
+ the sink
+
+
+
+ the new offset
+
+
+
+
+
+ This function will wait for preroll to complete and will then block until @time
+is reached. It is usually called by subclasses that use their own internal
+synchronisation but want to let some synchronization (like EOS) be handled
+by the base class.
+
+This function should only be called with the PREROLL_LOCK held (like when
+receiving an EOS event in the ::event vmethod or when handling buffers in
+::render).
+
+The @time argument should be the running_time of when the timeout should happen
+and will be adjusted with any latency and offset configured in the sink.
+
+ #GstFlowReturn
+
+
+
+
+ the sink
+
+
+
+ the running_time to be reached
+
+
+
+ the jitter to be filled with time diff, or %NULL
+
+
+
+
+
+ This function will block until @time is reached. It is usually called by
+subclasses that use their own internal synchronisation.
+
+If @time is not valid, no synchronisation is done and %GST_CLOCK_BADTIME is
+returned. Likewise, if synchronisation is disabled in the element or there
+is no clock, no synchronisation is done and %GST_CLOCK_BADTIME is returned.
+
+This function should only be called with the PREROLL_LOCK held, like when
+receiving an EOS event in the #GstBaseSinkClass.event() vmethod or when
+receiving a buffer in
+the #GstBaseSinkClass.render() vmethod.
+
+The @time argument should be the running_time of when this method should
+return and is not adjusted with any latency or offset configured in the
+sink.
+
+ #GstClockReturn
+
+
+
+
+ the sink
+
+
+
+ the running_time to be reached
+
+
+
+ the jitter to be filled with time diff, or %NULL
+
+
+
+
+
+ If the #GstBaseSinkClass.render() method performs its own synchronisation
+against the clock it must unblock when going from PLAYING to the PAUSED state
+and call this method before continuing to render the remaining data.
+
+If the #GstBaseSinkClass.render() method can block on something else than
+the clock, it must also be ready to unblock immediately on
+the #GstBaseSinkClass.unlock() method and cause the
+#GstBaseSinkClass.render() method to immediately call this function.
+In this case, the subclass must be prepared to continue rendering where it
+left off if this function returns %GST_FLOW_OK.
+
+This function will block until a state change to PLAYING happens (in which
+case this function returns %GST_FLOW_OK) or the processing must be stopped due
+to a state change to READY or a FLUSH event (in which case this function
+returns %GST_FLOW_FLUSHING).
+
+This function should only be called with the PREROLL_LOCK held, like in the
+render function.
+
+ %GST_FLOW_OK if the preroll completed and processing can
+continue. Any other return value should be returned from the render vmethod.
+
+
+
+
+ the sink
+
+
+
+
+
+ If set to %TRUE, the basesink will perform asynchronous state changes.
+When set to %FALSE, the sink will not signal the parent when it prerolls.
+Use this option when dealing with sparse streams or when synchronisation is
+not required.
+
+
+
+ The amount of bytes to pull when operating in pull mode.
+
+
+
+ Enable the last-sample property. If %FALSE, basesink doesn't keep a
+reference to the last buffer arrived and the last-sample property is always
+set to %NULL. This can be useful if you need buffers to be released as soon
+as possible, eg. if you're using a buffer pool.
+
+
+
+ The last buffer that arrived in the sink and was used for preroll or for
+rendering. This property can be used to generate thumbnails. This property
+can be %NULL when the sink has not yet received a buffer.
+
+
+
+ Control the maximum amount of bits that will be rendered per second.
+Setting this property to a value bigger than 0 will make the sink delay
+rendering of the buffers when it would exceed the max-bitrate.
+
+
+
+
+
+
+
+
+
+ The additional delay between synchronisation and actual rendering of the
+media. This property will add additional latency to the device in order to
+make other sinks compensate for the delay.
+
+
+
+
+
+
+ The time to insert between buffers. This property can be used to control
+the maximum amount of buffers per second to render. Setting this property
+to a value bigger than 0 will make the sink create THROTTLE QoS events.
+
+
+
+ Controls the final synchronisation, a negative value will render the buffer
+earlier while a positive value delays playback. This property can be
+used to fix synchronisation in bad files.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Subclasses can override any of the available virtual methods or not, as
+needed. At the minimum, the @render method should be overridden to
+output/present buffers.
+
+ Element parent class
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This is a generic base class for source elements. The following
+types of sources are supported:
+
+ * random access sources like files
+ * seekable sources
+ * live sources
+
+The source can be configured to operate in any #GstFormat with the
+gst_base_src_set_format() method. The currently set format determines
+the format of the internal #GstSegment and any %GST_EVENT_SEGMENT
+events. The default format for #GstBaseSrc is %GST_FORMAT_BYTES.
+
+#GstBaseSrc always supports push mode scheduling. If the following
+conditions are met, it also supports pull mode scheduling:
+
+ * The format is set to %GST_FORMAT_BYTES (default).
+ * #GstBaseSrcClass.is_seekable() returns %TRUE.
+
+If all the conditions are met for operating in pull mode, #GstBaseSrc is
+automatically seekable in push mode as well. The following conditions must
+be met to make the element seekable in push mode when the format is not
+%GST_FORMAT_BYTES:
+
+* #GstBaseSrcClass.is_seekable() returns %TRUE.
+* #GstBaseSrcClass.query() can convert all supported seek formats to the
+ internal format as set with gst_base_src_set_format().
+* #GstBaseSrcClass.do_seek() is implemented, performs the seek and returns
+ %TRUE.
+
+When the element does not meet the requirements to operate in pull mode, the
+offset and length in the #GstBaseSrcClass.create() method should be ignored.
+It is recommended to subclass #GstPushSrc instead, in this situation. If the
+element can operate in pull mode but only with specific offsets and
+lengths, it is allowed to generate an error when the wrong values are passed
+to the #GstBaseSrcClass.create() function.
+
+#GstBaseSrc has support for live sources. Live sources are sources that when
+paused discard data, such as audio or video capture devices. A typical live
+source also produces data at a fixed rate and thus provides a clock to publish
+this rate.
+Use gst_base_src_set_live() to activate the live source mode.
+
+A live source does not produce data in the PAUSED state. This means that the
+#GstBaseSrcClass.create() method will not be called in PAUSED but only in
+PLAYING. To signal the pipeline that the element will not produce data, the
+return value from the READY to PAUSED state will be
+%GST_STATE_CHANGE_NO_PREROLL.
+
+A typical live source will timestamp the buffers it creates with the
+current running time of the pipeline. This is one reason why a live source
+can only produce data in the PLAYING state, when the clock is actually
+distributed and running.
+
+Live sources that synchronize and block on the clock (an audio source, for
+example) can use gst_base_src_wait_playing() when the
+#GstBaseSrcClass.create() function was interrupted by a state change to
+PAUSED.
+
+The #GstBaseSrcClass.get_times() method can be used to implement pseudo-live
+sources. It only makes sense to implement the #GstBaseSrcClass.get_times()
+function if the source is a live source. The #GstBaseSrcClass.get_times()
+function should return timestamps starting from 0, as if it were a non-live
+source. The base class will make sure that the timestamps are transformed
+into the current running_time. The base source will then wait for the
+calculated running_time before pushing out the buffer.
+
+For live sources, the base class will by default report a latency of 0.
+For pseudo live sources, the base class will by default measure the difference
+between the first buffer timestamp and the start time of get_times and will
+report this value as the latency.
+Subclasses should override the query function when this behaviour is not
+acceptable.
+
+There is only support in #GstBaseSrc for exactly one source pad, which
+should be named "src". A source implementation (subclass of #GstBaseSrc)
+should install a pad template in its class_init function, like so:
+|[<!-- language="C" -->
+static void
+my_element_class_init (GstMyElementClass *klass)
+{
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ // srctemplate should be a #GstStaticPadTemplate with direction
+ // %GST_PAD_SRC and name "src"
+ gst_element_class_add_static_pad_template (gstelement_class, &srctemplate);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Source name",
+ "Source",
+ "My Source element",
+ "The author <my.sink@my.email>");
+}
+]|
+
+## Controlled shutdown of live sources in applications
+
+Applications that record from a live source may want to stop recording
+in a controlled way, so that the recording is stopped, but the data
+already in the pipeline is processed to the end (remember that many live
+sources would go on recording forever otherwise). For that to happen the
+application needs to make the source stop recording and send an EOS
+event down the pipeline. The application would then wait for an
+EOS message posted on the pipeline's bus to know when all data has
+been processed and the pipeline can safely be stopped.
+
+An application may send an EOS event to a source element to make it
+perform the EOS logic (send EOS event downstream or post a
+%GST_MESSAGE_SEGMENT_DONE on the bus). This can typically be done
+with the gst_element_send_event() function on the element or its parent bin.
+
+After the EOS has been sent to the element, the application should wait for
+an EOS message to be posted on the pipeline's bus. Once this EOS message is
+received, it may safely shut down the entire pipeline.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Set new caps on the basesrc source pad.
+
+ %TRUE if the caps could be set
+
+
+
+
+ a #GstBaseSrc
+
+
+
+ a #GstCaps
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Lets #GstBaseSrc sub-classes know the memory @allocator
+used by the base class and its @params.
+
+Unref the @allocator after usage.
+
+
+
+
+
+ a #GstBaseSrc
+
+
+
+ the #GstAllocator
+used
+
+
+
+ the
+#GstAllocationParams of @allocator
+
+
+
+
+
+ Get the number of bytes that @src will push out with each buffer.
+
+ the number of bytes pushed with each buffer.
+
+
+
+
+ the source
+
+
+
+
+
+
+ the instance of the #GstBufferPool used
+by the src; unref it after usage.
+
+
+
+
+ a #GstBaseSrc
+
+
+
+
+
+ Query if @src timestamps outgoing buffers based on the current running_time.
+
+ %TRUE if the base class will automatically timestamp outgoing buffers.
+
+
+
+
+ the source
+
+
+
+
+
+ Get the current async behaviour of @src. See also gst_base_src_set_async().
+
+ %TRUE if @src is operating in async mode.
+
+
+
+
+ base source instance
+
+
+
+
+
+ Check if an element is in live mode.
+
+ %TRUE if element is in live mode.
+
+
+
+
+ base source instance
+
+
+
+
+
+ Prepare a new seamless segment for emission downstream. This function must
+only be called by derived sub-classes, and only from the create() function,
+as the stream-lock needs to be held.
+
+The format for the new segment will be the current format of the source, as
+configured with gst_base_src_set_format()
+
+ %TRUE if preparation of the seamless segment succeeded.
+
+
+
+
+ The source
+
+
+
+ The new start value for the segment
+
+
+
+ Stop value for the new segment
+
+
+
+ The new time value for the start of the new segment
+
+
+
+
+
+ Query the source for the latency parameters. @live will be %TRUE when @src is
+configured as a live source. @min_latency and @max_latency will be set
+to the difference between the running time and the timestamp of the first
+buffer.
+
+This function is mostly used by subclasses.
+
+ %TRUE if the query succeeded.
+
+
+
+
+ the source
+
+
+
+ if the source is live
+
+
+
+ the min latency of the source
+
+
+
+ the max latency of the source
+
+
+
+
+
+ Configure async behaviour in @src, no state change will block. The open,
+close, start, stop, play and pause virtual methods will be executed in a
+different thread and are thus allowed to perform blocking operations. Any
+blocking operation should be unblocked with the unlock vmethod.
+
+
+
+
+
+ base source instance
+
+
+
+ new async mode
+
+
+
+
+
+ If @automatic_eos is %TRUE, @src will automatically go EOS if a buffer
+after the total size is returned. By default this is %TRUE but sources
+that can't return an authoritative size and only know that they're EOS
+when trying to read more should set this to %FALSE.
+
+
+
+
+
+ base source instance
+
+
+
+ automatic eos
+
+
+
+
+
+ Set the number of bytes that @src will push out with each buffer. When
+@blocksize is set to -1, a default length will be used.
+
+
+
+
+
+ the source
+
+
+
+ the new blocksize in bytes
+
+
+
+
+
+ Set new caps on the basesrc source pad.
+
+ %TRUE if the caps could be set
+
+
+
+
+ a #GstBaseSrc
+
+
+
+ a #GstCaps
+
+
+
+
+
+ Configure @src to automatically timestamp outgoing buffers based on the
+current running_time of the pipeline. This property is mostly useful for live
+sources.
+
+
+
+
+
+ the source
+
+
+
+ enable or disable timestamping
+
+
+
+
+
+ If not @dynamic, size is only updated when needed, such as when trying to
+read past current tracked size. Otherwise, size is checked for upon each
+read.
+
+
+
+
+
+ base source instance
+
+
+
+ new dynamic size mode
+
+
+
+
+
+ Sets the default format of the source. This will be the format used
+for sending SEGMENT events and for performing seeks.
+
+If a format of GST_FORMAT_BYTES is set, the element will be able to
+operate in pull mode if the #GstBaseSrcClass.is_seekable() returns %TRUE.
+
+This function must only be called in states < %GST_STATE_PAUSED.
+
+
+
+
+
+ base source instance
+
+
+
+ the format to use
+
+
+
+
+
+ If the element listens to a live source, @live should
+be set to %TRUE.
+
+A live source will not produce data in the PAUSED state and
+will therefore not be able to participate in the PREROLL phase
+of a pipeline. To signal this fact to the application and the
+pipeline, the state change return value of the live source will
+be GST_STATE_CHANGE_NO_PREROLL.
+
+
+
+
+
+ base source instance
+
+
+
+ new live-mode
+
+
+
+
+
+ Complete an asynchronous start operation. When the subclass overrides the
+start method, it should call gst_base_src_start_complete() when the start
+operation completes either from the same thread or from an asynchronous
+helper thread.
+
+
+
+
+
+ base source instance
+
+
+
+ a #GstFlowReturn
+
+
+
+
+
+ Wait until the start operation completes.
+
+ a #GstFlowReturn.
+
+
+
+
+ base source instance
+
+
+
+
+
+ If the #GstBaseSrcClass.create() method performs its own synchronisation
+against the clock it must unblock when going from PLAYING to the PAUSED state
+and call this method before continuing to produce the remaining data.
+
+This function will block until a state change to PLAYING happens (in which
+case this function returns %GST_FLOW_OK) or the processing must be stopped due
+to a state change to READY or a FLUSH event (in which case this function
+returns %GST_FLOW_FLUSHING).
+
+ %GST_FLOW_OK if @src is PLAYING and processing can
+continue. Any other return value should be returned from the create vmethod.
+
+
+
+
+ the src
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Subclasses can override any of the available virtual methods or not, as
+needed. At the minimum, the @create method should be overridden to produce
+buffers.
+
+ Element parent class
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ %TRUE if the caps could be set
+
+
+
+
+ a #GstBaseSrc
+
+
+
+ a #GstCaps
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ The #GstElement flags that a basesrc element may have.
+
+ has source is starting
+
+
+ has source been started
+
+
+ offset to define more flags
+
+
+
+
+
+ This base class is for filter elements that process data. Elements
+that are suitable for implementation using #GstBaseTransform are ones
+where the size and caps of the output is known entirely from the input
+caps and buffer sizes. These include elements that directly transform
+one buffer into another, modify the contents of a buffer in-place, as
+well as elements that collate multiple input buffers into one output buffer,
+or that expand one input buffer into multiple output buffers. See below
+for more concrete use cases.
+
+It provides for:
+
+* one sinkpad and one srcpad
+* Possible formats on sink and source pad implemented
+ with custom transform_caps function. By default uses
+ same format on sink and source.
+
+* Handles state changes
+* Does flushing
+* Push mode
+* Pull mode if the sub-class transform can operate on arbitrary data
+
+# Use Cases
+
+## Passthrough mode
+
+ * Element has no interest in modifying the buffer. It may want to inspect it,
+ in which case the element should have a transform_ip function. If there
+ is no transform_ip function in passthrough mode, the buffer is pushed
+ intact.
+
+ * The #GstBaseTransformClass.passthrough_on_same_caps variable
+ will automatically set/unset passthrough based on whether the
+ element negotiates the same caps on both pads.
+
+ * #GstBaseTransformClass.passthrough_on_same_caps on an element that
+ doesn't implement a transform_caps function is useful for elements that
+ only inspect data (such as level)
+
+ * Example elements
+
+ * Level
+ * Videoscale, audioconvert, videoconvert, audioresample in certain modes.
+
+## Modifications in-place - input buffer and output buffer are the same thing.
+
+* The element must implement a transform_ip function.
+* Output buffer size must <= input buffer size
+* If the always_in_place flag is set, non-writable buffers will be copied
+ and passed to the transform_ip function, otherwise a new buffer will be
+ created and the transform function called.
+
+* Incoming writable buffers will be passed to the transform_ip function
+ immediately.
+* only implementing transform_ip and not transform implies always_in_place = %TRUE
+
+ * Example elements:
+ * Volume
+ * Audioconvert in certain modes (signed/unsigned conversion)
+ * videoconvert in certain modes (endianness swapping)
+
+## Modifications only to the caps/metadata of a buffer
+
+* The element does not require writable data, but non-writable buffers
+ should be subbuffered so that the meta-information can be replaced.
+
+* Elements wishing to operate in this mode should replace the
+ prepare_output_buffer method to create subbuffers of the input buffer
+ and set always_in_place to %TRUE
+
+* Example elements
+ * Capsfilter when setting caps on outgoing buffers that have
+ none.
+ * identity when it is going to re-timestamp buffers by
+ datarate.
+
+## Normal mode
+ * always_in_place flag is not set, or there is no transform_ip function
+ * Element will receive an input buffer and output buffer to operate on.
+ * Output buffer is allocated by calling the prepare_output_buffer function.
+ * Example elements:
+ * Videoscale, videoconvert, audioconvert when doing
+ scaling/conversions
+
+## Special output buffer allocations
+ * Elements which need to do special allocation of their output buffers
+ beyond allocating output buffers via the negotiated allocator or
+ buffer pool should implement the prepare_output_buffer method.
+
+ * Example elements:
+ * efence
+
+# Sub-class settable flags on GstBaseTransform
+
+* passthrough
+
+ * Implies that in the current configuration, the sub-class is not interested in modifying the buffers.
+ * Elements which are always in passthrough mode whenever the same caps has been negotiated on both pads can set the class variable passthrough_on_same_caps to have this behaviour automatically.
+
+* always_in_place
+ * Determines whether a non-writable buffer will be copied before passing
+ to the transform_ip function.
+
+ * Implied %TRUE if no transform function is implemented.
+ * Implied %FALSE if ONLY transform function is implemented.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Lets #GstBaseTransform sub-classes to know the memory @allocator
+used by the base class and its @params.
+
+Unref the @allocator after use it.
+
+
+
+
+
+ a #GstBaseTransform
+
+
+
+ the #GstAllocator
+used
+
+
+
+ the
+#GstAllocationParams of @allocator
+
+
+
+
+
+
+ the instance of the #GstBufferPool used
+by @trans; free it after use it
+
+
+
+
+ a #GstBaseTransform
+
+
+
+
+
+ See if @trans is configured as an in_place transform.
+
+ %TRUE if the transform is configured in in_place mode.
+
+MT safe.
+
+
+
+
+ the #GstBaseTransform to query
+
+
+
+
+
+ See if @trans is configured as a passthrough transform.
+
+ %TRUE if the transform is configured in passthrough mode.
+
+MT safe.
+
+
+
+
+ the #GstBaseTransform to query
+
+
+
+
+
+ Queries if the transform will handle QoS.
+
+ %TRUE if QoS is enabled.
+
+MT safe.
+
+
+
+
+ a #GstBaseTransform
+
+
+
+
+
+ Instructs @trans to request renegotiation upstream. This function is
+typically called after properties on the transform were set that
+influence the input format.
+
+
+
+
+
+ a #GstBaseTransform
+
+
+
+
+
+ Instructs @trans to renegotiate a new downstream transform on the next
+buffer. This function is typically called after properties on the transform
+were set that influence the output format.
+
+
+
+
+
+ a #GstBaseTransform
+
+
+
+
+
+ If @gap_aware is %FALSE (the default), output buffers will have the
+%GST_BUFFER_FLAG_GAP flag unset.
+
+If set to %TRUE, the element must handle output buffers with this flag set
+correctly, i.e. it can assume that the buffer contains neutral data but must
+unset the flag if the output is not neutral data.
+
+MT safe.
+
+
+
+
+
+ a #GstBaseTransform
+
+
+
+ New state
+
+
+
+
+
+ Determines whether a non-writable buffer will be copied before passing
+to the transform_ip function.
+
+ * Always %TRUE if no transform function is implemented.
+ * Always %FALSE if ONLY transform function is implemented.
+
+MT safe.
+
+
+
+
+
+ the #GstBaseTransform to modify
+
+
+
+ Boolean value indicating that we would like to operate
+on in_place buffers.
+
+
+
+
+
+ Set passthrough mode for this filter by default. This is mostly
+useful for filters that do not care about negotiation.
+
+Always %TRUE for filters which don't implement either a transform
+or transform_ip method.
+
+MT safe.
+
+
+
+
+
+ the #GstBaseTransform to set
+
+
+
+ boolean indicating passthrough mode.
+
+
+
+
+
+ If @prefer_passthrough is %TRUE (the default), @trans will check and
+prefer passthrough caps from the list of caps returned by the
+transform_caps vmethod.
+
+If set to %FALSE, the element must order the caps returned from the
+transform_caps function in such a way that the preferred format is
+first in the list. This can be interesting for transforms that can do
+passthrough transforms but prefer to do something else, like a
+capsfilter.
+
+MT safe.
+
+
+
+
+
+ a #GstBaseTransform
+
+
+
+ New state
+
+
+
+
+
+ Enable or disable QoS handling in the transform.
+
+MT safe.
+
+
+
+
+
+ a #GstBaseTransform
+
+
+
+ new state
+
+
+
+
+
+ Set the QoS parameters in the transform. This function is called internally
+when a QOS event is received but subclasses can provide custom information
+when needed.
+
+MT safe.
+
+
+
+
+
+ a #GstBaseTransform
+
+
+
+ the proportion
+
+
+
+ the diff against the clock
+
+
+
+ the timestamp of the buffer generating the QoS expressed in
+running_time.
+
+
+
+
+
+ Updates the srcpad caps and send the caps downstream. This function
+can be used by subclasses when they have already negotiated their caps
+but found a change in them (or computed new information). This way,
+they can notify downstream about that change without losing any
+buffer.
+
+ %TRUE if the caps could be sent downstream, %FALSE otherwise
+
+
+
+
+ a #GstBaseTransform
+
+
+
+ An updated version of the srcpad caps to be pushed
+downstream
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Subclasses can override any of the available virtual methods or not, as
+needed. At minimum either @transform or @transform_ip need to be overridden.
+If the element can overwrite the input data with the results (data is of the
+same type and quantity) it should provide @transform_ip.
+
+ Element parent class
+
+
+
+ If set to %TRUE, passthrough mode will be
+ automatically enabled if the caps are the same.
+ Set to %FALSE by default.
+
+
+
+ If set to %TRUE, @transform_ip will be called in
+ passthrough mode. The passed buffer might not be
+ writable. When %FALSE, neither @transform nor
+ @transform_ip will be called in passthrough mode.
+ Set to %TRUE by default.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #GstBitReader provides a bit reader that can read any number of bits
+from a memory buffer. It provides functions for reading any number of bits
+into 8, 16, 32 and 64 bit variables.
+
+ Data from which the bit reader will
+ read
+
+
+
+
+
+ Size of @data in bytes
+
+
+
+ Current byte position
+
+
+
+ Bit position in the current byte
+
+
+
+
+
+
+
+
+ Frees a #GstBitReader instance, which was previously allocated by
+gst_bit_reader_new().
+
+
+
+
+
+ a #GstBitReader instance
+
+
+
+
+
+ Read @nbits bits into @val and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+ Pointer to a #guint16 to store the result
+
+
+
+ number of bits to read
+
+
+
+
+
+ Read @nbits bits into @val and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+ Pointer to a #guint32 to store the result
+
+
+
+ number of bits to read
+
+
+
+
+
+ Read @nbits bits into @val and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+ Pointer to a #guint64 to store the result
+
+
+
+ number of bits to read
+
+
+
+
+
+ Read @nbits bits into @val and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+ Pointer to a #guint8 to store the result
+
+
+
+ number of bits to read
+
+
+
+
+
+ Returns the current position of a #GstBitReader instance in bits.
+
+ The current position of @reader in bits.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+
+
+ Returns the remaining number of bits of a #GstBitReader instance.
+
+ The remaining number of bits of @reader instance.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+
+
+ Returns the total number of bits of a #GstBitReader instance.
+
+ The total number of bits of @reader instance.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+
+
+ Initializes a #GstBitReader instance to read from @data. This function
+can be called on already initialized instances.
+
+
+
+
+
+ a #GstBitReader instance
+
+
+
+ data from which the bit reader should read
+
+
+
+
+
+ Size of @data in bytes
+
+
+
+
+
+ Read @nbits bits into @val but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+ Pointer to a #guint16 to store the result
+
+
+
+ number of bits to read
+
+
+
+
+
+ Read @nbits bits into @val but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+ Pointer to a #guint32 to store the result
+
+
+
+ number of bits to read
+
+
+
+
+
+ Read @nbits bits into @val but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+ Pointer to a #guint64 to store the result
+
+
+
+ number of bits to read
+
+
+
+
+
+ Read @nbits bits into @val but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+ Pointer to a #guint8 to store the result
+
+
+
+ number of bits to read
+
+
+
+
+
+ Sets the new position of a #GstBitReader instance to @pos in bits.
+
+ %TRUE if the position could be set successfully, %FALSE
+otherwise.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+ The new position in bits
+
+
+
+
+
+ Skips @nbits bits of the #GstBitReader instance.
+
+ %TRUE if @nbits bits could be skipped, %FALSE otherwise.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+ the number of bits to skip
+
+
+
+
+
+ Skips until the next byte.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstBitReader instance
+
+
+
+
+
+ Create a new #GstBitReader instance, which will read from @data.
+
+Free-function: gst_bit_reader_free
+
+ a new #GstBitReader instance
+
+
+
+
+ Data from which the #GstBitReader
+ should read
+
+
+
+
+
+ Size of @data in bytes
+
+
+
+
+
+
+ #GstByteReader provides a byte reader that can read different integer and
+floating point types from a memory buffer. It provides functions for reading
+signed/unsigned, little/big endian integers of 8, 16, 24, 32 and 64 bits
+and functions for reading little/big endian floating point numbers of
+32 and 64 bits. It also provides functions to read NUL-terminated strings
+in various character encodings.
+
+ Data from which the bit reader will
+ read
+
+
+
+
+
+ Size of @data in bytes
+
+
+
+ Current byte position
+
+
+
+
+
+
+
+
+ Free-function: g_free
+
+Returns a newly-allocated copy of the current data
+position if at least @size bytes are left and
+updates the current position. Free with g_free() when no longer needed.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Size in bytes
+
+
+
+ address of a
+ #guint8 pointer variable in which to store the result
+
+
+
+
+
+
+
+ Free-function: g_free
+
+Returns a newly-allocated copy of the current data position if there is
+a NUL-terminated UTF-16 string in the data (this could be an empty string
+as well), and advances the current position.
+
+No input checking for valid UTF-16 is done. This function is endianness
+agnostic - you should not assume the UTF-16 characters are in host
+endianness.
+
+This function will fail if no NUL-terminator was found in the data.
+
+Note: there is no peek or get variant of this function to ensure correct
+byte alignment of the UTF-16 string.
+
+ %TRUE if a string could be read, %FALSE otherwise. The
+ string put into @str must be freed with g_free() when no longer needed.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ address of a
+ #guint16 pointer variable in which to store the result
+
+
+
+
+
+
+
+ Free-function: g_free
+
+Returns a newly-allocated copy of the current data position if there is
+a NUL-terminated UTF-32 string in the data (this could be an empty string
+as well), and advances the current position.
+
+No input checking for valid UTF-32 is done. This function is endianness
+agnostic - you should not assume the UTF-32 characters are in host
+endianness.
+
+This function will fail if no NUL-terminator was found in the data.
+
+Note: there is no peek or get variant of this function to ensure correct
+byte alignment of the UTF-32 string.
+
+ %TRUE if a string could be read, %FALSE otherwise. The
+ string put into @str must be freed with g_free() when no longer needed.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ address of a
+ #guint32 pointer variable in which to store the result
+
+
+
+
+
+
+
+ Free-function: g_free
+
+FIXME:Reads (copies) a NUL-terminated string in the #GstByteReader instance,
+advancing the current position to the byte after the string. This will work
+for any NUL-terminated string with a character width of 8 bits, so ASCII,
+UTF-8, ISO-8859-N etc. No input checking for valid UTF-8 is done.
+
+This function will fail if no NUL-terminator was found in the data.
+
+ %TRUE if a string could be read into @str, %FALSE otherwise. The
+ string put into @str must be freed with g_free() when no longer needed.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ address of a
+ #gchar pointer variable in which to store the result
+
+
+
+
+
+
+
+ Frees a #GstByteReader instance, which was previously allocated by
+gst_byte_reader_new().
+
+
+
+
+
+ a #GstByteReader instance
+
+
+
+
+
+ Returns a constant pointer to the current data
+position if at least @size bytes are left and
+updates the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Size in bytes
+
+
+
+ address of a
+ #guint8 pointer variable in which to store the result
+
+
+
+
+
+
+
+ Read a 32 bit big endian floating point value into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gfloat to store the result
+
+
+
+
+
+ Read a 32 bit little endian floating point value into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gfloat to store the result
+
+
+
+
+
+ Read a 64 bit big endian floating point value into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gdouble to store the result
+
+
+
+
+
+ Read a 64 bit little endian floating point value into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gdouble to store the result
+
+
+
+
+
+ Read a signed 16 bit big endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint16 to store the result
+
+
+
+
+
+ Read a signed 16 bit little endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint16 to store the result
+
+
+
+
+
+ Read a signed 24 bit big endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint32 to store the result
+
+
+
+
+
+ Read a signed 24 bit little endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint32 to store the result
+
+
+
+
+
+ Read a signed 32 bit big endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint32 to store the result
+
+
+
+
+
+ Read a signed 32 bit little endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint32 to store the result
+
+
+
+
+
+ Read a signed 64 bit big endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint64 to store the result
+
+
+
+
+
+ Read a signed 64 bit little endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint64 to store the result
+
+
+
+
+
+ Read a signed 8 bit integer into @val and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint8 to store the result
+
+
+
+
+
+ Returns the current position of a #GstByteReader instance in bytes.
+
+ The current position of @reader in bytes.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+
+
+ Returns the remaining number of bytes of a #GstByteReader instance.
+
+ The remaining number of bytes of @reader instance.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+
+
+ Returns the total number of bytes of a #GstByteReader instance.
+
+ The total number of bytes of @reader instance.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+
+
+ Returns a constant pointer to the current data position if there is
+a NUL-terminated string in the data (this could be just a NUL terminator),
+advancing the current position to the byte after the string. This will work
+for any NUL-terminated string with a character width of 8 bits, so ASCII,
+UTF-8, ISO-8859-N etc.
+
+No input checking for valid UTF-8 is done.
+
+This function will fail if no NUL-terminator was found in the data.
+
+ %TRUE if a string could be found, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ address of a
+ #gchar pointer variable in which to store the result
+
+
+
+
+
+
+
+ Initializes a #GstByteReader sub-reader instance to contain @size bytes of
+data from the current position of @reader. This is useful to read chunked
+formats and make sure that one doesn't read beyond the size of the sub-chunk.
+
+Unlike gst_byte_reader_peek_sub_reader(), this function also modifies the
+position of @reader and moves it forward by @size bytes.
+
+ FALSE on error or if @reader does not contain @size more bytes from
+ the current position, and otherwise TRUE
+
+
+
+
+ an existing and initialized #GstByteReader instance
+
+
+
+ a #GstByteReader instance to initialize as sub-reader
+
+
+
+ size of @sub_reader in bytes
+
+
+
+
+
+ Read an unsigned 16 bit big endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint16 to store the result
+
+
+
+
+
+ Read an unsigned 16 bit little endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint16 to store the result
+
+
+
+
+
+ Read an unsigned 24 bit big endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint32 to store the result
+
+
+
+
+
+ Read an unsigned 24 bit little endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint32 to store the result
+
+
+
+
+
+ Read an unsigned 32 bit big endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint32 to store the result
+
+
+
+
+
+ Read an unsigned 32 bit little endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint32 to store the result
+
+
+
+
+
+ Read an unsigned 64 bit big endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint64 to store the result
+
+
+
+
+
+ Read an unsigned 64 bit little endian integer into @val
+and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint64 to store the result
+
+
+
+
+
+ Read an unsigned 8 bit integer into @val and update the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint8 to store the result
+
+
+
+
+
+ Initializes a #GstByteReader instance to read from @data. This function
+can be called on already initialized instances.
+
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ data from which
+ the #GstByteReader should read
+
+
+
+
+
+ Size of @data in bytes
+
+
+
+
+
+ Scan for pattern @pattern with applied mask @mask in the byte reader data,
+starting from offset @offset relative to the current position.
+
+The bytes in @pattern and @mask are interpreted left-to-right, regardless
+of endianness. All four bytes of the pattern must be present in the
+byte reader data for it to match, even if the first or last bytes are masked
+out.
+
+It is an error to call this function without making sure that there is
+enough data (offset+size bytes) in the byte reader.
+
+ offset of the first match, or -1 if no match was found.
+
+Example:
+|[
+// Assume the reader contains 0x00 0x01 0x02 ... 0xfe 0xff
+
+gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0x00010203, 0, 256);
+// -> returns 0
+gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0x00010203, 1, 255);
+// -> returns -1
+gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0x01020304, 1, 255);
+// -> returns 1
+gst_byte_reader_masked_scan_uint32 (reader, 0xffff, 0x0001, 0, 256);
+// -> returns -1
+gst_byte_reader_masked_scan_uint32 (reader, 0xffff, 0x0203, 0, 256);
+// -> returns 0
+gst_byte_reader_masked_scan_uint32 (reader, 0xffff0000, 0x02030000, 0, 256);
+// -> returns 2
+gst_byte_reader_masked_scan_uint32 (reader, 0xffff0000, 0x02030000, 0, 4);
+// -> returns -1
+]|
+
+
+
+
+ a #GstByteReader
+
+
+
+ mask to apply to data before matching against @pattern
+
+
+
+ pattern to match (after mask is applied)
+
+
+
+ offset from which to start scanning, relative to the current
+ position
+
+
+
+ number of bytes to scan from offset
+
+
+
+
+
+ Scan for pattern @pattern with applied mask @mask in the byte reader data,
+starting from offset @offset relative to the current position.
+
+The bytes in @pattern and @mask are interpreted left-to-right, regardless
+of endianness. All four bytes of the pattern must be present in the
+byte reader data for it to match, even if the first or last bytes are masked
+out.
+
+It is an error to call this function without making sure that there is
+enough data (offset+size bytes) in the byte reader.
+
+ offset of the first match, or -1 if no match was found.
+
+
+
+
+ a #GstByteReader
+
+
+
+ mask to apply to data before matching against @pattern
+
+
+
+ pattern to match (after mask is applied)
+
+
+
+ offset from which to start scanning, relative to the current
+ position
+
+
+
+ number of bytes to scan from offset
+
+
+
+ pointer to uint32 to return matching data
+
+
+
+
+
+ Returns a constant pointer to the current data
+position if at least @size bytes are left and
+keeps the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Size in bytes
+
+
+
+ address of a
+ #guint8 pointer variable in which to store the result
+
+
+
+
+
+
+
+ Read a 32 bit big endian floating point value into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gfloat to store the result
+
+
+
+
+
+ Read a 32 bit little endian floating point value into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gfloat to store the result
+
+
+
+
+
+ Read a 64 bit big endian floating point value into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gdouble to store the result
+
+
+
+
+
+ Read a 64 bit little endian floating point value into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gdouble to store the result
+
+
+
+
+
+ Read a signed 16 bit big endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint16 to store the result
+
+
+
+
+
+ Read a signed 16 bit little endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint16 to store the result
+
+
+
+
+
+ Read a signed 24 bit big endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint32 to store the result
+
+
+
+
+
+ Read a signed 24 bit little endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint32 to store the result
+
+
+
+
+
+ Read a signed 32 bit big endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint32 to store the result
+
+
+
+
+
+ Read a signed 32 bit little endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint32 to store the result
+
+
+
+
+
+ Read a signed 64 bit big endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint64 to store the result
+
+
+
+
+
+ Read a signed 64 bit little endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint64 to store the result
+
+
+
+
+
+ Read a signed 8 bit integer into @val but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #gint8 to store the result
+
+
+
+
+
+ Returns a constant pointer to the current data position if there is
+a NUL-terminated string in the data (this could be just a NUL terminator).
+The current position will be maintained. This will work for any
+NUL-terminated string with a character width of 8 bits, so ASCII,
+UTF-8, ISO-8859-N etc.
+
+No input checking for valid UTF-8 is done.
+
+This function will fail if no NUL-terminator was found in the data.
+
+ %TRUE if a string could be skipped, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ address of a
+ #gchar pointer variable in which to store the result
+
+
+
+
+
+
+
+ Initializes a #GstByteReader sub-reader instance to contain @size bytes of
+data from the current position of @reader. This is useful to read chunked
+formats and make sure that one doesn't read beyond the size of the sub-chunk.
+
+Unlike gst_byte_reader_get_sub_reader(), this function does not modify the
+current position of @reader.
+
+ FALSE on error or if @reader does not contain @size more bytes from
+ the current position, and otherwise TRUE
+
+
+
+
+ an existing and initialized #GstByteReader instance
+
+
+
+ a #GstByteReader instance to initialize as sub-reader
+
+
+
+ size of @sub_reader in bytes
+
+
+
+
+
+ Read an unsigned 16 bit big endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint16 to store the result
+
+
+
+
+
+ Read an unsigned 16 bit little endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint16 to store the result
+
+
+
+
+
+ Read an unsigned 24 bit big endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint32 to store the result
+
+
+
+
+
+ Read an unsigned 24 bit little endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint32 to store the result
+
+
+
+
+
+ Read an unsigned 32 bit big endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint32 to store the result
+
+
+
+
+
+ Read an unsigned 32 bit little endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint32 to store the result
+
+
+
+
+
+ Read an unsigned 64 bit big endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint64 to store the result
+
+
+
+
+
+ Read an unsigned 64 bit little endian integer into @val
+but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint64 to store the result
+
+
+
+
+
+ Read an unsigned 8 bit integer into @val but keep the current position.
+
+ %TRUE if successful, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ Pointer to a #guint8 to store the result
+
+
+
+
+
+ Sets the new position of a #GstByteReader instance to @pos in bytes.
+
+ %TRUE if the position could be set successfully, %FALSE
+otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ The new position in bytes
+
+
+
+
+
+ Skips @nbytes bytes of the #GstByteReader instance.
+
+ %TRUE if @nbytes bytes could be skipped, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+ the number of bytes to skip
+
+
+
+
+
+ Skips a NUL-terminated UTF-16 string in the #GstByteReader instance,
+advancing the current position to the byte after the string.
+
+No input checking for valid UTF-16 is done.
+
+This function will fail if no NUL-terminator was found in the data.
+
+ %TRUE if a string could be skipped, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+
+
+ Skips a NUL-terminated UTF-32 string in the #GstByteReader instance,
+advancing the current position to the byte after the string.
+
+No input checking for valid UTF-32 is done.
+
+This function will fail if no NUL-terminator was found in the data.
+
+ %TRUE if a string could be skipped, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+
+
+ Skips a NUL-terminated string in the #GstByteReader instance, advancing
+the current position to the byte after the string. This will work for
+any NUL-terminated string with a character width of 8 bits, so ASCII,
+UTF-8, ISO-8859-N etc. No input checking for valid UTF-8 is done.
+
+This function will fail if no NUL-terminator was found in the data.
+
+ %TRUE if a string could be skipped, %FALSE otherwise.
+
+
+
+
+ a #GstByteReader instance
+
+
+
+
+
+ Create a new #GstByteReader instance, which will read from @data.
+
+Free-function: gst_byte_reader_free
+
+ a new #GstByteReader instance
+
+
+
+
+ data from which the
+ #GstByteReader should read
+
+
+
+
+
+ Size of @data in bytes
+
+
+
+
+
+
+ #GstByteWriter provides a byte writer and reader that can write/read different
+integer and floating point types to/from a memory buffer. It provides functions
+for writing/reading signed/unsigned, little/big endian integers of 8, 16, 24,
+32 and 64 bits and functions for reading little/big endian floating point numbers of
+32 and 64 bits. It also provides functions to write/read NUL-terminated strings
+in various character encodings.
+
+ #GstByteReader parent
+
+
+
+ Allocation size of the data
+
+
+
+ If %TRUE no reallocations are allowed
+
+
+
+ If %FALSE no reallocations are allowed and copies of data are returned
+
+
+
+
+
+
+
+
+ Checks if enough free space from the current write cursor is
+available and reallocates if necessary.
+
+ %TRUE if at least @size bytes are still available
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Number of bytes that should be available
+
+
+
+
+
+ Writes @size bytes containing @value to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to be written
+
+
+
+ Number of bytes to be written
+
+
+
+
+
+ Frees @writer and all memory allocated by it.
+
+
+
+
+
+ #GstByteWriter instance
+
+
+
+
+
+ Frees @writer and all memory allocated by it except
+the current data, which is returned as #GstBuffer.
+
+Free-function: gst_buffer_unref
+
+ the current data as buffer. gst_buffer_unref()
+ after usage.
+
+
+
+
+ #GstByteWriter instance
+
+
+
+
+
+ Frees @writer and all memory allocated by it except
+the current data, which is returned.
+
+Free-function: g_free
+
+ the current data. g_free() after usage.
+
+
+
+
+ #GstByteWriter instance
+
+
+
+
+
+ Returns the remaining size of data that can still be written. If
+-1 is returned the remaining size is only limited by system resources.
+
+ the remaining size of data that can still be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+
+
+ Initializes @writer to an empty instance
+
+
+
+
+
+ #GstByteWriter instance
+
+
+
+
+
+ Initializes @writer with the given
+memory area. If @initialized is %TRUE it is possible to
+read @size bytes from the #GstByteWriter from the beginning.
+
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Memory area for writing
+
+
+
+
+
+ Size of @data in bytes
+
+
+
+ If %TRUE the complete data can be read from the beginning
+
+
+
+
+
+ Initializes @writer with the given initial data size.
+
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Initial size of data
+
+
+
+ If %TRUE the data can't be reallocated
+
+
+
+
+
+ Writes @size bytes of @data to @writer.
+
+ %TRUE if the data could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ source #GstBuffer
+
+
+
+ offset to copy from
+
+
+
+ total size to copy. If -1, all data is copied
+
+
+
+
+
+ Writes @size bytes of @data to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Data to write
+
+
+
+
+
+ Size of @data in bytes
+
+
+
+
+
+ Writes a big endian 32 bit float to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a little endian 32 bit float to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a big endian 64 bit float to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a little endian 64 bit float to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a signed big endian 16 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a signed little endian 16 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a signed big endian 24 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a signed little endian 24 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a signed big endian 32 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a signed little endian 32 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a signed big endian 64 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a signed little endian 64 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a signed 8 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes a NUL-terminated UTF16 string to @writer (including the terminator).
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ UTF16 string to write
+
+
+
+
+
+
+
+ Writes a NUL-terminated UTF32 string to @writer (including the terminator).
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ UTF32 string to write
+
+
+
+
+
+
+
+ Writes a NUL-terminated UTF8 string to @writer (including the terminator).
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ UTF8 string to
+ write
+
+
+
+
+
+
+
+ Writes an unsigned big endian 16 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes an unsigned little endian 16 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes an unsigned big endian 24 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes an unsigned little endian 24 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes an unsigned big endian 32 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes an unsigned little endian 32 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes an unsigned big endian 64 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes an unsigned little endian 64 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Writes an unsigned 8 bit integer to @writer.
+
+ %TRUE if the value could be written
+
+
+
+
+ #GstByteWriter instance
+
+
+
+ Value to write
+
+
+
+
+
+ Resets @writer and frees the data if it's
+owned by @writer.
+
+
+
+
+
+ #GstByteWriter instance
+
+
+
+
+
+ Resets @writer and returns the current data as buffer.
+
+Free-function: gst_buffer_unref
+
+ the current data as buffer. gst_buffer_unref()
+ after usage.
+
+
+
+
+ #GstByteWriter instance
+
+
+
+
+
+ Resets @writer and returns the current data.
+
+Free-function: g_free
+
+ the current data. g_free() after
+usage.
+
+
+
+
+
+
+ #GstByteWriter instance
+
+
+
+
+
+ Creates a new, empty #GstByteWriter instance
+
+Free-function: gst_byte_writer_free
+
+ a new, empty #GstByteWriter instance
+
+
+
+
+ Creates a new #GstByteWriter instance with the given
+memory area. If @initialized is %TRUE it is possible to
+read @size bytes from the #GstByteWriter from the beginning.
+
+Free-function: gst_byte_writer_free
+
+ a new #GstByteWriter instance
+
+
+
+
+ Memory area for writing
+
+
+
+ Size of @data in bytes
+
+
+
+ If %TRUE the complete data can be read from the beginning
+
+
+
+
+
+ Creates a new #GstByteWriter instance with the given
+initial data size.
+
+Free-function: gst_byte_writer_free
+
+ a new #GstByteWriter instance
+
+
+
+
+ Initial size of data
+
+
+
+ If %TRUE the data can't be reallocated
+
+
+
+
+
+
+ Structure used by the collect_pads.
+
+ owner #GstCollectPads
+
+
+
+ #GstPad managed by this data
+
+
+
+ currently queued buffer.
+
+
+
+ position in the buffer
+
+
+
+ last segment received.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ A function that will be called when the #GstCollectData will be freed.
+It is passed the pointer to the structure and should free any custom
+memory and resources allocated for it.
+
+
+
+
+
+ the #GstCollectData that will be freed
+
+
+
+
+
+
+
+ Manages a set of pads that operate in collect mode. This means that control
+is given to the manager of this object when all pads have data.
+
+ * Collectpads are created with gst_collect_pads_new(). A callback should then
+ be installed with gst_collect_pads_set_function ().
+
+ * Pads are added to the collection with gst_collect_pads_add_pad()/
+ gst_collect_pads_remove_pad(). The pad
+ has to be a sinkpad. The chain and event functions of the pad are
+ overridden. The element_private of the pad is used to store
+ private information for the collectpads.
+
+ * For each pad, data is queued in the _chain function or by
+ performing a pull_range.
+
+ * When data is queued on all pads in waiting mode, the callback function is called.
+
+ * Data can be dequeued from the pad with the gst_collect_pads_pop() method.
+ One can peek at the data with the gst_collect_pads_peek() function.
+ These functions will return %NULL if the pad received an EOS event. When all
+ pads return %NULL from a gst_collect_pads_peek(), the element can emit an EOS
+ event itself.
+
+ * Data can also be dequeued in byte units using the gst_collect_pads_available(),
+ gst_collect_pads_read_buffer() and gst_collect_pads_flush() calls.
+
+ * Elements should call gst_collect_pads_start() and gst_collect_pads_stop() in
+ their state change functions to start and stop the processing of the collectpads.
+ The gst_collect_pads_stop() call should be called before calling the parent
+ element state change function in the PAUSED_TO_READY state change to ensure
+ no pad is blocked and the element can finish streaming.
+
+ * gst_collect_pads_set_waiting() sets a pad to waiting or non-waiting mode.
+ CollectPads element is not waiting for data to be collected on non-waiting pads.
+ Thus these pads may but need not have data when the callback is called.
+ All pads are in waiting mode by default.
+
+ Create a new instance of #GstCollectPads.
+
+MT safe.
+
+ a new #GstCollectPads, or %NULL in case of an error.
+
+
+
+
+ Add a pad to the collection of collect pads. The pad has to be
+a sinkpad. The refcount of the pad is incremented. Use
+gst_collect_pads_remove_pad() to remove the pad from the collection
+again.
+
+You specify a size for the returned #GstCollectData structure
+so that you can use it to store additional information.
+
+You can also specify a #GstCollectDataDestroyNotify that will be called
+just before the #GstCollectData structure is freed. It is passed the
+pointer to the structure and should free any custom memory and resources
+allocated for it.
+
+Keeping a pad locked in waiting state is only relevant when using
+the default collection algorithm (providing the oldest buffer).
+It ensures a buffer must be available on this pad for a collection
+to take place. This is of typical use to a muxer element where
+non-subtitle streams should always be in waiting state,
+e.g. to assure that caps information is available on all these streams
+when initial headers have to be written.
+
+The pad will be automatically activated in push mode when @pads is
+started.
+
+MT safe.
+
+ a new #GstCollectData to identify the
+ new pad. Or %NULL if wrong parameters are supplied.
+
+
+
+
+ the collectpads to use
+
+
+
+ the pad to add
+
+
+
+ the size of the returned #GstCollectData structure
+
+
+
+ function to be called before the returned
+ #GstCollectData structure is freed
+
+
+
+ whether to lock this pad in usual waiting state
+
+
+
+
+
+ Query how much bytes can be read from each queued buffer. This means
+that the result of this call is the maximum number of bytes that can
+be read from each of the pads.
+
+This function should be called with @pads STREAM_LOCK held, such as
+in the callback.
+
+MT safe.
+
+ The maximum number of bytes queued on all pads. This function
+returns 0 if a pad has no queued buffer.
+
+
+
+
+ the collectpads to query
+
+
+
+
+
+ Convenience clipping function that converts incoming buffer's timestamp
+to running time, or clips the buffer if outside configured segment.
+
+Since 1.6, this clipping function also sets the DTS parameter of the
+GstCollectData structure. This version of the running time DTS can be
+negative. G_MININT64 is used to indicate invalid value.
+
+
+
+
+
+ the collectpads to use
+
+
+
+ collect data of corresponding pad
+
+
+
+ buffer being clipped
+
+
+
+ output buffer with running time, or NULL if clipped
+
+
+
+ user data (unused)
+
+
+
+
+
+ Default #GstCollectPads event handling that elements should always
+chain up to ensure proper operation. Element might however indicate
+event should not be forwarded downstream.
+
+
+
+
+
+ the collectpads to use
+
+
+
+ collect data of corresponding pad
+
+
+
+ event being processed
+
+
+
+ process but do not send event downstream
+
+
+
+
+
+ Flush @size bytes from the pad @data.
+
+This function should be called with @pads STREAM_LOCK held, such as
+in the callback.
+
+MT safe.
+
+ The number of bytes flushed This can be less than @size and
+is 0 if the pad was end-of-stream.
+
+
+
+
+ the collectpads to query
+
+
+
+ the data to use
+
+
+
+ the number of bytes to flush
+
+
+
+
+
+ Peek at the buffer currently queued in @data. This function
+should be called with the @pads STREAM_LOCK held, such as in the callback
+handler.
+
+MT safe.
+
+ The buffer in @data or %NULL if no
+buffer is queued. You should unref the buffer after usage.
+
+
+
+
+ the collectpads to peek
+
+
+
+ the data to use
+
+
+
+
+
+ Pop the buffer currently queued in @data. This function
+should be called with the @pads STREAM_LOCK held, such as in the callback
+handler.
+
+MT safe.
+
+ The buffer in @data or %NULL if no
+buffer was queued. You should unref the buffer after usage.
+
+
+
+
+ the collectpads to pop
+
+
+
+ the data to use
+
+
+
+
+
+ Default #GstCollectPads query handling that elements should always
+chain up to ensure proper operation. Element might however indicate
+query should not be forwarded downstream.
+
+
+
+
+
+ the collectpads to use
+
+
+
+ collect data of corresponding pad
+
+
+
+ query being processed
+
+
+
+ process but do not send event downstream
+
+
+
+
+
+ Get a subbuffer of @size bytes from the given pad @data.
+
+This function should be called with @pads STREAM_LOCK held, such as in the
+callback.
+
+MT safe.
+
+ A sub buffer. The size of the buffer can
+be less than requested. A return of %NULL signals that the pad is
+end-of-stream. Unref the buffer after use.
+
+
+
+
+ the collectpads to query
+
+
+
+ the data to use
+
+
+
+ the number of bytes to read
+
+
+
+
+
+ Remove a pad from the collection of collect pads. This function will also
+free the #GstCollectData and all the resources that were allocated with
+gst_collect_pads_add_pad().
+
+The pad will be deactivated automatically when @pads is stopped.
+
+MT safe.
+
+ %TRUE if the pad could be removed.
+
+
+
+
+ the collectpads to use
+
+
+
+ the pad to remove
+
+
+
+
+
+ Set the callback function and user data that will be called with
+the oldest buffer when all pads have been collected, or %NULL on EOS.
+If a buffer is passed, the callback owns a reference and must unref
+it.
+
+MT safe.
+
+
+
+
+
+ the collectpads to use
+
+
+
+ the function to set
+
+
+
+ user data passed to the function
+
+
+
+
+
+ Install a clipping function that is called right after a buffer is received
+on a pad managed by @pads. See #GstCollectPadsClipFunction for more info.
+
+
+
+
+
+ the collectpads to use
+
+
+
+ clip function to install
+
+
+
+ user data to pass to @clip_func
+
+
+
+
+
+ Set the timestamp comparison function.
+
+MT safe.
+
+
+
+
+
+ the pads to use
+
+
+
+ the function to set
+
+
+
+ user data passed to the function
+
+
+
+
+
+ Set the event callback function and user data that will be called when
+collectpads has received an event originating from one of the collected
+pads. If the event being processed is a serialized one, this callback is
+called with @pads STREAM_LOCK held, otherwise not. As this lock should be
+held when calling a number of CollectPads functions, it should be acquired
+if so (unusually) needed.
+
+MT safe.
+
+
+
+
+
+ the collectpads to use
+
+
+
+ the function to set
+
+
+
+ user data passed to the function
+
+
+
+
+
+ Install a flush function that is called when the internal
+state of all pads should be flushed as part of flushing seek
+handling. See #GstCollectPadsFlushFunction for more info.
+
+
+
+
+
+ the collectpads to use
+
+
+
+ flush function to install
+
+
+
+ user data to pass to @func
+
+
+
+
+
+ Change the flushing state of all the pads in the collection. No pad
+is able to accept any more data when @flushing is %TRUE. Calling this
+function with @flushing %FALSE makes @pads accept data again.
+Caller must ensure that downstream streaming (thread) is not blocked,
+e.g. by sending a FLUSH_START downstream.
+
+MT safe.
+
+
+
+
+
+ the collectpads to use
+
+
+
+ desired state of the pads
+
+
+
+
+
+ CollectPads provides a default collection algorithm that will determine
+the oldest buffer available on all of its pads, and then delegate
+to a configured callback.
+However, if circumstances are more complicated and/or more control
+is desired, this sets a callback that will be invoked instead when
+all the pads added to the collection have buffers queued.
+Evidently, this callback is not compatible with
+gst_collect_pads_set_buffer_function() callback.
+If this callback is set, the former will be unset.
+
+MT safe.
+
+
+
+
+
+ the collectpads to use
+
+
+
+ the function to set
+
+
+
+ user data passed to the function
+
+
+
+
+
+ Set the query callback function and user data that will be called after
+collectpads has received a query originating from one of the collected
+pads. If the query being processed is a serialized one, this callback is
+called with @pads STREAM_LOCK held, otherwise not. As this lock should be
+held when calling a number of CollectPads functions, it should be acquired
+if so (unusually) needed.
+
+MT safe.
+
+
+
+
+
+ the collectpads to use
+
+
+
+ the function to set
+
+
+
+ user data passed to the function
+
+
+
+
+
+ Sets a pad to waiting or non-waiting mode, if at least this pad
+has not been created with locked waiting state,
+in which case nothing happens.
+
+This function should be called with @pads STREAM_LOCK held, such as
+in the callback.
+
+MT safe.
+
+
+
+
+
+ the collectpads
+
+
+
+ the data to use
+
+
+
+ boolean indicating whether this pad should operate
+ in waiting or non-waiting mode
+
+
+
+
+
+ Default #GstCollectPads event handling for the src pad of elements.
+Elements can chain up to this to let flushing seek event handling
+be done by #GstCollectPads.
+
+
+
+
+
+ the #GstCollectPads to use
+
+
+
+ src #GstPad that received the event
+
+
+
+ event being processed
+
+
+
+
+
+ Starts the processing of data in the collect_pads.
+
+MT safe.
+
+
+
+
+
+ the collectpads to use
+
+
+
+
+
+ Stops the processing of data in the collect_pads. this function
+will also unblock any blocking operations.
+
+MT safe.
+
+
+
+
+
+ the collectpads to use
+
+
+
+
+
+ Get a subbuffer of @size bytes from the given pad @data. Flushes the amount
+of read bytes.
+
+This function should be called with @pads STREAM_LOCK held, such as in the
+callback.
+
+MT safe.
+
+ A sub buffer. The size of the buffer can
+be less than requested. A return of %NULL signals that the pad is
+end-of-stream. Unref the buffer after use.
+
+
+
+
+ the collectpads to query
+
+
+
+ the data to use
+
+
+
+ the number of bytes to read
+
+
+
+
+
+
+
+
+ #GList of #GstCollectData managed
+ by this #GstCollectPads.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ A function that will be called when a (considered oldest) buffer can be muxed.
+If all pads have reached EOS, this function is called with %NULL @buffer
+and %NULL @data.
+
+ %GST_FLOW_OK for success
+
+
+
+
+ the #GstCollectPads that triggered the callback
+
+
+
+ the #GstCollectData of pad that has received the buffer
+
+
+
+ the #GstBuffer
+
+
+
+ user data passed to gst_collect_pads_set_buffer_function()
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ A function that will be called when @inbuffer is received on the pad managed
+by @data in the collectpad object @pads.
+
+The function should use the segment of @data and the negotiated media type on
+the pad to perform clipping of @inbuffer.
+
+This function takes ownership of @inbuffer and should output a buffer in
+@outbuffer or return %NULL in @outbuffer if the buffer should be dropped.
+
+ a #GstFlowReturn that corresponds to the result of clipping.
+
+
+
+
+ a #GstCollectPads
+
+
+
+ a #GstCollectData
+
+
+
+ the input #GstBuffer
+
+
+
+ the output #GstBuffer
+
+
+
+ user data
+
+
+
+
+
+ A function for comparing two timestamps of buffers or newsegments collected on one pad.
+
+ Integer less than zero when first timestamp is deemed older than the second one.
+ Zero if the timestamps are deemed equally old.
+ Integer greater than zero when second timestamp is deemed older than the first one.
+
+
+
+
+ the #GstCollectPads that is comparing the timestamps
+
+
+
+ the first #GstCollectData
+
+
+
+ the first timestamp
+
+
+
+ the second #GstCollectData
+
+
+
+ the second timestamp
+
+
+
+ user data passed to gst_collect_pads_set_compare_function()
+
+
+
+
+
+ A function that will be called while processing an event. It takes
+ownership of the event and is responsible for chaining up (to
+gst_collect_pads_event_default()) or dropping events (such typical cases
+being handled by the default handler).
+
+ %TRUE if the pad could handle the event
+
+
+
+
+ the #GstCollectPads that triggered the callback
+
+
+
+ the #GstPad that received an event
+
+
+
+ the #GstEvent received
+
+
+
+ user data passed to gst_collect_pads_set_event_function()
+
+
+
+
+
+ A function that will be called while processing a flushing seek event.
+
+The function should flush any internal state of the element and the state of
+all the pads. It should clear only the state not directly managed by the
+@pads object. It is therefore not necessary to call
+gst_collect_pads_set_flushing nor gst_collect_pads_clear from this function.
+
+
+
+
+
+ a #GstCollectPads
+
+
+
+ user data
+
+
+
+
+
+ A function that will be called when all pads have received data.
+
+ %GST_FLOW_OK for success
+
+
+
+
+ the #GstCollectPads that triggered the callback
+
+
+
+ user data passed to gst_collect_pads_set_function()
+
+
+
+
+
+
+
+ A function that will be called while processing a query. It takes
+ownership of the query and is responsible for chaining up (to
+events downstream (with gst_pad_event_default()).
+
+ %TRUE if the pad could handle the event
+
+
+
+
+ the #GstCollectPads that triggered the callback
+
+
+
+ the #GstPad that received an event
+
+
+
+ the #GstEvent received
+
+
+
+ user data passed to gst_collect_pads_set_query_function()
+
+
+
+
+
+
+ Set if collectdata's pad is EOS.
+
+
+ Set if collectdata's pad is flushing.
+
+
+ Set if collectdata's pad received a
+ new_segment event.
+
+
+ Set if collectdata's pad must be waited
+ for when collecting.
+
+
+ Set collectdata's pad WAITING state must
+ not be changed.
+#GstCollectPadsStateFlags indicate private state of a collectdata('s pad).
+
+
+
+ #GstDataQueue is an object that handles threadsafe queueing of objects. It
+also provides size-related functionality. This object should be used for
+any #GstElement that wishes to provide some sort of queueing functionality.
+
+ Creates a new #GstDataQueue. If @fullcallback or @emptycallback are supplied, then
+the #GstDataQueue will call the respective callback to signal full or empty condition.
+If the callbacks are NULL the #GstDataQueue will instead emit 'full' and 'empty'
+signals.
+
+ a new #GstDataQueue.
+
+
+
+
+ the callback used to tell if the element considers the queue full
+or not.
+
+
+
+ the callback which will be called when the queue is considered full.
+
+
+
+ the callback which will be called when the queue is considered empty.
+
+
+
+ a #gpointer that will be passed to the @checkfull, @fullcallback,
+ and @emptycallback callbacks.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Pop and unref the head-most #GstMiniObject with the given #GType.
+
+ %TRUE if an element was removed.
+
+
+
+
+ The #GstDataQueue to drop an item from.
+
+
+
+ The #GType of the item to drop.
+
+
+
+
+
+ Flushes all the contents of the @queue. Any call to #gst_data_queue_push and
+#gst_data_queue_pop will be released.
+MT safe.
+
+
+
+
+
+ a #GstDataQueue.
+
+
+
+
+
+ Get the current level of the queue.
+
+
+
+
+
+ The #GstDataQueue
+
+
+
+ the location to store the result
+
+
+
+
+
+ Queries if there are any items in the @queue.
+MT safe.
+
+ %TRUE if @queue is empty.
+
+
+
+
+ a #GstDataQueue.
+
+
+
+
+
+ Queries if @queue is full. This check will be done using the
+#GstDataQueueCheckFullFunction registered with @queue.
+MT safe.
+
+ %TRUE if @queue is full.
+
+
+
+
+ a #GstDataQueue.
+
+
+
+
+
+ Inform the queue that the limits for the fullness check have changed and that
+any blocking gst_data_queue_push() should be unblocked to recheck the limits.
+
+
+
+
+
+ The #GstDataQueue
+
+
+
+
+
+ Retrieves the first @item available on the @queue without removing it.
+If the queue is currently empty, the call will block until at least
+one item is available, OR the @queue is set to the flushing state.
+MT safe.
+
+ %TRUE if an @item was successfully retrieved from the @queue.
+
+
+
+
+ a #GstDataQueue.
+
+
+
+ pointer to store the returned #GstDataQueueItem.
+
+
+
+
+
+ Retrieves the first @item available on the @queue. If the queue is currently
+empty, the call will block until at least one item is available, OR the
+@queue is set to the flushing state.
+MT safe.
+
+ %TRUE if an @item was successfully retrieved from the @queue.
+
+
+
+
+ a #GstDataQueue.
+
+
+
+ pointer to store the returned #GstDataQueueItem.
+
+
+
+
+
+ Pushes a #GstDataQueueItem (or a structure that begins with the same fields)
+on the @queue. If the @queue is full, the call will block until space is
+available, OR the @queue is set to flushing state.
+MT safe.
+
+Note that this function has slightly different semantics than gst_pad_push()
+and gst_pad_push_event(): this function only takes ownership of @item and
+the #GstMiniObject contained in @item if the push was successful. If %FALSE
+is returned, the caller is responsible for freeing @item and its contents.
+
+ %TRUE if the @item was successfully pushed on the @queue.
+
+
+
+
+ a #GstDataQueue.
+
+
+
+ a #GstDataQueueItem.
+
+
+
+
+
+ Pushes a #GstDataQueueItem (or a structure that begins with the same fields)
+on the @queue. It ignores if the @queue is full or not and forces the @item
+to be pushed anyway.
+MT safe.
+
+Note that this function has slightly different semantics than gst_pad_push()
+and gst_pad_push_event(): this function only takes ownership of @item and
+the #GstMiniObject contained in @item if the push was successful. If %FALSE
+is returned, the caller is responsible for freeing @item and its contents.
+
+ %TRUE if the @item was successfully pushed on the @queue.
+
+
+
+
+ a #GstDataQueue.
+
+
+
+ a #GstDataQueueItem.
+
+
+
+
+
+ Sets the queue to flushing state if @flushing is %TRUE. If set to flushing
+state, any incoming data on the @queue will be discarded. Any call currently
+blocking on #gst_data_queue_push or #gst_data_queue_pop will return straight
+away with a return value of %FALSE. While the @queue is in flushing state,
+all calls to those two functions will return %FALSE.
+
+MT Safe.
+
+
+
+
+
+ a #GstDataQueue.
+
+
+
+ a #gboolean stating if the queue will be flushing or not.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ the parent structure
+
+
+
+
+
+
+
+
+
+
+
+ Reports that the queue became empty (empty).
+A queue is empty if the total amount of visible items inside it (num-visible, time,
+size) is lower than the boundary values which can be set through the GObject
+properties.
+
+
+
+
+
+ Reports that the queue became full (full).
+A queue is full if the total amount of data inside it (num-visible, time,
+size) is higher than the boundary values which can be set through the GObject
+properties.
+
+
+
+
+
+
+ The prototype of the function used to inform the queue that it should be
+considered as full.
+
+ %TRUE if the queue should be considered full.
+
+
+
+
+ a #GstDataQueue.
+
+
+
+ The number of visible items currently in the queue.
+
+
+
+ The amount of bytes currently in the queue.
+
+
+
+ The accumulated duration of the items currently in the queue.
+
+
+
+ The #gpointer registered when the #GstDataQueue was created.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Structure used by #GstDataQueue. You can supply a different structure, as
+long as the top of the structure is identical to this structure.
+
+ the #GstMiniObject to queue.
+
+
+
+ the size in bytes of the miniobject.
+
+
+
+ the duration in #GstClockTime of the miniobject. Can not be
+%GST_CLOCK_TIME_NONE.
+
+
+
+ %TRUE if @object should be considered as a visible object.
+
+
+
+ The #GDestroyNotify function to use to free the #GstDataQueueItem.
+This function should also drop the reference to @object the owner of the
+#GstDataQueueItem is assumed to hold.
+
+
+
+
+
+
+
+
+
+
+
+ Structure describing the size of a queue.
+
+ number of buffers
+
+
+
+ number of bytes
+
+
+
+ amount of time
+
+
+
+
+ Utility struct to help handling #GstFlowReturn combination. Useful for
+#GstElement<!-- -->s that have multiple source pads and need to combine
+the different #GstFlowReturn for those pads.
+
+#GstFlowCombiner works by using the last #GstFlowReturn for all #GstPad
+it has in its list and computes the combined return value and provides
+it to the caller.
+
+To add a new pad to the #GstFlowCombiner use gst_flow_combiner_add_pad().
+The new #GstPad is stored with a default value of %GST_FLOW_OK.
+
+In case you want a #GstPad to be removed, use gst_flow_combiner_remove_pad().
+
+Please be aware that this struct isn't thread safe as it's designed to be
+ used by demuxers, which usually will have a single thread operating it.
+
+These functions will take refs on the passed #GstPad<!-- -->s.
+
+Aside from reducing the user's code size, the main advantage of using this
+helper struct is to follow the standard rules for #GstFlowReturn combination.
+These rules are:
+
+* %GST_FLOW_EOS: only if all returns are EOS too
+* %GST_FLOW_NOT_LINKED: only if all returns are NOT_LINKED too
+* %GST_FLOW_ERROR or below: if at least one returns an error return
+* %GST_FLOW_NOT_NEGOTIATED: if at least one returns a not-negotiated return
+* %GST_FLOW_FLUSHING: if at least one returns flushing
+* %GST_FLOW_OK: otherwise
+
+%GST_FLOW_ERROR or below, GST_FLOW_NOT_NEGOTIATED and GST_FLOW_FLUSHING are
+returned immediately from the gst_flow_combiner_update_flow() function.
+
+ Creates a new #GstFlowCombiner, use gst_flow_combiner_free() to free it.
+
+ A new #GstFlowCombiner
+
+
+
+
+ Adds a new #GstPad to the #GstFlowCombiner.
+
+
+
+
+
+ the #GstFlowCombiner
+
+
+
+ the #GstPad that is being added
+
+
+
+
+
+ Removes all pads from a #GstFlowCombiner and resets it to its initial state.
+
+
+
+
+
+ the #GstFlowCombiner to clear
+
+
+
+
+
+ Frees a #GstFlowCombiner struct and all its internal data.
+
+
+
+
+
+ the #GstFlowCombiner to free
+
+
+
+
+
+ Removes a #GstPad from the #GstFlowCombiner.
+
+
+
+
+
+ the #GstFlowCombiner
+
+
+
+ the #GstPad to remove
+
+
+
+
+
+ Reset flow combiner and all pads to their initial state without removing pads.
+
+
+
+
+
+ the #GstFlowCombiner to clear
+
+
+
+
+
+ Computes the combined flow return for the pads in it.
+
+The #GstFlowReturn parameter should be the last flow return update for a pad
+in this #GstFlowCombiner. It will use this value to be able to shortcut some
+combinations and avoid looking over all pads again. e.g. The last combined
+return is the same as the latest obtained #GstFlowReturn.
+
+ The combined #GstFlowReturn
+
+
+
+
+ the #GstFlowCombiner
+
+
+
+ the latest #GstFlowReturn received for a pad in this #GstFlowCombiner
+
+
+
+
+
+ Sets the provided pad's last flow return to provided value and computes
+the combined flow return for the pads in it.
+
+The #GstFlowReturn parameter should be the last flow return update for a pad
+in this #GstFlowCombiner. It will use this value to be able to shortcut some
+combinations and avoid looking over all pads again. e.g. The last combined
+return is the same as the latest obtained #GstFlowReturn.
+
+ The combined #GstFlowReturn
+
+
+
+
+ the #GstFlowCombiner
+
+
+
+ the #GstPad whose #GstFlowReturn to update
+
+
+
+ the latest #GstFlowReturn received for a pad in this #GstFlowCombiner
+
+
+
+
+
+
+ This class is mostly useful for elements that cannot do
+random access, or at least very slowly. The source usually
+prefers to push out a fixed size buffer.
+
+Subclasses usually operate in a format that is different from the
+default GST_FORMAT_BYTES format of #GstBaseSrc.
+
+Classes extending this base class will usually be scheduled
+in a push based mode. If the peer accepts to operate without
+offsets and within the limits of the allowed block size, this
+class can operate in getrange based mode automatically. To make
+this possible, the subclass should implement and override the
+SCHEDULING query.
+
+The subclass should extend the methods from the baseclass in
+addition to the ::create method.
+
+Seeking, flushing, scheduling and sync is all handled by this
+base class.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Subclasses can override any of the available virtual methods or not, as
+needed. At the minimum, the @fill method should be overridden to produce
+buffers.
+
+ Element parent class
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #GstQueueArray is an object that provides standard queue functionality
+based on an array instead of linked lists. This reduces the overhead
+caused by memory management by a large factor.
+
+ Drops the queue element at position @idx from queue @array.
+
+ the dropped element
+
+
+
+
+ a #GstQueueArray object
+
+
+
+ index to drop
+
+
+
+
+
+ Drops the queue element at position @idx from queue @array and copies the
+data of the element or structure that was removed into @p_struct if
+@p_struct is set (not NULL).
+
+ TRUE on success, or FALSE on error
+
+
+
+
+ a #GstQueueArray object
+
+
+
+ index to drop
+
+
+
+ address into which to store the data of the dropped structure, or NULL
+
+
+
+
+
+ Finds an element in the queue @array, either by comparing every element
+with @func or by looking up @data if no compare function @func is provided,
+and returning the index of the found element.
+
+Note that the index is not 0-based, but an internal index number with a
+random offset. The index can be used in connection with
+gst_queue_array_drop_element(). FIXME: return index 0-based and make
+gst_queue_array_drop_element() take a 0-based index.
+
+ Index of the found element or -1 if nothing was found.
+
+
+
+
+ a #GstQueueArray object
+
+
+
+ comparison function, or %NULL to find @data by value
+
+
+
+ data for comparison function
+
+
+
+
+
+ Frees queue @array and all memory associated to it.
+
+
+
+
+
+ a #GstQueueArray object
+
+
+
+
+
+ Returns the length of the queue @array
+
+ the length of the queue @array.
+
+
+
+
+ a #GstQueueArray object
+
+
+
+
+
+ Checks if the queue @array is empty.
+
+ %TRUE if the queue @array is empty
+
+
+
+
+ a #GstQueueArray object
+
+
+
+
+
+ Returns the head of the queue @array and does not
+remove it from the queue.
+
+ The head of the queue
+
+
+
+
+ a #GstQueueArray object
+
+
+
+
+
+ Returns the head of the queue @array without removing it from the queue.
+
+ pointer to element or struct, or NULL if @array was empty. The
+ data pointed to by the returned pointer stays valid only as long as
+ the queue array is not modified further!
+
+
+
+
+ a #GstQueueArray object
+
+
+
+
+
+ Returns the head of the queue @array and removes
+it from the queue.
+
+ The head of the queue
+
+
+
+
+ a #GstQueueArray object
+
+
+
+
+
+ Returns the head of the queue @array and removes it from the queue.
+
+ pointer to element or struct, or NULL if @array was empty. The
+ data pointed to by the returned pointer stays valid only as long as
+ the queue array is not modified further!
+
+
+
+
+ a #GstQueueArray object
+
+
+
+
+
+ Pushes @data to the tail of the queue @array.
+
+
+
+
+
+ a #GstQueueArray object
+
+
+
+ object to push
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Allocates a new #GstQueueArray object with an initial
+queue size of @initial_size.
+
+ a new #GstQueueArray object
+
+
+
+
+ Initial size of the new queue
+
+
+
+
+
+ Allocates a new #GstQueueArray object for elements (e.g. structures)
+of size @struct_size, with an initial queue size of @initial_size.
+
+ a new #GstQueueArray object
+
+
+
+
+ Size of each element (e.g. structure) in the array
+
+
+
+ Initial size of the new queue
+
+
+
+
+
+
+ This function will be called by gst_type_find_helper_get_range() when
+typefinding functions request to peek at the data of a stream at certain
+offsets. If this function returns GST_FLOW_OK, the result buffer will be
+stored in @buffer. The contents of @buffer is invalid for any other
+return value.
+
+This function is supposed to behave exactly like a #GstPadGetRangeFunction.
+
+ GST_FLOW_OK for success
+
+
+
+
+ a #GstObject that will handle the getrange request
+
+
+
+ the parent of @obj or %NULL
+
+
+
+ the offset of the range
+
+
+
+ the length of the range
+
+
+
+ a memory location to hold the result buffer
+
+
+
+
+
+ Create a new #GstBitReader instance, which will read from @data.
+
+Free-function: gst_bit_reader_free
+
+ a new #GstBitReader instance
+
+
+
+
+ Data from which the #GstBitReader
+ should read
+
+
+
+
+
+ Size of @data in bytes
+
+
+
+
+
+ Create a new #GstByteReader instance, which will read from @data.
+
+Free-function: gst_byte_reader_free
+
+ a new #GstByteReader instance
+
+
+
+
+ data from which the
+ #GstByteReader should read
+
+
+
+
+
+ Size of @data in bytes
+
+
+
+
+
+ Creates a new, empty #GstByteWriter instance
+
+Free-function: gst_byte_writer_free
+
+ a new, empty #GstByteWriter instance
+
+
+
+
+ Creates a new #GstByteWriter instance with the given
+memory area. If @initialized is %TRUE it is possible to
+read @size bytes from the #GstByteWriter from the beginning.
+
+Free-function: gst_byte_writer_free
+
+ a new #GstByteWriter instance
+
+
+
+
+ Memory area for writing
+
+
+
+ Size of @data in bytes
+
+
+
+ If %TRUE the complete data can be read from the beginning
+
+
+
+
+
+ Creates a new #GstByteWriter instance with the given
+initial data size.
+
+Free-function: gst_byte_writer_free
+
+ a new #GstByteWriter instance
+
+
+
+
+ Initial size of data
+
+
+
+ If %TRUE the data can't be reallocated
+
+
+
+
+
+ Allocates a new #GstQueueArray object with an initial
+queue size of @initial_size.
+
+ a new #GstQueueArray object
+
+
+
+
+ Initial size of the new queue
+
+
+
+
+
+ Allocates a new #GstQueueArray object for elements (e.g. structures)
+of size @struct_size, with an initial queue size of @initial_size.
+
+ a new #GstQueueArray object
+
+
+
+
+ Size of each element (e.g. structure) in the array
+
+
+
+ Initial size of the new queue
+
+
+
+
+
+ Tries to find what type of data is flowing from the given source #GstPad.
+
+Free-function: gst_caps_unref
+
+ the #GstCaps corresponding to the data
+ stream. Returns %NULL if no #GstCaps matches the data stream.
+
+
+
+
+ A source #GstPad
+
+
+
+ The length in bytes
+
+
+
+
+
+ Tries to find what type of data is contained in the given #GstBuffer, the
+assumption being that the buffer represents the beginning of the stream or
+file.
+
+All available typefinders will be called on the data in order of rank. If
+a typefinding function returns a probability of %GST_TYPE_FIND_MAXIMUM,
+typefinding is stopped immediately and the found caps will be returned
+right away. Otherwise, all available typefind functions will then be tried,
+and the caps with the highest probability will be returned, or %NULL if
+the content of the buffer could not be identified.
+
+Free-function: gst_caps_unref
+
+ the #GstCaps corresponding to the data,
+ or %NULL if no type could be found. The caller should free the caps
+ returned with gst_caps_unref().
+
+
+
+
+ object doing the typefinding, or %NULL (used for logging)
+
+
+
+ a #GstBuffer with data to typefind
+
+
+
+ location to store the probability of the found
+ caps, or %NULL
+
+
+
+
+
+ Tries to find what type of data is contained in the given @data, the
+assumption being that the data represents the beginning of the stream or
+file.
+
+All available typefinders will be called on the data in order of rank. If
+a typefinding function returns a probability of %GST_TYPE_FIND_MAXIMUM,
+typefinding is stopped immediately and the found caps will be returned
+right away. Otherwise, all available typefind functions will then be tried,
+and the caps with the highest probability will be returned, or %NULL if
+the content of @data could not be identified.
+
+Free-function: gst_caps_unref
+
+ the #GstCaps corresponding to the data,
+ or %NULL if no type could be found. The caller should free the caps
+ returned with gst_caps_unref().
+
+
+
+
+ object doing the typefinding, or %NULL (used for logging)
+
+
+
+ a pointer with data to typefind
+
+
+
+ the size of @data
+
+
+
+ location to store the probability of the found
+ caps, or %NULL
+
+
+
+
+
+ Tries to find the best #GstCaps associated with @extension.
+
+All available typefinders will be checked against the extension in order
+of rank. The caps of the first typefinder that can handle @extension will be
+returned.
+
+Free-function: gst_caps_unref
+
+ the #GstCaps corresponding to
+ @extension, or %NULL if no type could be found. The caller should free
+ the caps returned with gst_caps_unref().
+
+
+
+
+ object doing the typefinding, or %NULL (used for logging)
+
+
+
+ an extension
+
+
+
+
+
+ Utility function to do pull-based typefinding. Unlike gst_type_find_helper()
+however, this function will use the specified function @func to obtain the
+data needed by the typefind functions, rather than operating on a given
+source pad. This is useful mostly for elements like tag demuxers which
+strip off data at the beginning and/or end of a file and want to typefind
+the stripped data stream before adding their own source pad (the specified
+callback can then call the upstream peer pad with offsets adjusted for the
+tag size, for example).
+
+When @extension is not %NULL, this function will first try the typefind
+functions for the given extension, which might speed up the typefinding
+in many cases.
+
+Free-function: gst_caps_unref
+
+ the #GstCaps corresponding to the data
+ stream. Returns %NULL if no #GstCaps matches the data stream.
+
+
+
+
+ A #GstObject that will be passed as first argument to @func
+
+
+
+ the parent of @obj or %NULL
+
+
+
+ A generic #GstTypeFindHelperGetRangeFunction that will
+ be used to access data at random offsets when doing the typefinding
+
+
+
+ The length in bytes
+
+
+
+ extension of the media
+
+
+
+ location to store the probability of the found
+ caps, or %NULL
+
+
+
+
+
+
diff --git a/gir-files/GstPbutils-1.0.gir b/gir-files/GstPbutils-1.0.gir
new file mode 100644
index 000000000..f39ac67d7
--- /dev/null
+++ b/gir-files/GstPbutils-1.0.gir
@@ -0,0 +1,3976 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ A baseclass for scopes (visualizers). It takes care of re-fitting the
+audio-rate to video-rate and handles renegotiation (downstream video size
+changes).
+
+It also provides several background shading effects. These effects are
+applied to a previous picture before the render() implementation can draw a
+new frame.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Different types of supported background shading functions.
+
+ no shading
+
+
+ plain fading
+
+
+ fade and move up
+
+
+ fade and move down
+
+
+ fade and move left
+
+
+ fade and move right
+
+
+ fade and move horizontally out
+
+
+ fade and move horizontally in
+
+
+ fade and move vertically out
+
+
+ fade and move vertically in
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ The #GstDiscoverer is a utility object which allows to get as much
+information as possible from one or many URIs.
+
+It provides two APIs, allowing usage in blocking or non-blocking mode.
+
+The blocking mode just requires calling gst_discoverer_discover_uri()
+with the URI one wishes to discover.
+
+The non-blocking mode requires a running #GMainLoop iterating a
+#GMainContext, where one connects to the various signals, appends the
+URIs to be processed (through gst_discoverer_discover_uri_async()) and then
+asks for the discovery to begin (through gst_discoverer_start()).
+By default this will use the GLib default main context unless you have
+set a custom context using g_main_context_push_thread_default().
+
+All the information is returned in a #GstDiscovererInfo structure.
+
+ Creates a new #GstDiscoverer with the provided timeout.
+
+ The new #GstDiscoverer.
+If an error occurred when creating the discoverer, @err will be set
+accordingly and %NULL will be returned. If @err is set, the caller must
+free it when no longer needed using g_error_free().
+
+
+
+
+ timeout per file, in nanoseconds. Allowed are values between
+ one second (#GST_SECOND) and one hour (3600 * #GST_SECOND)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Synchronously discovers the given @uri.
+
+A copy of @uri will be made internally, so the caller can safely g_free()
+afterwards.
+
+ the result of the scanning. Can be %NULL if an
+error occurred.
+
+
+
+
+ A #GstDiscoverer
+
+
+
+ The URI to run on.
+
+
+
+
+
+ Appends the given @uri to the list of URIs to discover. The actual
+discovery of the @uri will only take place if gst_discoverer_start() has
+been called.
+
+A copy of @uri will be made internally, so the caller can safely g_free()
+afterwards.
+
+ %TRUE if the @uri was successfully appended to the list of pending
+uris, else %FALSE
+
+
+
+
+ A #GstDiscoverer
+
+
+
+ the URI to add.
+
+
+
+
+
+ Allow asynchronous discovering of URIs to take place.
+A #GMainLoop must be available for #GstDiscoverer to properly work in
+asynchronous mode.
+
+
+
+
+
+ A #GstDiscoverer
+
+
+
+
+
+ Stop the discovery of any pending URIs and clears the list of
+pending URIS (if any).
+
+
+
+
+
+ A #GstDiscoverer
+
+
+
+
+
+ The duration (in nanoseconds) after which the discovery of an individual
+URI will timeout.
+
+If the discovery of a URI times out, the %GST_DISCOVERER_TIMEOUT will be
+set on the result flags.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Will be emitted in async mode when all information on a URI could be
+discovered, or an error occurred.
+
+When an error occurs, @info might still contain some partial information,
+depending on the circumstances of the error.
+
+
+
+
+
+ the results #GstDiscovererInfo
+
+
+
+ #GError, which will be non-NULL if an error
+ occurred during discovery. You must not
+ free this #GError, it will be freed by
+ the discoverer.
+
+
+
+
+
+ Will be emitted in async mode when all pending URIs have been processed.
+
+
+
+
+
+ This signal is emitted after the source element has been created for
+the URI being discovered, so it can be configured by setting additional
+properties (e.g. set a proxy server for an http source, or set the device
+and read speed for an audio cd source).
+
+This signal is usually emitted from the context of a GStreamer streaming
+thread.
+
+
+
+
+
+ source element
+
+
+
+
+
+ Will be emitted when the discoverer starts analyzing the pending URIs
+
+
+
+
+
+
+ #GstDiscovererStreamInfo specific to audio streams.
+
+
+ the average or nominal bitrate of the stream in bits/second.
+
+
+
+
+ a #GstDiscovererAudioInfo
+
+
+
+
+
+
+ the number of channels in the stream.
+
+
+
+
+ a #GstDiscovererAudioInfo
+
+
+
+
+
+
+ the number of bits used per sample in each channel.
+
+
+
+
+ a #GstDiscovererAudioInfo
+
+
+
+
+
+
+ the language of the stream, or NULL if unknown.
+
+
+
+
+ a #GstDiscovererAudioInfo
+
+
+
+
+
+
+ the maximum bitrate of the stream in bits/second.
+
+
+
+
+ a #GstDiscovererAudioInfo
+
+
+
+
+
+
+ the sample rate of the stream in Hertz.
+
+
+
+
+ a #GstDiscovererAudioInfo
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #GstDiscovererStreamInfo specific to container streams.
+
+
+ the list of
+#GstDiscovererStreamInfo this container stream offers.
+Free with gst_discoverer_stream_info_list_free() after usage.
+
+
+
+
+
+
+ a #GstDiscovererStreamInfo
+
+
+
+
+
+
+ Structure containing the information of a URI analyzed by #GstDiscoverer.
+
+ Parses a #GVariant as produced by gst_discoverer_info_to_variant()
+back to a #GstDiscovererInfo.
+
+ A newly-allocated #GstDiscovererInfo.
+
+
+
+
+ A #GVariant to deserialize into a #GstDiscovererInfo.
+
+
+
+
+
+
+ A copy of the #GstDiscovererInfo
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+ Finds all the #GstDiscovererAudioInfo contained in @info
+
+ A #GList of
+matching #GstDiscovererStreamInfo. The caller should free it with
+gst_discoverer_stream_info_list_free().
+
+
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+ Finds all the #GstDiscovererContainerInfo contained in @info
+
+ A #GList of
+matching #GstDiscovererStreamInfo. The caller should free it with
+gst_discoverer_stream_info_list_free().
+
+
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+
+ the duration of the URI in #GstClockTime (nanoseconds).
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+ This function is deprecated since version 1.4, use
+#gst_discoverer_info_get_missing_elements_installer_details
+
+ Miscellaneous information stored as a #GstStructure
+(for example: information about missing plugins). If you wish to use the
+#GstStructure after the life-time of @info, you will need to copy it.
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+ Get the installer details for missing elements
+
+ An array of strings
+containing information about how to install the various missing elements
+for @info to be usable. If you wish to use the strings after the life-time
+of @info, you will need to copy them.
+
+
+
+
+
+
+ a #GstDiscovererStreamInfo to retrieve installer detail
+for the missing element
+
+
+
+
+
+
+ the result of the discovery as a #GstDiscovererResult.
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+
+ whether the URI is seekable.
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+
+ the structure (or topology) of the URI as a
+#GstDiscovererStreamInfo.
+This structure can be traversed to see the original hierarchy. Unref with
+gst_discoverer_stream_info_unref() after usage.
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+
+ the list of
+all streams contained in @info. Free after usage
+with gst_discoverer_stream_info_list_free().
+
+
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+ Finds the #GstDiscovererStreamInfo contained in @info that match the
+given @streamtype.
+
+ A #GList of
+matching #GstDiscovererStreamInfo. The caller should free it with
+gst_discoverer_stream_info_list_free().
+
+
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+ a #GType derived from #GstDiscovererStreamInfo
+
+
+
+
+
+ Finds all the #GstDiscovererSubtitleInfo contained in @info
+
+ A #GList of
+matching #GstDiscovererStreamInfo. The caller should free it with
+gst_discoverer_stream_info_list_free().
+
+
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+
+ all tags contained in the URI. If you wish to use
+the tags after the life-time of @info, you will need to copy them.
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+
+ TOC contained in the URI. If you wish to use
+the TOC after the life-time of @info, you will need to copy it.
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+
+ the URI to which this information corresponds to.
+Copy it if you wish to use it after the life-time of @info.
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+ Finds all the #GstDiscovererVideoInfo contained in @info
+
+ A #GList of
+matching #GstDiscovererStreamInfo. The caller should free it with
+gst_discoverer_stream_info_list_free().
+
+
+
+
+
+
+ a #GstDiscovererInfo
+
+
+
+
+
+ Serializes @info to a #GVariant that can be parsed again
+through gst_discoverer_info_from_variant().
+
+Note that any #GstToc (s) that might have been discovered will not be serialized
+for now.
+
+ A newly-allocated #GVariant representing @info.
+
+
+
+
+ A #GstDiscovererInfo
+
+
+
+ A combination of #GstDiscovererSerializeFlags to specify
+what needs to be serialized.
+
+
+
+
+
+
+
+
+ Result values for the discovery process.
+
+ The discovery was successful
+
+
+ the URI is invalid
+
+
+ an error happened and the GError is set
+
+
+ the discovery timed-out
+
+
+ the discoverer was already discovering a file
+
+
+ Some plugins are missing for full discovery
+
+
+
+ You can use these flags to control what is serialized by
+gst_discoverer_info_to_variant()
+
+ Serialize only basic information, excluding
+caps, tags and miscellaneous information
+
+
+ Serialize the caps for each stream
+
+
+ Serialize the tags for each stream
+
+
+ Serialize miscellaneous information for each stream
+
+
+ Serialize all the available info, including
+caps, tags and miscellaneous information
+
+
+
+ Base structure for information concerning a media stream. Depending on the
+stream type, one can find more media-specific information in
+#GstDiscovererAudioInfo, #GstDiscovererVideoInfo, and
+#GstDiscovererContainerInfo.
+
+The #GstDiscovererStreamInfo represents the topology of the stream. Siblings
+can be iterated over with gst_discoverer_stream_info_get_next() and
+gst_discoverer_stream_info_get_previous(). Children (sub-streams) of a
+stream can be accessed using the #GstDiscovererContainerInfo API.
+
+As a simple example, if you run #GstDiscoverer on an AVI file with one audio
+and one video stream, you will get a #GstDiscovererContainerInfo
+corresponding to the AVI container, which in turn will have a
+#GstDiscovererAudioInfo sub-stream and a #GstDiscovererVideoInfo sub-stream
+for the audio and video streams respectively.
+
+ Decrements the reference count of all contained #GstDiscovererStreamInfo
+and frees the #GList.
+
+
+
+
+
+ a #GList of #GstDiscovererStreamInfo
+
+
+
+
+
+
+
+
+ the #GstCaps of the stream. Unref with
+#gst_caps_unref after usage.
+
+
+
+
+ a #GstDiscovererStreamInfo
+
+
+
+
+
+ This function is deprecated since version 1.4, use
+#gst_discoverer_info_get_missing_elements_installer_details
+
+ additional information regarding the stream (for
+example codec version, profile, etc..). If you wish to use the #GstStructure
+after the life-time of @info you will need to copy it.
+
+
+
+
+ a #GstDiscovererStreamInfo
+
+
+
+
+
+
+ the next #GstDiscovererStreamInfo in a chain. %NULL
+for final streams.
+Unref with #gst_discoverer_stream_info_unref after usage.
+
+
+
+
+ a #GstDiscovererStreamInfo
+
+
+
+
+
+
+ the previous #GstDiscovererStreamInfo in a chain.
+%NULL for starting points. Unref with #gst_discoverer_stream_info_unref
+after usage.
+
+
+
+
+ a #GstDiscovererStreamInfo
+
+
+
+
+
+
+ the stream ID of this stream. If you wish to
+use the stream ID after the life-time of @info you will need to copy it.
+
+
+
+
+ a #GstDiscovererStreamInfo
+
+
+
+
+
+
+ a human readable name for the stream type of the given @info (ex : "audio",
+"container",...).
+
+
+
+
+ a #GstDiscovererStreamInfo
+
+
+
+
+
+
+ the tags contained in this stream. If you wish to
+use the tags after the life-time of @info you will need to copy them.
+
+
+
+
+ a #GstDiscovererStreamInfo
+
+
+
+
+
+
+ the TOC contained in this stream. If you wish to
+use the TOC after the life-time of @info you will need to copy it.
+
+
+
+
+ a #GstDiscovererStreamInfo
+
+
+
+
+
+
+ #GstDiscovererStreamInfo specific to subtitle streams (this includes text and
+image based ones).
+
+
+ the language of the stream, or NULL if unknown.
+
+
+
+
+ a #GstDiscovererSubtitleInfo
+
+
+
+
+
+
+ #GstDiscovererStreamInfo specific to video streams (this includes images).
+
+
+ the average or nominal bitrate of the video stream in bits/second.
+
+
+
+
+ a #GstDiscovererVideoInfo
+
+
+
+
+
+
+ the depth in bits of the video stream.
+
+
+
+
+ a #GstDiscovererVideoInfo
+
+
+
+
+
+
+ the framerate of the video stream (denominator).
+
+
+
+
+ a #GstDiscovererVideoInfo
+
+
+
+
+
+
+ the framerate of the video stream (numerator).
+
+
+
+
+ a #GstDiscovererVideoInfo
+
+
+
+
+
+
+ the height of the video stream in pixels.
+
+
+
+
+ a #GstDiscovererVideoInfo
+
+
+
+
+
+
+ the maximum bitrate of the video stream in bits/second.
+
+
+
+
+ a #GstDiscovererVideoInfo
+
+
+
+
+
+
+ the Pixel Aspect Ratio (PAR) of the video stream (denominator).
+
+
+
+
+ a #GstDiscovererVideoInfo
+
+
+
+
+
+
+ the Pixel Aspect Ratio (PAR) of the video stream (numerator).
+
+
+
+
+ a #GstDiscovererVideoInfo
+
+
+
+
+
+
+ the width of the video stream in pixels.
+
+
+
+
+ a #GstDiscovererVideoInfo
+
+
+
+
+
+
+ #TRUE if the video stream corresponds to an image (i.e. only contains
+one frame).
+
+
+
+
+ a #GstDiscovererVideoInfo
+
+
+
+
+
+
+ %TRUE if the stream is interlaced, else %FALSE.
+
+
+
+
+ a #GstDiscovererVideoInfo
+
+
+
+
+
+
+ #GstEncodingTarget category for recording and capture.
+Targets within this category are optimized for low latency encoding.
+
+
+
+ #GstEncodingTarget category for device-specific targets.
+The name of the target will usually be the manufacturer and model of the device,
+and that target will contain #GstEncodingProfiles suitable for that device.
+
+
+
+ #GstEncodingTarget category for file extensions.
+The name of the target will be the name of the file extensions possible
+for a particular target. These targets define the 'default' formats
+usually used for a particular file extension.
+
+
+
+ #GstEncodingTarget category for online-services.
+The name of the target will usually be the name of the online service
+and that target will contain #GstEncodingProfiles suitable for that online
+service.
+
+
+
+ #GstEncodingTarget category for storage, archiving and editing targets.
+Those targets can be lossless and/or provide very fast random access content.
+The name of the target will usually be the container type or editing target,
+and that target will contain #GstEncodingProfiles suitable for editing or
+storage.
+
+
+
+ Variant of #GstEncodingProfile for audio streams.
+
+ Creates a new #GstEncodingAudioProfile
+
+All provided allocatable arguments will be internally copied, so can be
+safely freed/unreferenced after calling this method.
+
+ the newly created #GstEncodingAudioProfile.
+
+
+
+
+ the #GstCaps
+
+
+
+ the preset(s) to use on the encoder, can be #NULL
+
+
+
+ the #GstCaps used to restrict the input to the encoder, can be
+NULL. See gst_encoding_profile_get_restriction() for more details.
+
+
+
+ the number of times this stream must be used. 0 means any number of
+ times (including never)
+
+
+
+
+
+
+
+
+ Encoding profiles for containers. Keeps track of a list of #GstEncodingProfile
+
+ Creates a new #GstEncodingContainerProfile.
+
+ The newly created #GstEncodingContainerProfile.
+
+
+
+
+ The name of the container profile, can be %NULL
+
+
+
+ The description of the container profile,
+ can be %NULL
+
+
+
+ The format to use for this profile
+
+
+
+ The preset to use for this profile.
+
+
+
+
+
+ Add a #GstEncodingProfile to the list of profiles handled by @container.
+
+No copy of @profile will be made, if you wish to use it elsewhere after this
+method you should increment its reference count.
+
+ %TRUE if the @stream was properly added, else %FALSE.
+
+
+
+
+ the #GstEncodingContainerProfile to use
+
+
+
+ the #GstEncodingProfile to add.
+
+
+
+
+
+ Checks if @container contains a #GstEncodingProfile identical to
+@profile.
+
+ %TRUE if @container contains a #GstEncodingProfile identical
+to @profile, else %FALSE.
+
+
+
+
+ a #GstEncodingContainerProfile
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+
+
+the list of contained #GstEncodingProfile.
+
+
+
+
+
+
+ a #GstEncodingContainerProfile
+
+
+
+
+
+
+
+
+ The opaque base class object for all encoding profiles. This contains generic
+information like name, description, format and preset.
+
+ Find the #GstEncodingProfile with the specified name and category.
+
+ The matching #GstEncodingProfile or %NULL.
+
+
+
+
+ The name of the target
+
+
+
+ (allow-none): The name of the profile; if %NULL is
+provided, it will default to the encoding profile called `default`.
+
+
+
+ The target category. Can be %NULL
+
+
+
+
+
+ Creates a #GstEncodingProfile matching the formats from the given
+#GstDiscovererInfo. Streams other than audio or video (eg,
+subtitles), are currently ignored.
+
+ The new #GstEncodingProfile or %NULL.
+
+
+
+
+ The #GstDiscovererInfo to read from
+
+
+
+
+
+ Makes a deep copy of @self
+
+ The copy of @self
+
+Since 1.12
+
+
+
+
+ The #GstEncodingProfile to copy
+
+
+
+
+
+ Get whether the format that has been negotiated at some point can be renegotiated
+later during the encoding.
+
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+
+ the description of the profile, can be %NULL.
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+
+ a suitable file extension for @profile, or NULL.
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+
+ the #GstCaps corresponding to the media format used
+in the profile. Unref after usage.
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+ Computes the full output caps that this @profile will be able to consume.
+
+ The full caps the given @profile can consume. Call
+gst_caps_unref() when you are done with the caps.
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+
+ the name of the profile, can be %NULL.
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+
+ The number of times the profile is used in its parent
+container profile. If 0, it is not a mandatory stream.
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+
+ the name of the #GstPreset to be used in the profile.
+This is the name that has been set when saving the preset.
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+
+ the name of the #GstPreset factory to be used in the profile.
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+
+ The restriction #GstCaps to apply before the encoder
+that will be used in the profile. The fields present in restriction caps are
+properties of the raw stream (that is before encoding), such as height and
+width for video and depth and sampling rate for audio. Does not apply to
+#GstEncodingContainerProfile (since there is no corresponding raw stream).
+Can be %NULL. Unref after usage.
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+
+ the human-readable name of the type of @profile.
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Checks whether the two #GstEncodingProfile are equal
+
+ %TRUE if @a and @b are equal, else %FALSE.
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+ a #GstEncodingProfile
+
+
+
+
+
+ Sets whether the format that has been negotiated at some point can be renegotiated
+later during the encoding.
+
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+ Whether the format that has been negotiated first can be renegotiated
+during the encoding
+
+
+
+
+
+ Set @description as the given description for the @profile. A copy of
+@description will be made internally.
+
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+ the description to set on the profile
+
+
+
+
+
+ Set whether the profile should be used or not.
+
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+ %FALSE to disable #profile, %TRUE to enable it
+
+
+
+
+
+ Sets the media format used in the profile.
+
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+ the media format to use in the profile.
+
+
+
+
+
+ Set @name as the given name for the @profile. A copy of @name will be made
+internally.
+
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+ the name to set on the profile
+
+
+
+
+
+ Set the number of times the profile is used in its parent
+container profile. If 0, it is not a mandatory stream
+
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+ the number of times the profile can be used
+
+
+
+
+
+ Sets the name of the #GstElement that implements the #GstPreset interface
+to use for the profile.
+This is the name that has been set when saving the preset.
+
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+ the element preset to use
+
+
+
+
+
+ Sets the name of the #GstPreset's factory to be used in the profile.
+
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+ The name of the preset to use in this @profile.
+
+
+
+
+
+ Set the restriction #GstCaps to apply before the encoder
+that will be used in the profile. See gst_encoding_profile_get_restriction()
+for more about restrictions. Does not apply to #GstEncodingContainerProfile.
+
+
+
+
+
+ a #GstEncodingProfile
+
+
+
+ the restriction to apply
+
+
+
+
+
+
+
+
+
+
+
+ Collection of #GstEncodingProfile for a specific target or use-case.
+
+When being stored/loaded, targets come from a specific category, like
+#GST_ENCODING_CATEGORY_DEVICE.
+
+ Creates a new #GstEncodingTarget.
+
+The name and category can only consist of lowercase ASCII letters for the
+first character, followed by either lowercase ASCII letters, digits or
+hyphens ('-').
+
+The @category <emphasis>should</emphasis> be one of the existing
+well-defined categories, like #GST_ENCODING_CATEGORY_DEVICE, but it
+<emphasis>can</emphasis> be an application or user specific category if
+needed.
+
+ The newly created #GstEncodingTarget or %NULL if
+there was an error.
+
+
+
+
+ The name of the target.
+
+
+
+ The name of the category to which this @target
+belongs. For example: #GST_ENCODING_CATEGORY_DEVICE.
+
+
+
+ A description of #GstEncodingTarget in the
+current locale.
+
+
+
+ A #GList of
+#GstEncodingProfile.
+
+
+
+
+
+
+
+ Searches for the #GstEncodingTarget with the given name, loads it
+and returns it.
+
+If the category name is specified only targets from that category will be
+searched for.
+
+ The #GstEncodingTarget if available, else %NULL.
+
+
+
+
+ the name of the #GstEncodingTarget to load (automatically
+converted to lower case internally as capital letters are not
+valid for target names).
+
+
+
+ the name of the target category, like
+#GST_ENCODING_CATEGORY_DEVICE. Can be %NULL
+
+
+
+
+
+ Opens the provided file and returns the contained #GstEncodingTarget.
+
+ The #GstEncodingTarget contained in the file, else
+%NULL
+
+
+
+
+ The file location to load the #GstEncodingTarget from
+
+
+
+
+
+ Adds the given @profile to the @target. Each added profile must have
+a unique name within the profile.
+
+The @target will steal a reference to the @profile. If you wish to use
+the profile after calling this method, you should increase its reference
+count.
+
+ %TRUE if the profile was added, else %FALSE.
+
+
+
+
+ the #GstEncodingTarget to add a profile to
+
+
+
+ the #GstEncodingProfile to add
+
+
+
+
+
+
+ The category of the @target. For example:
+#GST_ENCODING_CATEGORY_DEVICE.
+
+
+
+
+ a #GstEncodingTarget
+
+
+
+
+
+
+ The description of the @target.
+
+
+
+
+ a #GstEncodingTarget
+
+
+
+
+
+
+ The name of the @target.
+
+
+
+
+ a #GstEncodingTarget
+
+
+
+
+
+
+ The matching #GstEncodingProfile, or %NULL.
+
+
+
+
+ a #GstEncodingTarget
+
+
+
+ the name of the profile to retrieve
+
+
+
+
+
+
+ A list of
+#GstEncodingProfile(s) this @target handles.
+
+
+
+
+
+
+ a #GstEncodingTarget
+
+
+
+
+
+ Saves the @target to a default user-local directory.
+
+ %TRUE if the target was correctly saved, else %FALSE.
+
+
+
+
+ a #GstEncodingTarget
+
+
+
+
+
+ Saves the @target to the provided file location.
+
+ %TRUE if the target was correctly saved, else %FALSE.
+
+
+
+
+ a #GstEncodingTarget
+
+
+
+ the location to store the @target at.
+
+
+
+
+
+
+ Variant of #GstEncodingProfile for video streams, allows specifying the @pass.
+
+ Creates a new #GstEncodingVideoProfile
+
+All provided allocatable arguments will be internally copied, so can be
+safely freed/unreferenced after calling this method.
+
+If you wish to control the pass number (in case of multi-pass scenarios),
+please refer to the gst_encoding_video_profile_set_pass() documentation.
+
+If you wish to use/force a constant framerate please refer to the
+gst_encoding_video_profile_set_variableframerate() documentation.
+
+ the newly created #GstEncodingVideoProfile.
+
+
+
+
+ the #GstCaps
+
+
+
+ the preset(s) to use on the encoder, can be #NULL
+
+
+
+ the #GstCaps used to restrict the input to the encoder, can be
+NULL. See gst_encoding_profile_get_restriction() for more details.
+
+
+
+ the number of times this stream must be used. 0 means any number of
+ times (including never)
+
+
+
+
+
+ Get the pass number if this is part of a multi-pass profile.
+
+ The pass number. Starts at 1 for multi-pass. 0 if this is
+not a multi-pass profile
+
+
+
+
+ a #GstEncodingVideoProfile
+
+
+
+
+
+
+ Whether non-constant video framerate is allowed for encoding.
+
+
+
+
+ a #GstEncodingVideoProfile
+
+
+
+
+
+ Sets the pass number of this video profile. The first pass profile should have
+this value set to 1. If this video profile isn't part of a multi-pass profile,
+you may set it to 0 (the default value).
+
+
+
+
+
+ a #GstEncodingVideoProfile
+
+
+
+ the pass number for this profile
+
+
+
+
+
+ If set to %TRUE, then the incoming stream will be allowed to have non-constant
+framerate. If set to %FALSE (default value), then the incoming stream will
+be normalized by dropping/duplicating frames in order to produce a
+constant framerate.
+
+
+
+
+
+ a #GstEncodingVideoProfile
+
+
+
+ a boolean
+
+
+
+
+
+
+
+
+ Opaque context structure for the plugin installation. Use the provided
+API to set details on it.
+
+ Creates a new #GstInstallPluginsContext.
+
+ a new #GstInstallPluginsContext. Free with
+gst_install_plugins_context_free() when no longer needed
+
+
+
+
+ Frees a #GstInstallPluginsContext.
+
+
+
+
+
+ a #GstInstallPluginsContext
+
+
+
+
+
+ This function is used to tell the external installer process whether it
+should ask for confirmation or not before searching for missing plugins.
+
+If set, this option will be passed to the installer via a
+--interaction=[show-confirm-search|hide-confirm-search] command line option.
+
+
+
+
+
+ a #GstInstallPluginsContext
+
+
+
+ whether to ask for confirmation before searching for plugins
+
+
+
+
+
+ This function is used to pass the calling application's desktop file ID to
+the external installer process.
+
+A desktop file ID is the basename of the desktop file, including the
+.desktop extension.
+
+If set, the desktop file ID will be passed to the installer via a
+--desktop-id= command line option.
+
+
+
+
+
+ a #GstInstallPluginsContext
+
+
+
+ the desktop file ID of the calling application
+
+
+
+
+
+ Sets the startup notification ID for the launched process.
+
+This is typically used to pass the current X11 event timestamp to the
+external installer process.
+
+Startup notification IDs are defined in the
+[FreeDesktop.Org Startup Notifications standard](http://standards.freedesktop.org/startup-notification-spec/startup-notification-latest.txt).
+
+If set, the ID will be passed to the installer via a
+--startup-notification-id= command line option.
+
+GTK+/GNOME applications should be able to create a startup notification ID
+like this:
+|[
+ timestamp = gtk_get_current_event_time ();
+ startup_id = g_strdup_printf ("_TIME%u", timestamp);
+...
+]|
+
+
+
+
+
+ a #GstInstallPluginsContext
+
+
+
+ the startup notification ID
+
+
+
+
+
+ This function is for X11-based applications (such as most Gtk/Qt
+applications on linux/unix) only. You can use it to tell the external
+installer the XID of your main application window. That way the installer
+can make its own window transient to your application window during the
+installation.
+
+If set, the XID will be passed to the installer via a --transient-for=XID
+command line option.
+
+Gtk+/Gnome application should be able to obtain the XID of the top-level
+window like this:
+|[
+##include <gtk/gtk.h>
+##ifdef GDK_WINDOWING_X11
+##include <gdk/gdkx.h>
+##endif
+...
+##ifdef GDK_WINDOWING_X11
+ xid = GDK_WINDOW_XWINDOW (GTK_WIDGET (application_window)->window);
+##endif
+...
+]|
+
+
+
+
+
+ a #GstInstallPluginsContext
+
+
+
+ the XWindow ID (XID) of the top-level application
+
+
+
+
+
+
+ The prototype of the callback function that will be called once the
+external plugin installer program has returned. You only need to provide
+a callback function if you are using the asynchronous interface.
+
+
+
+
+
+ whether the installation of the requested plugins succeeded or not
+
+
+
+ the user data passed to gst_install_plugins_async()
+
+
+
+
+
+ Result codes returned by gst_install_plugins_async() and
+gst_install_plugins_sync(), and also the result code passed to the
+#GstInstallPluginsResultFunc specified with gst_install_plugins_async().
+
+These codes indicate success or failure of starting an external installer
+program and to what extent the requested plugins could be installed.
+
+ all of the requested plugins could be
+ installed
+
+
+ no appropriate installation candidate for
+ any of the requested plugins could be found. Only return this if nothing
+ has been installed. Return #GST_INSTALL_PLUGINS_PARTIAL_SUCCESS if
+ some (but not all) of the requested plugins could be installed.
+
+
+ an error occurred during the installation. If
+ this happens, the user has already seen an error message and another
+ one should not be displayed
+
+
+ some of the requested plugins could
+ be installed, but not all
+
+
+ the user has aborted the installation
+
+
+ the installer had an unclean exit code
+ (ie. death by signal)
+
+
+ the helper returned an invalid status code
+
+
+ returned by gst_install_plugins_async() to
+ indicate that everything went fine so far and the provided callback
+ will be called with the result of the installation later
+
+
+ some internal failure has
+ occurred when trying to start the installer
+
+
+ the helper script to call the
+ actual installer is not installed
+
+
+ a previously-started plugin
+ installation is still in progress, try again later
+
+
+ Convenience function to return the descriptive string associated
+with a status code. This function returns English strings and
+should not be used for user messages. It is here only to assist
+in debugging.
+
+ a descriptive string for the status code in @ret
+
+
+
+
+ the return status code
+
+
+
+
+
+
+ The major version of GStreamer's gst-plugins-base libraries at compile time.
+
+
+
+ The micro version of GStreamer's gst-plugins-base libraries at compile time.
+
+
+
+ The minor version of GStreamer's gst-plugins-base libraries at compile time.
+
+
+
+ The nano version of GStreamer's gst-plugins-base libraries at compile time.
+Actual releases have 0, GIT versions have 1, prerelease versions have 2-...
+
+
+
+ Sets the level and profile on @caps if it can be determined from
+@audio_config. See gst_codec_utils_aac_get_level() and
+gst_codec_utils_aac_get_profile() for more details on the parameters.
+@caps must be audio/mpeg caps with an "mpegversion" field of either 2 or 4.
+If mpegversion is 4, the "base-profile" field is also set in @caps.
+
+ %TRUE if the level and profile could be set, %FALSE otherwise.
+
+
+
+
+ the #GstCaps to which level and profile fields are to be added
+
+
+
+ a pointer to the AudioSpecificConfig as specified in the
+ Elementary Stream Descriptor (esds) in ISO/IEC 14496-1 (see
+ below for a more details).
+
+
+
+ Length of @audio_config in bytes
+
+
+
+
+
+ Returns the channels of the given AAC stream.
+
+ The channels or 0 if the channel could not be determined.
+
+Since 1.10
+
+
+
+
+ a pointer to the AudioSpecificConfig as specified in the
+ Elementary Stream Descriptor (esds) in ISO/IEC 14496-1.
+
+
+
+
+
+
+
+
+ Translates the sample rate to the index corresponding to it in AAC spec.
+
+ The AAC index for this sample rate, -1 if the rate is not a
+valid AAC sample rate.
+
+
+
+
+ Sample rate
+
+
+
+
+
+ Determines the level of a stream as defined in ISO/IEC 14496-3. For AAC LC
+streams, the constraints from the AAC audio profile are applied. For AAC
+Main, LTP, SSR and others, the Main profile is used.
+
+The @audio_config parameter follows the following format, starting from the
+most significant bit of the first byte:
+
+ * Bit 0:4 contains the AudioObjectType
+ * Bit 5:8 contains the sample frequency index (if this is 0xf, then the
+ next 24 bits define the actual sample frequency, and subsequent
+ fields are appropriately shifted).
+ * Bit 9:12 contains the channel configuration
+
+> HE-AAC support has not yet been implemented.
+
+ The level as a const string and %NULL if the level could not be
+determined.
+
+
+
+
+ a pointer to the AudioSpecificConfig as specified in the
+ Elementary Stream Descriptor (esds) in ISO/IEC 14496-1.
+
+
+
+ Length of @audio_config in bytes
+
+
+
+
+
+ Returns the profile of the given AAC stream as a string. The profile is
+determined using the AudioObjectType field which is in the first 5 bits of
+@audio_config.
+
+> HE-AAC support has not yet been implemented.
+
+ The profile as a const string and %NULL if the profile could not be
+determined.
+
+
+
+
+ a pointer to the AudioSpecificConfig as specified in the
+ Elementary Stream Descriptor (esds) in ISO/IEC 14496-1 (see
+ gst_codec_utils_aac_get_level() for a more details).
+
+
+
+ Length of @audio_config in bytes
+
+
+
+
+
+ Translates the sample rate index found in AAC headers to the actual sample
+rate.
+
+ The sample rate if sr_idx is valid, 0 otherwise.
+
+Since 1.10
+
+
+
+
+ a pointer to the AudioSpecificConfig as specified in the
+ Elementary Stream Descriptor (esds) in ISO/IEC 14496-1.
+
+
+
+ Length of @audio_config in bytes
+
+
+
+
+
+ Translates the sample rate index found in AAC headers to the actual sample
+rate.
+
+ The sample rate if @sr_idx is valid, 0 otherwise.
+
+
+
+
+ Sample rate index as from the AudioSpecificConfig (MPEG-4
+ container) or ADTS frame header
+
+
+
+
+
+ Sets the level and profile in @caps if it can be determined from @sps. See
+gst_codec_utils_h264_get_level() and gst_codec_utils_h264_get_profile()
+for more details on the parameters.
+
+ %TRUE if the level and profile could be set, %FALSE otherwise.
+
+
+
+
+ the #GstCaps to which the level and profile are to be added
+
+
+
+ Pointer to the sequence parameter set for the stream.
+
+
+
+ Length of the data available in @sps.
+
+
+
+
+
+ Converts the level indication (level_idc) in the stream's
+sequence parameter set into a string. The SPS is expected to have the
+same format as for gst_codec_utils_h264_get_profile().
+
+ The level as a const string, or %NULL if there is an error.
+
+
+
+
+ Pointer to the sequence parameter set for the stream.
+
+
+
+ Length of the data available in @sps.
+
+
+
+
+
+ Transform a level string from the caps into the level_idc
+
+ the level_idc or 0 if the level is unknown
+
+
+
+
+ A level string from caps
+
+
+
+
+
+ Converts the profile indication (profile_idc) in the stream's
+sequence parameter set into a string. The SPS is expected to have the
+following format, as defined in the H.264 specification. The SPS is viewed
+as a bitstream here, with bit 0 being the most significant bit of the first
+byte.
+
+* Bit 0:7 - Profile indication
+* Bit 8 - constraint_set0_flag
+* Bit 9 - constraint_set1_flag
+* Bit 10 - constraint_set2_flag
+* Bit 11 - constraint_set3_flag
+* Bit 12 - constraint_set3_flag
+* Bit 13:15 - Reserved
+* Bit 16:24 - Level indication
+
+ The profile as a const string, or %NULL if there is an error.
+
+
+
+
+ Pointer to the sequence parameter set for the stream.
+
+
+
+ Length of the data available in @sps.
+
+
+
+
+
+ Sets the level, tier and profile in @caps if it can be determined from
+@profile_tier_level. See gst_codec_utils_h265_get_level(),
+gst_codec_utils_h265_get_tier() and gst_codec_utils_h265_get_profile()
+for more details on the parameters.
+
+ %TRUE if the level, tier, profile could be set, %FALSE otherwise.
+
+Since 1.4
+
+
+
+
+ the #GstCaps to which the level, tier and profile are to be added
+
+
+
+ Pointer to the profile_tier_level struct
+
+
+
+ Length of the data available in @profile_tier_level.
+
+
+
+
+
+ Converts the level indication (general_level_idc) in the stream's
+profile_tier_level structure into a string. The profile_tier_level is
+expected to have the same format as for gst_codec_utils_h264_get_profile().
+
+ The level as a const string, or %NULL if there is an error.
+
+Since 1.4
+
+
+
+
+ Pointer to the profile_tier_level structure
+ for the stream
+
+
+
+ Length of the data available in @profile_tier_level.
+
+
+
+
+
+ Transform a level string from the caps into the level_idc
+
+ the level_idc or 0 if the level is unknown
+
+Since 1.4
+
+
+
+
+ A level string from caps
+
+
+
+
+
+ Converts the profile indication (general_profile_idc) in the stream's
+profile_tier_level structure into a string. The profile_tier_level is
+expected to have the following format, as defined in the H.265
+specification. The profile_tier_level is viewed as a bitstream here,
+with bit 0 being the most significant bit of the first byte.
+
+* Bit 0:1 - general_profile_space
+* Bit 2 - general_tier_flag
+* Bit 3:7 - general_profile_idc
+* Bit 8:39 - general_profile_compatibility_flags
+* Bit 40 - general_progressive_source_flag
+* Bit 41 - general_interlaced_source_flag
+* Bit 42 - general_non_packed_constraint_flag
+* Bit 43 - general_frame_only_constraint_flag
+* Bit 44:87 - general_reserved_zero_44bits
+* Bit 88:95 - general_level_idc
+
+ The profile as a const string, or %NULL if there is an error.
+
+Since 1.4
+
+
+
+
+ Pointer to the profile_tier_level
+ structure for the stream.
+
+
+
+ Length of the data available in @profile_tier_level
+
+
+
+
+
+ Converts the tier indication (general_tier_flag) in the stream's
+profile_tier_level structure into a string. The profile_tier_level
+is expected to have the same format as for gst_codec_utils_h264_get_profile().
+
+ The tier as a const string, or %NULL if there is an error.
+
+Since 1.4
+
+
+
+
+ Pointer to the profile_tier_level structure
+ for the stream.
+
+
+
+ Length of the data available in @profile_tier_level.
+
+
+
+
+
+ Sets the level and profile in @caps if it can be determined from
+@vis_obj_seq. See gst_codec_utils_mpeg4video_get_level() and
+gst_codec_utils_mpeg4video_get_profile() for more details on the
+parameters.
+
+ %TRUE if the level and profile could be set, %FALSE otherwise.
+
+
+
+
+ the #GstCaps to which the level and profile are to be added
+
+
+
+ Pointer to the visual object sequence for the stream.
+
+
+
+ Length of the data available in @vis_obj_seq.
+
+
+
+
+
+ Converts the level indication in the stream's visual object sequence into
+a string. @vis_obj_seq is expected to be the data following the visual
+object sequence start code. Only the first byte
+(profile_and_level_indication) is used.
+
+ The level as a const string, or NULL if there is an error.
+
+
+
+
+ Pointer to the visual object sequence for the stream.
+
+
+
+ Length of the data available in @vis_obj_seq.
+
+
+
+
+
+ Converts the profile indication in the stream's visual object sequence into
+a string. @vis_obj_seq is expected to be the data following the visual
+object sequence start code. Only the first byte
+(profile_and_level_indication) is used.
+
+ The profile as a const string, or NULL if there is an error.
+
+
+
+
+ Pointer to the visual object sequence for the stream.
+
+
+
+ Length of the data available in @vis_obj_seq.
+
+
+
+
+
+ Creates Opus caps from the given parameters.
+
+ The #GstCaps.
+
+
+
+
+ the sample rate
+
+
+
+ the number of channels
+
+
+
+ the channel mapping family
+
+
+
+ the number of independent streams
+
+
+
+ the number of stereo streams
+
+
+
+ the mapping between the streams
+
+
+
+
+
+ Creates Opus caps from the given OpusHead @header and comment header
+@comments.
+
+ The #GstCaps.
+
+
+
+
+ OpusHead header
+
+
+
+ Comment header or NULL
+
+
+
+
+
+ Creates OpusHead header from the given parameters.
+
+ The #GstBuffer containing the OpusHead.
+
+
+
+
+ the sample rate
+
+
+
+ the number of channels
+
+
+
+ the channel mapping family
+
+
+
+ the number of independent streams
+
+
+
+ the number of stereo streams
+
+
+
+ the mapping between the streams
+
+
+
+ Pre-skip in 48kHz samples or 0
+
+
+
+ Output gain or 0
+
+
+
+
+
+ Parses Opus caps and fills the different fields with defaults if possible.
+
+ %TRUE if parsing was successful, %FALSE otherwise.
+
+
+
+
+ the #GstCaps to which the level and profile are to be added
+
+
+
+ the sample rate
+
+
+
+ the number of channels
+
+
+
+ the channel mapping family
+
+
+
+ the number of independent streams
+
+
+
+ the number of stereo streams
+
+
+
+ the mapping between the streams
+
+
+
+
+
+ Parses the OpusHead header.
+
+ %TRUE if parsing was successful, %FALSE otherwise.
+
+
+
+
+ the OpusHead #GstBuffer
+
+
+
+ the sample rate
+
+
+
+ the number of channels
+
+
+
+ the channel mapping family
+
+
+
+ the number of independent streams
+
+
+
+ the number of stereo streams
+
+
+
+ the mapping between the streams
+
+
+
+ Pre-skip in 48kHz samples or 0
+
+
+
+ Output gain or 0
+
+
+
+
+
+ List all available #GstEncodingTarget for the specified category, or all categories
+if @categoryname is %NULL.
+
+ The list of #GstEncodingTarget
+
+
+
+
+
+
+ The category, for ex: #GST_ENCODING_CATEGORY_DEVICE.
+Can be %NULL.
+
+
+
+
+
+ Lists all #GstEncodingTarget categories present on disk.
+
+ A list
+of #GstEncodingTarget categories.
+
+
+
+
+
+
+ Requests plugin installation without blocking. Once the plugins have been
+installed or installation has failed, @func will be called with the result
+of the installation and your provided @user_data pointer.
+
+This function requires a running GLib/Gtk main loop. If you are not
+running a GLib/Gtk main loop, make sure to regularly call
+g_main_context_iteration(NULL,FALSE).
+
+The installer strings that make up @detail are typically obtained by
+calling gst_missing_plugin_message_get_installer_detail() on missing-plugin
+messages that have been caught on a pipeline's bus or created by the
+application via the provided API, such as gst_missing_element_message_new().
+
+It is possible to request the installation of multiple missing plugins in
+one go (as might be required if there is a demuxer for a certain format
+installed but no suitable video decoder and no suitable audio decoder).
+
+ result code whether an external installer could be started
+
+
+
+
+ NULL-terminated array
+ of installer string details (see below)
+
+
+
+
+
+ a #GstInstallPluginsContext, or NULL
+
+
+
+ the function to call when the installer program returns
+
+
+
+ the user data to pass to @func when called, or NULL
+
+
+
+
+
+ Checks whether plugin installation (initiated by this application only)
+is currently in progress.
+
+ TRUE if plugin installation is in progress, otherwise FALSE
+
+
+
+
+ Convenience function to return the descriptive string associated
+with a status code. This function returns English strings and
+should not be used for user messages. It is here only to assist
+in debugging.
+
+ a descriptive string for the status code in @ret
+
+
+
+
+ the return status code
+
+
+
+
+
+ Checks whether plugin installation is likely to be supported by the
+current environment. This currently only checks whether the helper script
+that is to be provided by the distribution or operating system vendor
+exists.
+
+ TRUE if plugin installation is likely to be supported.
+
+
+
+
+ Requests plugin installation and block until the plugins have been
+installed or installation has failed.
+
+This function should almost never be used, it only exists for cases where
+a non-GLib main loop is running and the user wants to run it in a separate
+thread and marshal the result back asynchronously into the main thread
+using the other non-GLib main loop. You should almost always use
+gst_install_plugins_async() instead of this function.
+
+ the result of the installation.
+
+
+
+
+ NULL-terminated array
+ of installer string details
+
+
+
+
+
+ a #GstInstallPluginsContext, or NULL
+
+
+
+
+
+ Checks whether @msg is a missing plugins message.
+
+ %TRUE if @msg is a missing-plugins message, otherwise %FALSE.
+
+
+
+
+ a #GstMessage
+
+
+
+
+
+ Returns an opaque string containing all the details about the missing
+element to be passed to an external installer called via
+gst_install_plugins_async() or gst_install_plugins_sync().
+
+This function is mainly for applications that call external plugin
+installation mechanisms using one of the two above-mentioned functions in
+the case where the application knows exactly what kind of plugin it is
+missing.
+
+ a newly-allocated detail string, or NULL on error. Free string
+ with g_free() when not needed any longer.
+
+
+
+
+ the (fixed) caps for which a decoder element is needed
+
+
+
+
+
+ Creates a missing-plugin message for @element to notify the application
+that a decoder element for a particular set of (fixed) caps is missing.
+This function is mainly for use in plugins.
+
+ a new #GstMessage, or NULL on error
+
+
+
+
+ the #GstElement posting the message
+
+
+
+ the (fixed) caps for which a decoder element is needed
+
+
+
+
+
+ Returns an opaque string containing all the details about the missing
+element to be passed to an external installer called via
+gst_install_plugins_async() or gst_install_plugins_sync().
+
+This function is mainly for applications that call external plugin
+installation mechanisms using one of the two above-mentioned functions in
+the case where the application knows exactly what kind of plugin it is
+missing.
+
+ a newly-allocated detail string, or NULL on error. Free string
+ with g_free() when not needed any longer.
+
+
+
+
+ the name of the missing element (element factory),
+ e.g. "videoscale" or "cdparanoiasrc"
+
+
+
+
+
+ Creates a missing-plugin message for @element to notify the application
+that a certain required element is missing. This function is mainly for
+use in plugins.
+
+ a new #GstMessage, or NULL on error
+
+
+
+
+ the #GstElement posting the message
+
+
+
+ the name of the missing element (element factory),
+ e.g. "videoscale" or "cdparanoiasrc"
+
+
+
+
+
+ Returns an opaque string containing all the details about the missing
+element to be passed to an external installer called via
+gst_install_plugins_async() or gst_install_plugins_sync().
+
+This function is mainly for applications that call external plugin
+installation mechanisms using one of the two above-mentioned functions in
+the case where the application knows exactly what kind of plugin it is
+missing.
+
+ a newly-allocated detail string, or NULL on error. Free string
+ with g_free() when not needed any longer.
+
+
+
+
+ the (fixed) caps for which an encoder element is needed
+
+
+
+
+
+ Creates a missing-plugin message for @element to notify the application
+that an encoder element for a particular set of (fixed) caps is missing.
+This function is mainly for use in plugins.
+
+ a new #GstMessage, or NULL on error
+
+
+
+
+ the #GstElement posting the message
+
+
+
+ the (fixed) caps for which an encoder element is needed
+
+
+
+
+
+ Returns a localised string describing the missing feature, for use in
+error dialogs and the like. Should never return NULL unless @msg is not
+a valid missing-plugin message.
+
+This function is mainly for applications that need a human-readable string
+describing a missing plugin, given a previously collected missing-plugin
+message.
+
+ a newly-allocated description string, or NULL on error. Free
+ string with g_free() when not needed any longer.
+
+
+
+
+ a missing-plugin #GstMessage of type #GST_MESSAGE_ELEMENT
+
+
+
+
+
+ Returns an opaque string containing all the details about the missing
+element to be passed to an external installer called via
+gst_install_plugins_async() or gst_install_plugins_sync().
+
+This function is mainly for applications that call external plugin
+installation mechanisms using one of the two above-mentioned functions.
+
+ a newly-allocated detail string, or NULL on error. Free string
+ with g_free() when not needed any longer.
+
+
+
+
+ a missing-plugin #GstMessage of type #GST_MESSAGE_ELEMENT
+
+
+
+
+
+ Returns an opaque string containing all the details about the missing
+element to be passed to an external installer called via
+gst_install_plugins_async() or gst_install_plugins_sync().
+
+This function is mainly for applications that call external plugin
+installation mechanisms using one of the two above-mentioned functions in
+the case where the application knows exactly what kind of plugin it is
+missing.
+
+ a newly-allocated detail string, or NULL on error. Free string
+ with g_free() when not needed any longer.
+
+
+
+
+ the URI protocol the missing sink needs to implement,
+ e.g. "http" or "smb"
+
+
+
+
+
+ Creates a missing-plugin message for @element to notify the application
+that a sink element for a particular URI protocol is missing. This
+function is mainly for use in plugins.
+
+ a new #GstMessage, or NULL on error
+
+
+
+
+ the #GstElement posting the message
+
+
+
+ the URI protocol the missing sink needs to implement,
+ e.g. "http" or "smb"
+
+
+
+
+
+ Returns an opaque string containing all the details about the missing
+element to be passed to an external installer called via
+gst_install_plugins_async() or gst_install_plugins_sync().
+
+This function is mainly for applications that call external plugin
+installation mechanisms using one of the two above-mentioned functions in
+the case where the application knows exactly what kind of plugin it is
+missing.
+
+ a newly-allocated detail string, or NULL on error. Free string
+ with g_free() when not needed any longer.
+
+
+
+
+ the URI protocol the missing source needs to implement,
+ e.g. "http" or "mms"
+
+
+
+
+
+ Creates a missing-plugin message for @element to notify the application
+that a source element for a particular URI protocol is missing. This
+function is mainly for use in plugins.
+
+ a new #GstMessage, or NULL on error
+
+
+
+
+ the #GstElement posting the message
+
+
+
+ the URI protocol the missing source needs to implement,
+ e.g. "http" or "mms"
+
+
+
+
+
+ Adds a codec tag describing the format specified by @caps to @taglist.
+
+ TRUE if a codec tag was added, FALSE otherwise.
+
+
+
+
+ a #GstTagList
+
+
+
+ a GStreamer codec tag such as #GST_TAG_AUDIO_CODEC,
+ #GST_TAG_VIDEO_CODEC or #GST_TAG_CODEC. If none is specified,
+ the function will attempt to detect the appropriate category.
+
+
+
+ the (fixed) #GstCaps for which a codec tag should be added.
+
+
+
+
+
+ Returns a localised (as far as this is possible) string describing the
+media format specified in @caps, for use in error dialogs or other messages
+to be seen by the user. Should never return NULL unless @caps is invalid.
+
+Also see the convenience function
+gst_pb_utils_add_codec_description_to_tag_list().
+
+ a newly-allocated description string, or NULL on error. Free
+ string with g_free() when not needed any longer.
+
+
+
+
+ the (fixed) #GstCaps for which a format description is needed
+
+
+
+
+
+ Returns a localised string describing a decoder for the format specified
+in @caps, for use in error dialogs or other messages to be seen by the user.
+Should never return NULL unless @factory_name or @caps are invalid.
+
+This function is mainly for internal use, applications would typically
+use gst_missing_plugin_message_get_description() to get a description of
+a missing feature from a missing-plugin message.
+
+ a newly-allocated description string, or NULL on error. Free
+ string with g_free() when not needed any longer.
+
+
+
+
+ the (fixed) #GstCaps for which a decoder description is needed
+
+
+
+
+
+ Returns a localised string describing the given element, for use in
+error dialogs or other messages to be seen by the user. Should never
+return NULL unless @factory_name is invalid.
+
+This function is mainly for internal use, applications would typically
+use gst_missing_plugin_message_get_description() to get a description of
+a missing feature from a missing-plugin message.
+
+ a newly-allocated description string, or NULL on error. Free
+ string with g_free() when not needed any longer.
+
+
+
+
+ the name of the element, e.g. "giosrc"
+
+
+
+
+
+ Returns a localised string describing an encoder for the format specified
+in @caps, for use in error dialogs or other messages to be seen by the user.
+Should never return NULL unless @factory_name or @caps are invalid.
+
+This function is mainly for internal use, applications would typically
+use gst_missing_plugin_message_get_description() to get a description of
+a missing feature from a missing-plugin message.
+
+ a newly-allocated description string, or NULL on error. Free
+ string with g_free() when not needed any longer.
+
+
+
+
+ the (fixed) #GstCaps for which an encoder description is needed
+
+
+
+
+
+ Returns a localised string describing a sink element handling the protocol
+specified in @protocol, for use in error dialogs or other messages to be
+seen by the user. Should never return NULL unless @protocol is invalid.
+
+This function is mainly for internal use, applications would typically
+use gst_missing_plugin_message_get_description() to get a description of
+a missing feature from a missing-plugin message.
+
+ a newly-allocated description string, or NULL on error. Free
+ string with g_free() when not needed any longer.
+
+
+
+
+ the protocol the sink element needs to handle, e.g. "http"
+
+
+
+
+
+ Returns a localised string describing a source element handling the protocol
+specified in @protocol, for use in error dialogs or other messages to be
+seen by the user. Should never return NULL unless @protocol is invalid.
+
+This function is mainly for internal use, applications would typically
+use gst_missing_plugin_message_get_description() to get a description of
+a missing feature from a missing-plugin message.
+
+ a newly-allocated description string, or NULL on error. Free
+ string with g_free() when not needed any longer.
+
+
+
+
+ the protocol the source element needs to handle, e.g. "http"
+
+
+
+
+
+ Initialises the base utils support library. This function is not
+thread-safe. Applications should call it after calling gst_init(),
+plugins should call it from their plugin_init function.
+
+This function may be called multiple times. It will do nothing if the
+library has already been initialised.
+
+
+
+
+
+ Gets the version number of the GStreamer Plugins Base libraries.
+
+
+
+
+
+ pointer to a guint to store the major version number, or %NULL
+
+
+
+ pointer to a guint to store the minor version number, or %NULL
+
+
+
+ pointer to a guint to store the micro version number, or %NULL
+
+
+
+ pointer to a guint to store the nano version number, or %NULL
+
+
+
+
+
+ This function returns a string that is useful for describing this version
+of GStreamer's gst-plugins-base libraries to the outside world: user agent
+strings, logging, about dialogs ...
+
+ a newly allocated string describing this version of gst-plugins-base
+
+
+
+
+
diff --git a/gir-files/GstTag-1.0.gir b/gir-files/GstTag-1.0.gir
new file mode 100644
index 000000000..93190857d
--- /dev/null
+++ b/gir-files/GstTag-1.0.gir
@@ -0,0 +1,1741 @@
+
+
+
+
+
+
+
+
+
+ Direction of contrast processing applied when capturing an image. (string)
+
+The allowed values are:
+ "normal"
+ "soft"
+ "hard"
+
+
+
+ Digital zoom ratio used when capturing an image. (double)
+
+
+
+ Exposure compensation used when capturing an image in EV. (double)
+
+
+
+ Exposure mode used when capturing an image. (string)
+
+The allowed values are:
+ "auto-exposure"
+ "manual-exposure"
+ "auto-bracket"
+
+
+
+ Type of exposure control used when capturing an image. (string)
+
+The allowed values are:
+ "undefined"
+ "manual"
+ "normal" - automatically controlled
+ "aperture-priority" - user selects aperture value
+ "shutter-priority" - user selects shutter speed
+ "creative" - biased towards depth of field
+ "action" - biased towards fast shutter speed
+ "portrait" - closeup, leaving background out of focus
+ "landscape" - landscape photos, background in focus
+
+
+
+ If flash was fired during the capture of an image. (boolean)
+
+Note that if this tag isn't present, it should not be assumed that
+the flash did not fire. It should be treated as unknown.
+
+
+
+ The flash mode selected during the capture of an image. (string)
+
+The allowed values are:
+ "auto"
+ "always"
+ "never"
+
+
+
+ Focal length used when capturing an image, in mm. (double)
+
+
+
+ 35 mm equivalent focal length used when capturing an image, in mm. (double)
+
+
+
+ Focal ratio (f-number) used when capturing an image. (double)
+
+The value stored is the denominator of the focal ratio (f-number).
+For example, if this tag value is 2, the focal ratio is f/2.
+
+
+
+ Gain adjustment applied to an image. (string)
+
+The allowed values are:
+ "none"
+ "low-gain-up"
+ "high-gain-up"
+ "low-gain-down"
+ "high-gain-down"
+
+
+
+ ISO speed used when capturing an image. (integer)
+
+
+
+ Defines the way a camera determines the exposure. (string)
+
+The allowed values are:
+ "unknown"
+ "average"
+ "center-weighted-average"
+ "spot"
+ "multi-spot"
+ "pattern"
+ "partial"
+ "other"
+
+
+
+ Direction of saturation processing applied when capturing an image. (string)
+
+The allowed values are:
+ "normal"
+ "low-saturation"
+ "high-saturation"
+
+
+
+ Scene mode used when capturing an image. (string)
+
+The allowed values are:
+ "standard"
+ "landscape"
+ "portrait"
+ "night-scene"
+
+
+
+ Direction of sharpness processing applied when capturing an image. (string)
+
+The allowed values are:
+ "normal"
+ "soft"
+ "hard"
+
+
+
+ Shutter speed used when capturing an image, in seconds. (fraction)
+
+
+
+ Indicates the source of capture. The device/medium used to do the
+capture. (string)
+
+Allowed values are:
+ "dsc" (= digital still camera)
+ "transparent-scanner"
+ "reflex-scanner"
+ "other"
+
+
+
+ White balance mode used when capturing an image. (string)
+
+The allowed values are:
+ "auto"
+ "manual"
+ "daylight"
+ "cloudy"
+ "tungsten"
+ "fluorescent"
+ "fluorescent h" (newer daylight-calibrated fluorescents)
+ "flash"
+
+
+
+ CDDB disc id in its short form (e.g. 'aa063d0f')
+
+
+
+ CDDB disc id including all details
+
+
+
+ Musicbrainz disc id (e.g. 'ahg7JUcfR3vCYBphSDIogOOWrr0-')
+
+
+
+ Musicbrainz disc id details
+
+
+
+ Annodex CMML clip element tag
+
+
+
+ Annodex CMML head element tag
+
+
+
+ Annodex CMML stream element tag
+
+
+
+ ID3V2 header size considered minimum input for some functions such as
+gst_tag_list_from_id3v2_tag() and gst_tag_get_id3v2_tag_size() for example.
+
+
+
+ Media (image/video) intended horizontal pixel density in ppi. (double)
+
+
+
+ Media (image/video) intended vertical pixel density in ppi. (double)
+
+
+
+ Musical key in which the sound starts. It is represented as a string
+with a maximum length of three characters. The ground keys are
+represented with "A","B","C","D","E", "F" and "G" and halfkeys
+represented with "b" and "#". Minor is represented as "m" (e.g. "Dbm").
+Off key is represented with an "o" only.
+This notation might be extended in the future to support non-minor/major
+keys.
+
+
+
+ MusicBrainz album artist ID
+
+
+
+ MusicBrainz album ID
+
+
+
+ MusicBrainz artist ID
+
+
+
+ MusicBrainz track ID
+
+
+
+ MusicBrainz track TRM ID
+
+
+
+ Provides a base class for demuxing tags at the beginning or end of a
+stream and handles things like typefinding, querying, seeking, and
+different modes of operation (chain-based, pull_range-based, and providing
+downstream elements with random access if upstream supports that). The tag
+is stripped from the output, and all offsets are adjusted for the tag
+sizes, so that to the downstream element the stream will appear as if
+there was no tag at all. Also, once the tag has been parsed, GstTagDemux
+will try to determine the media type of the resulting stream and add a
+source pad with the appropriate caps in order to facilitate auto-plugging.
+
+## Deriving from GstTagDemux
+
+Subclasses have to do four things:
+
+ * In their base init function, they must add a pad template for the sink
+ pad to the element class, describing the media type they can parse in
+ the caps of the pad template.
+ * In their class init function, they must override
+ GST_TAG_DEMUX_CLASS(demux_klass)->identify_tag with their own identify
+ function.
+ * In their class init function, they must override
+ GST_TAG_DEMUX_CLASS(demux_klass)->parse_tag with their own parse
+ function.
+ * In their class init function, they must also set
+ GST_TAG_DEMUX_CLASS(demux_klass)->min_start_size and/or
+ GST_TAG_DEMUX_CLASS(demux_klass)->min_end_size to the minimum size required
+ for the identify function to decide whether the stream has a supported tag
+ or not. A class parsing ID3v1 tags, for example, would set min_end_size to
+ 128 bytes.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ parent element
+
+
+
+
+
+
+
+
+
+
+
+
+ The #GstTagDemuxClass structure. See documentation at beginning of section
+for details about what subclasses need to override and do.
+
+ the parent class.
+
+
+
+ minimum size required to identify a tag at the start and
+determine its total size. Set to 0 if not interested in start tags.
+Subclasses should set this in their class_init function.
+
+
+
+ minimum size required to identify a tag at the end and
+determine its total size. Set to 0 if not interested in end tags.
+Subclasses should set this in their class_init function.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Result values from the parse_tag virtual function.
+
+ cannot parse tag, just skip it
+
+
+ call again with less or more data
+
+
+ parsed tag successfully
+
+
+
+ Type of image contained in an image tag (specified as "image-type" field in
+the info structure in the image's #GstSample)
+
+ No image type. Can be used to
+ tell functions such as gst_tag_image_data_to_image_sample() that no
+ image type should be set.
+
+
+ Undefined/other image type
+
+
+ Cover (front)
+
+
+ Cover (back)
+
+
+ Leaflet page
+
+
+ Medium (e.g. label side of CD)
+
+
+ Lead artist/lead performer/soloist
+
+
+ Artist/performer
+
+
+ Conductor
+
+
+ Band/orchestra
+
+
+ Composer
+
+
+ Lyricist/text writer
+
+
+ Recording location
+
+
+ During recording
+
+
+ During performance
+
+
+ Movie/video screen capture
+
+
+ A fish as funny as the ID3v2 spec
+
+
+ Illustration
+
+
+ Band/artist logotype
+
+
+ Publisher/studio logotype
+
+
+
+ See http://creativecommons.org/ns for more information.
+
+ making multiple copies
+ is allowed
+
+
+ distribution, public display
+ and public performance are allowed
+
+
+ distribution of derivative
+ works is allowed
+
+
+ commercial derivatives are allowed,
+ but only non-commercial distribution is allowed
+
+
+ copyright and license notices
+ must be kept intact
+
+
+ credit must be given to
+ copyright holder and/or author
+
+
+ derivative works must be
+ licensed under the same terms or compatible terms as the original work
+
+
+ source code (the preferred
+ form for making modifications) must be provided when exercising some
+ rights granted by the license
+
+
+ derivative and combined works
+ must be licensed under specified terms, similar to those of the original
+ work
+
+
+ derivative works must be
+ licensed under specified terms, with at least the same conditions as
+ the original work; combinations with the work may be licensed under
+ different terms
+
+
+ exercising rights for
+ commercial purposes is prohibited
+
+
+ use in a
+ non-developing country is prohibited
+
+
+ this license was created
+ by the Creative Commons project
+
+
+ this license was
+ created by the Free Software Foundation (FSF)
+
+
+
+ Provides a base class for adding tags at the beginning or end of a
+stream.
+
+## Deriving from GstTagMux
+
+Subclasses have to do the following things:
+
+ * In their base init function, they must add pad templates for the sink
+ pad and the source pad to the element class, describing the media type
+ they accept and output in the caps of the pad template.
+ * In their class init function, they must override the
+ GST_TAG_MUX_CLASS(mux_klass)->render_start_tag and/or
+ GST_TAG_MUX_CLASS(mux_klass)->render_end_tag vfuncs and set up a render
+ function.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ parent element
+
+
+
+
+
+
+
+
+
+
+
+
+ The #GstTagMuxClass structure. Subclasses need to override at least one
+of the two render vfuncs.
+
+ the parent class.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This interface is implemented by elements that are able to do XMP serialization. Examples for
+such elements are #jifmux and #qtmux.
+
+Applications can use this interface to configure which XMP schemas should be used when serializing
+tags into XMP. Schemas are represented by their names, a full list of the supported schemas can be
+obtained from gst_tag_xmp_list_schemas(). By default, all schemas are used.
+
+
+ Adds all available XMP schemas to the configuration. Meaning that
+all will be used.
+
+
+
+
+
+ a #GstTagXmpWriter
+
+
+
+
+
+ Adds @schema to the list schemas
+
+
+
+
+
+ a #GstTagXmpWriter
+
+
+
+ the schema to be added
+
+
+
+
+
+ Checks if @schema is going to be used
+
+ %TRUE if it is going to be used
+
+
+
+
+ a #GstTagXmpWriter
+
+
+
+ the schema to test
+
+
+
+
+
+ Removes all schemas from the list of schemas to use. Meaning that no
+XMP will be generated.
+
+
+
+
+
+ a #GstTagXmpWriter
+
+
+
+
+
+ Removes a schema from the list of schemas to use. Nothing is done if
+the schema wasn't in the list
+
+
+
+
+
+ a #GstTagXmpWriter
+
+
+
+ the schema to remove
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Check if a given string contains a known ISO 639 language code.
+
+This is useful in situations where it's not clear whether a given
+string is a language code (which should be put into a #GST_TAG_LANGUAGE_CODE
+tag) or a free-form language name descriptor (which should be put into a
+#GST_TAG_LANGUAGE_NAME tag instead).
+
+ TRUE if the two- or three-letter language code in @lang_code
+ is a valid ISO-639 language code.
+
+
+
+
+ ISO-639 language code (e.g. "deu" or "ger" or "de")
+
+
+
+
+
+ Convenience function to read a string with unknown character encoding. If
+the string is already in UTF-8 encoding, it will be returned right away.
+If not it tries to detect byte-order-mark for UTF-16/32 cases and use that.
+Otherwise, the environment will be searched for a number of environment
+variables (whose names are specified in the NULL-terminated string array
+@env_vars) containing a list of character encodings to try/use. If none
+are specified, the current locale will be tried. If that also doesn't work,
+WINDOWS-1252/ISO-8859-1 is assumed (which will almost always succeed).
+
+ a newly-allocated string in UTF-8 encoding, or NULL
+
+
+
+
+ string data
+
+
+
+ length of string data, or -1 if the string is NUL-terminated
+
+
+
+ a NULL-terminated string array of environment variable names,
+ or NULL
+
+
+
+
+
+ Looks up the GStreamer tag for a ID3v2 tag.
+
+ The corresponding GStreamer tag or NULL if none exists.
+
+
+
+
+ ID3v2 tag to convert to GStreamer tag
+
+
+
+
+
+ Looks up the GStreamer tag for an ID3v2 user tag (e.g. description in
+TXXX frame or owner in UFID frame).
+
+ The corresponding GStreamer tag or NULL if none exists.
+
+
+
+
+ the type of ID3v2 user tag (e.g. "TXXX" or "UDIF")
+
+
+
+ ID3v2 user tag to convert to GStreamer tag
+
+
+
+
+
+ Looks up the GStreamer tag for a vorbiscomment tag.
+
+ The corresponding GStreamer tag or NULL if none exists.
+
+
+
+
+ vorbiscomment tag to convert to GStreamer tag
+
+
+
+
+
+ Determines size of an ID3v2 tag on buffer containing at least ID3v2 header,
+i.e. at least #GST_TAG_ID3V2_HEADER_SIZE (10) bytes;
+
+ Size of tag, or 0 if header is invalid or too small.
+
+
+
+
+ buffer holding ID3v2 tag (or at least the start of one)
+
+
+
+
+
+ Returns two-letter ISO-639-1 language code given a three-letter ISO-639-2
+language code or two-letter ISO-639-1 language code (both are accepted for
+convenience).
+
+Language codes are case-sensitive and expected to be lower case.
+
+ two-letter ISO-639-1 language code string that maps to @lang_code,
+ or NULL if no mapping is known. The returned string must not be
+ modified or freed.
+
+
+
+
+ ISO-639 language code (e.g. "deu" or "ger" or "de")
+
+
+
+
+
+ Returns three-letter ISO-639-2 "bibliographic" language code given a
+two-letter ISO-639-1 language code or a three-letter ISO-639-2 language
+code (both are accepted for convenience).
+
+The "bibliographic" code is derived from the English name of the language
+(e.g. "ger" for German instead of "de" or "deu"). In most scenarios, the
+"terminological" codes are prefered.
+
+Language codes are case-sensitive and expected to be lower case.
+
+ three-letter ISO-639-2 language code string that maps to @lang_code,
+ or NULL if no mapping is known. The returned string must not be
+ modified or freed.
+
+
+
+
+ ISO-639 language code (e.g. "deu" or "ger" or "de")
+
+
+
+
+
+ Returns three-letter ISO-639-2 "terminological" language code given a
+two-letter ISO-639-1 language code or a three-letter ISO-639-2 language
+code (both are accepted for convenience).
+
+The "terminological" code is derived from the local name of the language
+(e.g. "deu" for German instead of "ger"). In most scenarios, the
+"terminological" codes are prefered over the "bibliographic" ones.
+
+Language codes are case-sensitive and expected to be lower case.
+
+ three-letter ISO-639-2 language code string that maps to @lang_code,
+ or NULL if no mapping is known. The returned string must not be
+ modified or freed.
+
+
+
+
+ ISO-639 language code (e.g. "deu" or "ger" or "de")
+
+
+
+
+
+ Returns a list of known language codes (in form of two-letter ISO-639-1
+codes). This is useful for UIs to build a list of available languages for
+tagging purposes (e.g. to tag an audio track appropriately in a video or
+audio editor).
+
+ NULL-terminated string array with two-letter
+ language codes. Free with g_strfreev() when no longer needed.
+
+
+
+
+
+
+ Returns the name of the language given an ISO-639 language code as
+found in a GST_TAG_LANGUAGE_CODE tag. The name will be translated
+according to the current locale (if the library was built against the
+iso-codes package, otherwise the English name will be returned).
+
+Language codes are case-sensitive and expected to be lower case.
+
+ language name in UTF-8 format, or NULL if @language_code could
+ not be mapped to a language name. The returned string must not be
+ modified and does not need to be freed; it will stay valid until the
+ application is terminated.
+
+
+
+
+ two or three-letter ISO-639 language code
+
+
+
+
+
+ Get the description of a license, which is a translated description
+of the license's main features.
+
+ the description of the license, or NULL if the license is unknown
+ or a description is not available.
+
+
+
+
+ a license reference string in form of a URI,
+ e.g. "http://creativecommons.org/licenses/by-nc-nd/2.0/"
+
+
+
+
+
+ Get the flags of a license, which describe most of the features of
+a license in their most general form.
+
+ the flags of the license, or 0 if the license is unknown
+
+
+
+
+ a license reference string in form of a URI,
+ e.g. "http://creativecommons.org/licenses/by-nc-nd/2.0/"
+
+
+
+
+
+ Get the jurisdiction code of a license. This is usually a two-letter
+ISO 3166-1 alpha-2 code, but there is also the special case of Scotland,
+for which no code exists and which is thus represented as "scotland".
+
+Known jurisdictions: ar, at, au, be, bg, br, ca, ch, cl, cn, co, de,
+dk, es, fi, fr, hr, hu, il, in, it, jp, kr, mk, mt, mx, my, nl, pe, pl,
+pt, scotland, se, si, tw, uk, us, za.
+
+ the jurisdiction code of the license, or NULL if the license is
+ unknown or is not specific to a particular jurisdiction.
+
+
+
+
+ a license reference string in form of a URI,
+ e.g. "http://creativecommons.org/licenses/by-nc-nd/2.0/"
+
+
+
+
+
+ Get the nick name of a license, which is a short (untranslated) string
+such as e.g. "CC BY-NC-ND 2.0 UK".
+
+ the nick name of the license, or NULL if the license is unknown
+
+
+
+
+ a license reference string in form of a URI,
+ e.g. "http://creativecommons.org/licenses/by-nc-nd/2.0/"
+
+
+
+
+
+ Get the title of a license, which is a short translated description
+of the license's features (generally not very pretty though).
+
+ the title of the license, or NULL if the license is unknown or
+ no title is available.
+
+
+
+
+ a license reference string in form of a URI,
+ e.g. "http://creativecommons.org/licenses/by-nc-nd/2.0/"
+
+
+
+
+
+ Get the version of a license.
+
+ the version of the license, or NULL if the license is not known or
+ has no version
+
+
+
+
+ a license reference string in form of a URI,
+ e.g. "http://creativecommons.org/licenses/by-nc-nd/2.0/"
+
+
+
+
+
+ Returns a list of known license references (in form of URIs). This is
+useful for UIs to build a list of available licenses for tagging purposes
+(e.g. to tag an audio track appropriately in a video or audio editor, or
+an image in a camera application).
+
+ NULL-terminated array of license strings. Free
+ with g_strfreev() when no longer needed.
+
+
+
+
+
+
+ Gets the number of ID3v1 genres that can be identified. Winamp genres are
+included.
+
+ the number of ID3v1 genres that can be identified
+
+
+
+
+ Gets the ID3v1 genre name for a given ID.
+
+ the genre or NULL if no genre is associated with that ID.
+
+
+
+
+ ID of genre to query
+
+
+
+
+
+ Helper function for tag-reading plugins to create a #GstSample suitable to
+add to a #GstTagList as an image tag (such as #GST_TAG_IMAGE or
+#GST_TAG_PREVIEW_IMAGE) from the encoded image data and an (optional) image
+type.
+
+Background: cover art and other images in tags are usually stored as a
+blob of binary image data, often accompanied by a MIME type or some other
+content type string (e.g. 'png', 'jpeg', 'jpg'). Sometimes there is also an
+'image type' to indicate what kind of image this is (e.g. front cover,
+back cover, artist, etc.). The image data may also be an URI to the image
+rather than the image itself.
+
+In GStreamer, image tags are #GstSample<!-- -->s containing the raw image
+data, with the sample caps describing the content type of the image
+(e.g. image/jpeg, image/png, text/uri-list). The sample info may contain
+an additional 'image-type' field of #GST_TYPE_TAG_IMAGE_TYPE to describe
+the type of image (front cover, back cover etc.). #GST_TAG_PREVIEW_IMAGE
+tags should not carry an image type, their type is already indicated via
+the special tag name.
+
+This function will do various checks and typefind the encoded image
+data (we can't trust the declared mime type).
+
+ a newly-allocated image sample for use in tag lists, or NULL
+
+
+
+
+ the (encoded) image
+
+
+
+ the length of the encoded image data at @image_data
+
+
+
+ type of the image, or #GST_TAG_IMAGE_TYPE_UNDEFINED. Pass
+ #GST_TAG_IMAGE_TYPE_NONE if no image type should be set at all (e.g.
+ for preview images)
+
+
+
+
+
+ Adds an image from an ID3 APIC frame (or similar, such as used in FLAC)
+to the given tag list. Also see gst_tag_image_data_to_image_sample() for
+more information on image tags in GStreamer.
+
+ %TRUE if the image was processed, otherwise %FALSE
+
+
+
+
+ a tag list
+
+
+
+ the (encoded) image
+
+
+
+ the length of the encoded image data at @image_data
+
+
+
+ picture type as per the ID3 (v2.4.0) specification for
+ the APIC frame (0 = unknown/other)
+
+
+
+
+
+ Parses the IFD and IFD tags data contained in the buffer and puts it
+on a taglist. The base_offset is used to subtract from the offset in
+the tag entries and be able to get the offset relative to the buffer
+start
+
+ The parsed taglist
+
+
+
+
+ The exif buffer
+
+
+
+ byte order of the data
+
+
+
+ Offset from the tiff header to this buffer
+
+
+
+
+
+ Parses the exif tags starting with a tiff header structure.
+
+ The taglist
+
+
+
+
+ The exif buffer
+
+
+
+
+
+ Creates a new tag list that contains the information parsed out of an
+ID3 tag.
+
+ A new #GstTagList with all tags that could be extracted from the
+ given ID3 tag or NULL on error.
+
+
+
+
+ buffer to convert
+
+
+
+
+
+ Creates a new tag list that contains the information parsed out of a
+vorbiscomment packet.
+
+ A new #GstTagList with all tags that could be extracted from the
+ given vorbiscomment buffer or NULL on error.
+
+
+
+
+ data to convert
+
+
+
+ size of @data
+
+
+
+ identification data at start of stream
+
+
+
+ length of identification data
+
+
+
+ pointer to a string that should take the vendor string
+ of this vorbis comment or NULL if you don't need it.
+
+
+
+
+
+ Creates a new tag list that contains the information parsed out of a
+vorbiscomment packet.
+
+ A new #GstTagList with all tags that could be extracted from the
+ given vorbiscomment buffer or NULL on error.
+
+
+
+
+ buffer to convert
+
+
+
+ identification data at start of stream
+
+
+
+ length of identification data
+
+
+
+ pointer to a string that should take the vendor string
+ of this vorbis comment or NULL if you don't need it.
+
+
+
+
+
+ Parse a xmp packet into a taglist.
+
+ new taglist or %NULL, free the list when done
+
+
+
+
+ buffer
+
+
+
+
+
+ Parses the data containing an ID3v1 tag and returns a #GstTagList from the
+parsed data.
+
+ A new tag list or NULL if the data was not an ID3v1 tag.
+
+
+
+
+ 128 bytes of data containing the ID3v1 tag
+
+
+
+
+
+ Formats the tags in taglist on exif format. The resulting buffer contains
+the tags IFD and is followed by the data pointed by the tag entries.
+
+ A GstBuffer containing the tag entries followed by the tag data
+
+
+
+
+ The taglist
+
+
+
+ byte order used in writing (G_LITTLE_ENDIAN or G_BIG_ENDIAN)
+
+
+
+ Offset from the tiff header first byte
+
+
+
+
+
+ Formats the tags in taglist into exif structure, a tiff header
+is put in the beginning of the buffer.
+
+ A GstBuffer containing the data
+
+
+
+
+ The taglist
+
+
+
+
+
+ Creates a new vorbiscomment buffer from a tag list.
+
+ A new #GstBuffer containing a vorbiscomment buffer with all tags
+ that could be converted from the given tag list.
+
+
+
+
+ tag list to convert
+
+
+
+ identification data at start of stream
+
+
+
+ length of identification data, may be 0 if @id_data is NULL
+
+
+
+ string that describes the vendor string or NULL
+
+
+
+
+
+ Formats a taglist as a xmp packet using only the selected
+schemas. An empty list (%NULL) means that all schemas should
+be used
+
+ new buffer or %NULL, unref the buffer when done
+
+
+
+
+ tags
+
+
+
+ does the container forbid inplace editing
+
+
+
+ %NULL terminated array of schemas to be used on serialization
+
+
+
+
+
+ Convenience function to parse a GST_TAG_EXTENDED_COMMENT string and
+separate it into its components.
+
+If successful, @key, @lang and/or @value will be set to newly allocated
+strings that you need to free with g_free() when done. @key and @lang
+may also be set to NULL by this function if there is no key or no language
+code in the extended comment string.
+
+ TRUE if the string could be parsed, otherwise FALSE
+
+
+
+
+ an extended comment string, see #GST_TAG_EXTENDED_COMMENT
+
+
+
+ return location for the comment description key, or NULL
+
+
+
+ return location for the comment ISO-639 language code, or NULL
+
+
+
+ return location for the actual comment string, or NULL
+
+
+
+ whether to fail if strings are not in key=value form
+
+
+
+
+
+ Registers additional musicbrainz-specific tags with the GStreamer tag
+system. Plugins and applications that use these tags should call this
+function before using them. Can be called multiple times.
+
+
+
+
+
+ Looks up the ID3v2 tag for a GStreamer tag.
+
+ The corresponding ID3v2 tag or NULL if none exists.
+
+
+
+
+ GStreamer tag to convert to vorbiscomment tag
+
+
+
+
+
+ Creates a new tag list that contains the information parsed out of a
+vorbiscomment packet.
+
+ A #GList of newly-allocated
+ key=value strings. Free with g_list_foreach (list, (GFunc) g_free, NULL)
+ plus g_list_free (list)
+
+
+
+
+
+
+ a #GstTagList
+
+
+
+ a GStreamer tag identifier, such as #GST_TAG_ARTIST
+
+
+
+
+
+ Looks up the vorbiscomment tag for a GStreamer tag.
+
+ The corresponding vorbiscomment tag or NULL if none exists.
+
+
+
+
+ GStreamer tag to convert to vorbiscomment tag
+
+
+
+
+
+ Gets the list of supported schemas in the xmp lib
+
+ a %NULL terminated array of strings with the
+ schema names
+
+
+
+
+
+
+ Convenience function using gst_tag_from_vorbis_tag(), parsing
+a vorbis comment string into the right type and adding it to the
+given taglist @list.
+
+Unknown vorbiscomment tags will be added to the tag list in form
+of a #GST_TAG_EXTENDED_COMMENT.
+
+
+
+
+
+ a #GstTagList
+
+
+
+ a vorbiscomment tag string (key in key=value), must be valid UTF-8
+
+
+
+ a vorbiscomment value string (value in key=value), must be valid UTF-8
+
+
+
+
+
+
diff --git a/gir-files/GstVideo-1.0.gir b/gir-files/GstVideo-1.0.gir
new file mode 100644
index 000000000..2d142ccf7
--- /dev/null
+++ b/gir-files/GstVideo-1.0.gir
@@ -0,0 +1,13325 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ A bufferpool option to enable extra padding. When a bufferpool supports this
+option, gst_buffer_pool_config_set_video_alignment() can be called.
+
+When this option is enabled on the bufferpool,
+#GST_BUFFER_POOL_OPTION_VIDEO_META should also be enabled.
+
+
+
+ An option that can be activated on a bufferpool to request gl texture upload
+meta on buffers from the pool.
+
+When this option is enabled on the bufferpool,
+@GST_BUFFER_POOL_OPTION_VIDEO_META should also be enabled.
+
+
+
+ An option that can be activated on bufferpool to request video metadata
+on buffers from the pool.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This interface is implemented by elements which can perform some color
+balance operation on video frames they process. For example, modifying
+the brightness, contrast, hue or saturation.
+
+Example elements are 'xvimagesink' and 'colorbalance'
+
+ Get the #GstColorBalanceType of this implementation.
+
+ The #GstColorBalanceType.
+
+
+
+
+ The #GstColorBalance implementation
+
+
+
+
+
+ Retrieve the current value of the indicated channel, between min_value
+and max_value.
+
+See Also: The #GstColorBalanceChannel.min_value and
+ #GstColorBalanceChannel.max_value members of the
+ #GstColorBalanceChannel object.
+
+ The current value of the channel.
+
+
+
+
+ A #GstColorBalance instance
+
+
+
+ A #GstColorBalanceChannel instance
+
+
+
+
+
+ Retrieve a list of the available channels.
+
+ A
+ GList containing pointers to #GstColorBalanceChannel
+ objects. The list is owned by the #GstColorBalance
+ instance and must not be freed.
+
+
+
+
+
+
+ A #GstColorBalance instance
+
+
+
+
+
+ Sets the current value of the channel to the passed value, which must
+be between min_value and max_value.
+
+See Also: The #GstColorBalanceChannel.min_value and
+ #GstColorBalanceChannel.max_value members of the
+ #GstColorBalanceChannel object.
+
+
+
+
+
+ A #GstColorBalance instance
+
+
+
+ A #GstColorBalanceChannel instance
+
+
+
+ The new value for the channel.
+
+
+
+
+
+ A helper function called by implementations of the GstColorBalance
+interface. It fires the #GstColorBalance::value-changed signal on the
+instance, and the #GstColorBalanceChannel::value-changed signal on the
+channel object.
+
+
+
+
+
+ A #GstColorBalance instance
+
+
+
+ A #GstColorBalanceChannel whose value has changed
+
+
+
+ The new value of the channel
+
+
+
+
+
+ Get the #GstColorBalanceType of this implementation.
+
+ The #GstColorBalanceType.
+
+
+
+
+ The #GstColorBalance implementation
+
+
+
+
+
+ Retrieve the current value of the indicated channel, between min_value
+and max_value.
+
+See Also: The #GstColorBalanceChannel.min_value and
+ #GstColorBalanceChannel.max_value members of the
+ #GstColorBalanceChannel object.
+
+ The current value of the channel.
+
+
+
+
+ A #GstColorBalance instance
+
+
+
+ A #GstColorBalanceChannel instance
+
+
+
+
+
+ Retrieve a list of the available channels.
+
+ A
+ GList containing pointers to #GstColorBalanceChannel
+ objects. The list is owned by the #GstColorBalance
+ instance and must not be freed.
+
+
+
+
+
+
+ A #GstColorBalance instance
+
+
+
+
+
+ Sets the current value of the channel to the passed value, which must
+be between min_value and max_value.
+
+See Also: The #GstColorBalanceChannel.min_value and
+ #GstColorBalanceChannel.max_value members of the
+ #GstColorBalanceChannel object.
+
+
+
+
+
+ A #GstColorBalance instance
+
+
+
+ A #GstColorBalanceChannel instance
+
+
+
+ The new value for the channel.
+
+
+
+
+
+ A helper function called by implementations of the GstColorBalance
+interface. It fires the #GstColorBalance::value-changed signal on the
+instance, and the #GstColorBalanceChannel::value-changed signal on the
+channel object.
+
+
+
+
+
+ A #GstColorBalance instance
+
+
+
+ A #GstColorBalanceChannel whose value has changed
+
+
+
+ The new value of the channel
+
+
+
+
+
+ Fired when the value of the indicated channel has changed.
+
+
+
+
+
+ The #GstColorBalanceChannel
+
+
+
+ The new value
+
+
+
+
+
+
+ The #GstColorBalanceChannel object represents a parameter
+for modifying the color balance implemented by an element providing the
+#GstColorBalance interface. For example, Hue or Saturation.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ A string containing a descriptive name for this channel
+
+
+
+ The minimum valid value for this channel.
+
+
+
+ The maximum valid value for this channel.
+
+
+
+
+
+
+
+
+ Fired when the value of the indicated channel has changed.
+
+
+
+
+
+ The new value
+
+
+
+
+
+
+ Color-balance channel class.
+
+ the parent class
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Color-balance interface.
+
+ the parent interface
+
+
+
+
+
+ A
+ GList containing pointers to #GstColorBalanceChannel
+ objects. The list is owned by the #GstColorBalance
+ instance and must not be freed.
+
+
+
+
+
+
+ A #GstColorBalance instance
+
+
+
+
+
+
+
+
+
+
+
+
+ A #GstColorBalance instance
+
+
+
+ A #GstColorBalanceChannel instance
+
+
+
+ The new value for the channel.
+
+
+
+
+
+
+
+
+ The current value of the channel.
+
+
+
+
+ A #GstColorBalance instance
+
+
+
+ A #GstColorBalanceChannel instance
+
+
+
+
+
+
+
+
+ The #GstColorBalanceType.
+
+
+
+
+ The #GstColorBalance implementation
+
+
+
+
+
+
+
+
+
+
+
+
+ A #GstColorBalance instance
+
+
+
+ A #GstColorBalanceChannel whose value has changed
+
+
+
+ The new value of the channel
+
+
+
+
+
+
+
+
+
+
+
+
+ An enumeration indicating whether an element implements color balancing
+operations in software or in dedicated hardware. In general, dedicated
+hardware implementations (such as those provided by xvimagesink) are
+preferred.
+
+ Color balance is implemented with dedicated
+ hardware.
+
+
+ Color balance is implemented via software
+ processing.
+
+
+
+ This metadata stays relevant as long as video colorspace is unchanged.
+
+
+
+ This metadata stays relevant as long as video orientation is unchanged.
+
+
+
+ This metadata stays relevant as long as video size is unchanged.
+
+
+
+ This metadata is relevant for video streams.
+
+
+
+ The Navigation interface is used for creating and injecting navigation related
+events such as mouse button presses, cursor motion and key presses. The associated
+library also provides methods for parsing received events, and for sending and
+receiving navigation related bus events. One main usecase is DVD menu navigation.
+
+The main parts of the API are:
+
+* The GstNavigation interface, implemented by elements which provide an application
+ with the ability to create and inject navigation events into the pipeline.
+* GstNavigation event handling API. GstNavigation events are created in response to
+ calls on a GstNavigation interface implementation, and sent in the pipeline. Upstream
+ elements can use the navigation event API functions to parse the contents of received
+ messages.
+
+* GstNavigation message handling API. GstNavigation messages may be sent on the message
+ bus to inform applications of navigation related changes in the pipeline, such as the
+ mouse moving over a clickable region, or the set of available angles changing.
+
+The GstNavigation message functions provide functions for creating and parsing
+custom bus messages for signaling GstNavigation changes.
+
+ Inspect a #GstEvent and return the #GstNavigationEventType of the event, or
+#GST_NAVIGATION_EVENT_INVALID if the event is not a #GstNavigation event.
+
+
+
+
+
+ A #GstEvent to inspect.
+
+
+
+
+
+ Inspect a #GstNavigation command event and retrieve the enum value of the
+associated command.
+
+ TRUE if the navigation command could be extracted, otherwise FALSE.
+
+
+
+
+ A #GstEvent to inspect.
+
+
+
+ Pointer to GstNavigationCommand to receive the type of the
+navigation event.
+
+
+
+
+
+
+
+
+
+
+ A #GstEvent to inspect.
+
+
+
+ A pointer to a location to receive the string identifying the key
+press. The returned string is owned by the event, and valid only until the
+event is unreffed.
+
+
+
+
+
+ Retrieve the details of either a #GstNavigation mouse button press event or
+a mouse button release event. Determine which type the event is using
+gst_navigation_event_get_type() to retrieve the #GstNavigationEventType.
+
+ TRUE if the button number and both coordinates could be extracted,
+ otherwise FALSE.
+
+
+
+
+ A #GstEvent to inspect.
+
+
+
+ Pointer to a gint that will receive the button number associated
+with the event.
+
+
+
+ Pointer to a gdouble to receive the x coordinate of the mouse button
+event.
+
+
+
+ Pointer to a gdouble to receive the y coordinate of the mouse button
+event.
+
+
+
+
+
+ Inspect a #GstNavigation mouse movement event and extract the coordinates
+of the event.
+
+ TRUE if both coordinates could be extracted, otherwise FALSE.
+
+
+
+
+ A #GstEvent to inspect.
+
+
+
+ Pointer to a gdouble to receive the x coordinate of the mouse movement.
+
+
+
+ Pointer to a gdouble to receive the y coordinate of the mouse movement.
+
+
+
+
+
+ Check a bus message to see if it is a #GstNavigation event, and return
+the #GstNavigationMessageType identifying the type of the message if so.
+
+ The type of the #GstMessage, or
+#GST_NAVIGATION_MESSAGE_INVALID if the message is not a #GstNavigation
+notification.
+
+
+
+
+ A #GstMessage to inspect.
+
+
+
+
+
+ Creates a new #GstNavigation message with type
+#GST_NAVIGATION_MESSAGE_ANGLES_CHANGED for notifying an application
+that the current angle, or current number of angles available in a
+multiangle video has changed.
+
+ The new #GstMessage.
+
+
+
+
+ A #GstObject to set as source of the new message.
+
+
+
+ The currently selected angle.
+
+
+
+ The number of viewing angles now available.
+
+
+
+
+
+ Creates a new #GstNavigation message with type
+#GST_NAVIGATION_MESSAGE_COMMANDS_CHANGED
+
+ The new #GstMessage.
+
+
+
+
+ A #GstObject to set as source of the new message.
+
+
+
+
+
+ Creates a new #GstNavigation message with type
+#GST_NAVIGATION_MESSAGE_EVENT.
+
+ The new #GstMessage.
+
+
+
+
+ A #GstObject to set as source of the new message.
+
+
+
+ A navigation #GstEvent
+
+
+
+
+
+ Creates a new #GstNavigation message with type
+#GST_NAVIGATION_MESSAGE_MOUSE_OVER.
+
+ The new #GstMessage.
+
+
+
+
+ A #GstObject to set as source of the new message.
+
+
+
+ %TRUE if the mouse has entered a clickable area of the display.
+%FALSE if it is over a non-clickable area.
+
+
+
+
+
+ Parse a #GstNavigation message of type GST_NAVIGATION_MESSAGE_ANGLES_CHANGED
+and extract the @cur_angle and @n_angles parameters.
+
+ %TRUE if the message could be successfully parsed. %FALSE if not.
+
+
+
+
+ A #GstMessage to inspect.
+
+
+
+ A pointer to a #guint to receive the new current angle number,
+or NULL
+
+
+
+ A pointer to a #guint to receive the new angle count, or NULL.
+
+
+
+
+
+ Parse a #GstNavigation message of type #GST_NAVIGATION_MESSAGE_EVENT
+and extract contained #GstEvent. The caller must unref the @event when done
+with it.
+
+ %TRUE if the message could be successfully parsed. %FALSE if not.
+
+
+
+
+ A #GstMessage to inspect.
+
+
+
+ a pointer to a #GstEvent to receive the
+ contained navigation event.
+
+
+
+
+
+ Parse a #GstNavigation message of type #GST_NAVIGATION_MESSAGE_MOUSE_OVER
+and extract the active/inactive flag. If the mouse over event is marked
+active, it indicates that the mouse is over a clickable area.
+
+ %TRUE if the message could be successfully parsed. %FALSE if not.
+
+
+
+
+ A #GstMessage to inspect.
+
+
+
+ A pointer to a gboolean to receive the active/inactive state,
+or NULL.
+
+
+
+
+
+ Inspect a #GstQuery and return the #GstNavigationQueryType associated with
+it if it is a #GstNavigation query.
+
+ The #GstNavigationQueryType of the query, or
+#GST_NAVIGATION_QUERY_INVALID
+
+
+
+
+ The query to inspect
+
+
+
+
+
+ Create a new #GstNavigation angles query. When executed, it will
+query the pipeline for the set of currently available angles, which may be
+greater than one in a multiangle video.
+
+ The new query.
+
+
+
+
+ Create a new #GstNavigation commands query. When executed, it will
+query the pipeline for the set of currently available commands.
+
+ The new query.
+
+
+
+
+ Parse the current angle number in the #GstNavigation angles @query into the
+#guint pointed to by the @cur_angle variable, and the number of available
+angles into the #guint pointed to by the @n_angles variable.
+
+ %TRUE if the query could be successfully parsed. %FALSE if not.
+
+
+
+
+ a #GstQuery
+
+
+
+ Pointer to a #guint into which to store the currently selected
+angle value from the query, or NULL
+
+
+
+ Pointer to a #guint into which to store the number of angles
+value from the query, or NULL
+
+
+
+
+
+ Parse the number of commands in the #GstNavigation commands @query.
+
+ %TRUE if the query could be successfully parsed. %FALSE if not.
+
+
+
+
+ a #GstQuery
+
+
+
+ the number of commands in this query.
+
+
+
+
+
+ Parse the #GstNavigation command query and retrieve the @nth command from
+it into @cmd. If the list contains less elements than @nth, @cmd will be
+set to #GST_NAVIGATION_COMMAND_INVALID.
+
+ %TRUE if the query could be successfully parsed. %FALSE if not.
+
+
+
+
+ a #GstQuery
+
+
+
+ the nth command to retrieve.
+
+
+
+ a pointer to store the nth command into.
+
+
+
+
+
+ Set the #GstNavigation angles query result field in @query.
+
+
+
+
+
+ a #GstQuery
+
+
+
+ the current viewing angle to set.
+
+
+
+ the number of viewing angles to set.
+
+
+
+
+
+ Set the #GstNavigation command query result fields in @query. The number
+of commands passed must be equal to @n_commands.
+
+
+
+
+
+ a #GstQuery
+
+
+
+ the number of commands to set.
+
+
+
+ A list of @GstNavigationCommand values, @n_cmds entries long.
+
+
+
+
+
+ Set the #GstNavigation command query result fields in @query. The number
+of commands passed must be equal to @n_commands.
+
+
+
+
+
+ a #GstQuery
+
+
+
+ the number of commands to set.
+
+
+
+ An array containing @n_cmds @GstNavigationCommand values.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Sends the indicated command to the navigation interface.
+
+
+
+
+
+ The navigation interface instance
+
+
+
+ The command to issue
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ The navigation interface instance
+
+
+
+ The type of the key event. Recognised values are "key-press" and
+"key-release"
+
+
+
+ Character representation of the key. This is typically as produced
+by XKeysymToString.
+
+
+
+
+
+ Sends a mouse event to the navigation interface. Mouse event coordinates
+are sent relative to the display space of the related output area. This is
+usually the size in pixels of the window associated with the element
+implementing the #GstNavigation interface.
+
+
+
+
+
+ The navigation interface instance
+
+
+
+ The type of mouse event, as a text string. Recognised values are
+"mouse-button-press", "mouse-button-release" and "mouse-move".
+
+
+
+ The button number of the button being pressed or released. Pass 0
+for mouse-move events.
+
+
+
+ The x coordinate of the mouse event.
+
+
+
+ The y coordinate of the mouse event.
+
+
+
+
+
+
+ A set of commands that may be issued to an element providing the
+#GstNavigation interface. The available commands can be queried via
+the gst_navigation_query_new_commands() query.
+
+For convenience in handling DVD navigation, the MENU commands are aliased as:
+ GST_NAVIGATION_COMMAND_DVD_MENU = @GST_NAVIGATION_COMMAND_MENU1
+ GST_NAVIGATION_COMMAND_DVD_TITLE_MENU = @GST_NAVIGATION_COMMAND_MENU2
+ GST_NAVIGATION_COMMAND_DVD_ROOT_MENU = @GST_NAVIGATION_COMMAND_MENU3
+ GST_NAVIGATION_COMMAND_DVD_SUBPICTURE_MENU = @GST_NAVIGATION_COMMAND_MENU4
+ GST_NAVIGATION_COMMAND_DVD_AUDIO_MENU = @GST_NAVIGATION_COMMAND_MENU5
+ GST_NAVIGATION_COMMAND_DVD_ANGLE_MENU = @GST_NAVIGATION_COMMAND_MENU6
+ GST_NAVIGATION_COMMAND_DVD_CHAPTER_MENU = @GST_NAVIGATION_COMMAND_MENU7
+
+ An invalid command entry
+
+
+ Execute navigation menu command 1. For DVD,
+this enters the DVD root menu, or exits back to the title from the menu.
+
+
+ Execute navigation menu command 2. For DVD,
+this jumps to the DVD title menu.
+
+
+ Execute navigation menu command 3. For DVD,
+this jumps into the DVD root menu.
+
+
+ Execute navigation menu command 4. For DVD,
+this jumps to the Subpicture menu.
+
+
+ Execute navigation menu command 5. For DVD,
+this jumps to the audio menu.
+
+
+ Execute navigation menu command 6. For DVD,
+this jumps to the angles menu.
+
+
+ Execute navigation menu command 7. For DVD,
+this jumps to the chapter menu.
+
+
+ Select the next button to the left in a menu,
+if such a button exists.
+
+
+ Select the next button to the right in a menu,
+if such a button exists.
+
+
+ Select the button above the current one in a
+menu, if such a button exists.
+
+
+ Select the button below the current one in a
+menu, if such a button exists.
+
+
+ Activate (click) the currently selected
+button in a menu, if such a button exists.
+
+
+ Switch to the previous angle in a
+multiangle feature.
+
+
+ Switch to the next angle in a multiangle
+feature.
+
+
+
+ Enum values for the various events that an element implementing the
+GstNavigation interface might send up the pipeline.
+
+ Returned from
+gst_navigation_event_get_type() when the passed event is not a navigation event.
+
+
+ A key press event. Use
+gst_navigation_event_parse_key_event() to extract the details from the event.
+
+
+ A key release event. Use
+gst_navigation_event_parse_key_event() to extract the details from the event.
+
+
+ A mouse button press event. Use
+gst_navigation_event_parse_mouse_button_event() to extract the details from the
+event.
+
+
+ A mouse button release event. Use
+gst_navigation_event_parse_mouse_button_event() to extract the details from the
+event.
+
+
+ A mouse movement event. Use
+gst_navigation_event_parse_mouse_move_event() to extract the details from the
+event.
+
+
+ A navigation command event. Use
+gst_navigation_event_parse_command() to extract the details from the event.
+
+
+
+ Navigation interface.
+
+ the parent interface
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ A set of notifications that may be received on the bus when navigation
+related status changes.
+
+ Returned from
+gst_navigation_message_get_type() when the passed message is not a
+navigation message.
+
+
+ Sent when the mouse moves over or leaves a
+clickable region of the output, such as a DVD menu button.
+
+
+ Sent when the set of available commands
+changes and should be re-queried by interested applications.
+
+
+ Sent when display angles in a multi-angle
+feature (such as a multiangle DVD) change - either angles have appeared or
+disappeared.
+
+
+ Sent when a navigation event was not handled
+by any element in the pipeline (Since 1.6)
+
+
+
+ Types of navigation interface queries.
+
+ invalid query
+
+
+ command query
+
+
+ viewing angle query
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #GST_TYPE_VIDEO_ALPHA_MODE, the alpha mode to use.
+Default is #GST_VIDEO_ALPHA_MODE_COPY.
+
+
+
+ #G_TYPE_DOUBLE, the alpha color value to use.
+Default to 1.0
+
+
+
+ #G_TYPE_UINT, the border color to use if #GST_VIDEO_CONVERTER_OPT_FILL_BORDER
+is set to %TRUE. The color is in ARGB format.
+Default 0xff000000
+
+
+
+ #GST_TYPE_VIDEO_CHROMA_MODE, set the chroma resample mode subsampled
+formats. Default is #GST_VIDEO_CHROMA_MODE_FULL.
+
+
+
+ #GST_TYPE_RESAMPLER_METHOD, The resampler method to use for
+chroma resampling. Other options for the resampler can be used, see
+the #GstResampler. Default is #GST_RESAMPLER_METHOD_LINEAR
+
+
+
+ #G_TYPE_INT, height in the destination frame, default destination height
+
+
+
+ #G_TYPE_INT, width in the destination frame, default destination width
+
+
+
+ #G_TYPE_INT, x position in the destination frame, default 0
+
+
+
+ #G_TYPE_INT, y position in the destination frame, default 0
+
+
+
+ #GST_TYPE_VIDEO_DITHER_METHOD, The dither method to use when
+changing bit depth.
+Default is #GST_VIDEO_DITHER_BAYER.
+
+
+
+ #G_TYPE_UINT, The quantization amount to dither to. Components will be
+quantized to multiples of this value.
+Default is 1
+
+
+
+ #G_TYPE_BOOLEAN, if the destination rectangle does not fill the complete
+destination image, render a border with
+#GST_VIDEO_CONVERTER_OPT_BORDER_ARGB. Otherwise the unused pixels in the
+destination are untouched. Default %TRUE.
+
+
+
+ #GST_TYPE_VIDEO_GAMMA_MODE, set the gamma mode.
+Default is #GST_VIDEO_GAMMA_MODE_NONE.
+
+
+
+ #GST_TYPE_VIDEO_MATRIX_MODE, set the color matrix conversion mode for
+converting between Y'PbPr and non-linear RGB (R'G'B').
+Default is #GST_VIDEO_MATRIX_MODE_FULL.
+
+
+
+ #GST_TYPE_VIDEO_PRIMARIES_MODE, set the primaries conversion mode.
+Default is #GST_VIDEO_PRIMARIES_MODE_NONE.
+
+
+
+ #GST_TYPE_RESAMPLER_METHOD, The resampler method to use for
+resampling. Other options for the resampler can be used, see
+the #GstResampler. Default is #GST_RESAMPLER_METHOD_CUBIC
+
+
+
+ #G_TYPE_UINT, The number of taps for the resampler.
+Default is 0: let the resampler choose a good value.
+
+
+
+ #G_TYPE_INT, source height to convert, default source height
+
+
+
+ #G_TYPE_INT, source width to convert, default source width
+
+
+
+ #G_TYPE_INT, source x position to start conversion, default 0
+
+
+
+ #G_TYPE_INT, source y position to start conversion, default 0
+
+
+
+ #G_TYPE_UINT, maximum number of threads to use. Default 1, 0 for the number
+of cores.
+
+
+
+ Default maximum number of errors tolerated before signaling error.
+
+
+
+ The name of the templates for the sink pad.
+
+
+
+ The name of the templates for the source pad.
+
+
+
+ The name of the templates for the sink pad.
+
+
+
+ The name of the templates for the source pad.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Video formats supported by gst_video_overlay_composition_blend(), for
+use in overlay elements' pad template caps.
+
+
+
+ G_TYPE_DOUBLE, B parameter of the cubic filter. The B
+parameter controls the bluriness. Values between 0.0 and
+2.0 are accepted. 1/3 is the default.
+
+Below are some values of popular filters:
+ B C
+Hermite 0.0 0.0
+Spline 1.0 0.0
+Catmull-Rom 0.0 1/2
+Mitchell 1/3 1/3
+Robidoux 0.3782 0.3109
+Robidoux
+ Sharp 0.2620 0.3690
+Robidoux
+ Soft 0.6796 0.1602
+
+
+
+ G_TYPE_DOUBLE, C parameter of the cubic filter. The C
+parameter controls the Keys alpha value. Values between 0.0 and
+2.0 are accepted. 1/3 is the default.
+
+See #GST_VIDEO_RESAMPLER_OPT_CUBIC_B for some more common values
+
+
+
+ G_TYPE_DOUBLE, specifies the size of filter envelope for
+@GST_VIDEO_RESAMPLER_METHOD_LANCZOS. values are clamped between
+1.0 and 5.0. 2.0 is the default.
+
+
+
+ G_TYPE_INT, limits the maximum number of taps to use.
+16 is the default.
+
+
+
+ G_TYPE_DOUBLE, specifies sharpening of the filter for
+@GST_VIDEO_RESAMPLER_METHOD_LANCZOS. values are clamped between
+0.0 and 1.0. 0.0 is the default.
+
+
+
+ G_TYPE_DOUBLE, specifies sharpness of the filter for
+@GST_VIDEO_RESAMPLER_METHOD_LANCZOS. values are clamped between
+0.5 and 1.5. 1.0 is the default.
+
+
+
+ #GST_TYPE_VIDEO_DITHER_METHOD, The dither method to use for propagating
+quatization errors.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Apply a transformation using the given 4x4 transformation matrix
+
+
+
+
+
+ a #GstVideoAffineTransformationMeta
+
+
+
+ a 4x4 transformation matrix to be applied
+
+
+
+
+
+
+
+
+
+
+
+ Extra alignment parameters for the memory of video buffers. This
+structure is usually used to configure the bufferpool if it supports the
+#GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT.
+
+ extra pixels on the top
+
+
+
+ extra pixels on the bottom
+
+
+
+ extra pixels on the left side
+
+
+
+ extra pixels on the right side
+
+
+
+ array with extra alignment requirements for the strides
+
+
+
+
+
+ Set @align to its default values with no padding and no alignment.
+
+
+
+
+
+ a #GstVideoAlignment
+
+
+
+
+
+
+ Different alpha modes.
+
+ When input and output have alpha, it will be copied.
+ When the input has no alpha, alpha will be set to
+ #GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE
+
+
+ set all alpha to
+ #GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE
+
+
+ multiply all alpha with
+ #GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE.
+ When the input format has no alpha but the output format has, the
+ alpha value will be set to #GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE
+
+
+
+ Additional video buffer flags. These flags can potentially be used on any
+buffers carrying video data - even encoded data.
+
+Note that these are only valid for #GstCaps of type: video/...
+They can conflict with other extended buffer flags.
+
+ If the #GstBuffer is interlaced. In mixed
+ interlace-mode, this flags specifies if the frame is
+ interlaced or progressive.
+
+
+ If the #GstBuffer is interlaced, then the first field
+ in the video frame is the top field. If unset, the
+ bottom field is first.
+
+
+ If the #GstBuffer is interlaced, then the first field
+ (as defined by the %GST_VIDEO_BUFFER_TFF flag setting)
+ is repeated.
+
+
+ If the #GstBuffer is interlaced, then only the
+ first field (as defined by the %GST_VIDEO_BUFFER_TFF
+ flag setting) is to be displayed.
+
+
+ The #GstBuffer contains one or more specific views,
+ such as left or right eye view. This flag is set on
+ any buffer that contains non-mono content - even for
+ streams that contain only a single viewpoint. In mixed
+ mono / non-mono streams, the absence of the flag marks
+ mono buffers.
+
+
+ When conveying stereo/multiview content with
+ frame-by-frame methods, this flag marks the first buffer
+ in a bundle of frames that belong together.
+
+
+ Offset to define more flags
+
+
+
+
+ Create a new bufferpool that can allocate video frames. This bufferpool
+supports all the video bufferpool options.
+
+ a new #GstBufferPool to allocate video frames
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Extra flags that influence the result from gst_video_chroma_resample_new().
+
+ no flags
+
+
+ the input is interlaced
+
+
+
+ Different subsampling and upsampling methods
+
+ Duplicates the chroma samples when
+ upsampling and drops when subsampling
+
+
+ Uses linear interpolation to reconstruct
+ missing chroma and averaging to subsample
+
+
+
+ Different chroma downsampling and upsampling modes
+
+ do full chroma up and down sampling
+
+
+ only perform chroma upsampling
+
+
+ only perform chroma downsampling
+
+
+ disable chroma resampling
+
+
+
+
+ Perform resampling of @width chroma pixels in @lines.
+
+
+
+
+
+ a #GstVideoChromaResample
+
+
+
+ pixel lines
+
+
+
+ the number of pixels on one line
+
+
+
+
+
+ Free @resample
+
+
+
+
+
+ a #GstVideoChromaResample
+
+
+
+
+
+ The resampler must be fed @n_lines at a time. The first line should be
+at @offset.
+
+
+
+
+
+ a #GstVideoChromaResample
+
+
+
+ the number of input lines
+
+
+
+ the first line
+
+
+
+
+
+ Create a new resampler object for the given parameters. When @h_factor or
+@v_factor is > 0, upsampling will be used, otherwise subsampling is
+performed.
+
+ a new #GstVideoChromaResample that should be freed with
+ gst_video_chroma_resample_free() after usage.
+
+
+
+
+ a #GstVideoChromaMethod
+
+
+
+ a #GstVideoChromaSite
+
+
+
+ #GstVideoChromaFlags
+
+
+
+ the #GstVideoFormat
+
+
+
+ horizontal resampling factor
+
+
+
+ vertical resampling factor
+
+
+
+
+
+
+ Various Chroma sitings.
+
+ unknown cositing
+
+
+ no cositing
+
+
+ chroma is horizontally cosited
+
+
+ chroma is vertically cosited
+
+
+ chroma samples are sited on alternate lines
+
+
+ chroma samples cosited with luma samples
+
+
+ jpeg style cositing, also for mpeg1 and mjpeg
+
+
+ mpeg2 style cositing
+
+
+ DV style cositing
+
+
+
+ A #GstVideoCodecFrame represents a video frame both in raw and
+encoded form.
+
+
+
+
+
+
+
+ Unique identifier for the frame. Use this if you need
+ to get hold of the frame later (like when data is being decoded).
+ Typical usage in decoders is to set this on the opaque value provided
+ to the library and get back the frame using gst_video_decoder_get_frame()
+
+
+
+
+
+
+
+
+
+ Decoding timestamp
+
+
+
+ Presentation timestamp
+
+
+
+ Duration of the frame
+
+
+
+ Distance in frames from the last synchronization point.
+
+
+
+ the input #GstBuffer that created this frame. The buffer is owned
+ by the frame and references to the frame instead of the buffer should
+ be kept.
+
+
+
+ the output #GstBuffer. Implementations should set this either
+ directly, or by using the
+ @gst_video_decoder_allocate_output_frame() or
+ @gst_video_decoder_allocate_output_buffer() methods. The buffer is
+ owned by the frame and references to the frame instead of the
+ buffer should be kept.
+
+
+
+ Running time when the frame will be used.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Gets private data set on the frame by the subclass via
+gst_video_codec_frame_set_user_data() previously.
+
+ The previously set user_data
+
+
+
+
+ a #GstVideoCodecFrame
+
+
+
+
+
+ Increases the refcount of the given frame by one.
+
+ @buf
+
+
+
+
+ a #GstVideoCodecFrame
+
+
+
+
+
+ Sets @user_data on the frame and the #GDestroyNotify that will be called when
+the frame is freed. Allows to attach private data by the subclass to frames.
+
+If a @user_data was previously set, then the previous set @notify will be called
+before the @user_data is replaced.
+
+
+
+
+
+ a #GstVideoCodecFrame
+
+
+
+ private data
+
+
+
+ a #GDestroyNotify
+
+
+
+
+
+ Decreases the refcount of the frame. If the refcount reaches 0, the frame
+will be freed.
+
+
+
+
+
+ a #GstVideoCodecFrame
+
+
+
+
+
+
+ Flags for #GstVideoCodecFrame
+
+ is the frame only meant to be decoded
+
+
+ is the frame a synchronization point (keyframe)
+
+
+ should the output frame be made a keyframe
+
+
+ should the encoder output stream headers
+
+
+
+ Structure representing the state of an incoming or outgoing video
+stream for encoders and decoders.
+
+Decoders and encoders will receive such a state through their
+respective @set_format vmethods.
+
+Decoders and encoders can set the downstream state, by using the
+@gst_video_decoder_set_output_state() or
+@gst_video_encoder_set_output_state() methods.
+
+
+
+
+ The #GstVideoInfo describing the stream
+
+
+
+ The #GstCaps used in the caps negotiation of the pad.
+
+
+
+ a #GstBuffer corresponding to the
+ 'codec_data' field of a stream, or NULL.
+
+
+
+ The #GstCaps for allocation query and pool
+ negotiation. Since: 1.10
+
+
+
+
+
+
+
+
+ Increases the refcount of the given state by one.
+
+ @buf
+
+
+
+
+ a #GstVideoCodecState
+
+
+
+
+
+ Decreases the refcount of the state. If the refcount reaches 0, the state
+will be freed.
+
+
+
+
+
+ a #GstVideoCodecState
+
+
+
+
+
+
+ The color matrix is used to convert between Y'PbPr and
+non-linear RGB (R'G'B')
+
+ unknown matrix
+
+
+ identity matrix
+
+
+ FCC color matrix
+
+
+ ITU-R BT.709 color matrix
+
+
+ ITU-R BT.601 color matrix
+
+
+ SMPTE 240M color matrix
+
+
+ ITU-R BT.2020 color matrix. Since: 1.6.
+
+
+ Get the coefficients used to convert between Y'PbPr and R'G'B' using @matrix.
+
+When:
+
+|[
+ (0.0 <= [Y',R',G',B'] <= 1.0)
+ (-0.5 <= [Pb,Pr] <= 0.5)
+]|
+
+the general conversion is given by:
+
+|[
+ Y' = Kr*R' + (1-Kr-Kb)*G' + Kb*B'
+ Pb = (B'-Y')/(2*(1-Kb))
+ Pr = (R'-Y')/(2*(1-Kr))
+]|
+
+and the other way around:
+
+|[
+ R' = Y' + Cr*2*(1-Kr)
+ G' = Y' - Cb*2*(1-Kb)*Kb/(1-Kr-Kb) - Cr*2*(1-Kr)*Kr/(1-Kr-Kb)
+ B' = Y' + Cb*2*(1-Kb)
+]|
+
+ TRUE if @matrix was a YUV color format and @Kr and @Kb contain valid
+ values.
+
+
+
+
+ a #GstVideoColorMatrix
+
+
+
+ result red channel coefficient
+
+
+
+ result blue channel coefficient
+
+
+
+
+
+
+ The color primaries define the how to transform linear RGB values to and from
+the CIE XYZ colorspace.
+
+ unknown color primaries
+
+
+ BT709 primaries
+
+
+ BT470M primaries
+
+
+ BT470BG primaries
+
+
+ SMPTE170M primaries
+
+
+ SMPTE240M primaries
+
+
+ Generic film
+
+
+ BT2020 primaries. Since: 1.6.
+
+
+ Adobe RGB primaries. Since: 1.8
+
+
+ Get information about the chromaticity coordinates of @primaries.
+
+ a #GstVideoColorPrimariesInfo for @primaries.
+
+
+
+
+ a #GstVideoColorPrimaries
+
+
+
+
+
+
+ Structure describing the chromaticity coordinates of an RGB system. These
+values can be used to construct a matrix to transform RGB to and from the
+XYZ colorspace.
+
+ a #GstVideoColorPrimaries
+
+
+
+ reference white x coordinate
+
+
+
+ reference white y coordinate
+
+
+
+ red x coordinate
+
+
+
+ red y coordinate
+
+
+
+ green x coordinate
+
+
+
+ green y coordinate
+
+
+
+ blue x coordinate
+
+
+
+ blue y coordinate
+
+
+
+
+ Possible color range values. These constants are defined for 8 bit color
+values and can be scaled for other bit depths.
+
+ unknown range
+
+
+ [0..255] for 8 bit components
+
+
+ [16..235] for 8 bit components. Chroma has
+ [16..240] range.
+
+
+ Compute the offset and scale values for each component of @info. For each
+component, (c[i] - offset[i]) / scale[i] will scale the component c[i] to the
+range [0.0 .. 1.0].
+
+The reverse operation (c[i] * scale[i]) + offset[i] can be used to convert
+the component values in range [0.0 .. 1.0] back to their representation in
+@info and @range.
+
+
+
+
+
+ a #GstVideoColorRange
+
+
+
+ a #GstVideoFormatInfo
+
+
+
+ output offsets
+
+
+
+ output scale
+
+
+
+
+
+
+ Structure describing the color info.
+
+ the color range. This is the valid range for the samples.
+ It is used to convert the samples to Y'PbPr values.
+
+
+
+ the color matrix. Used to convert between Y'PbPr and
+ non-linear RGB (R'G'B')
+
+
+
+ the transfer function. used to convert between R'G'B' and RGB
+
+
+
+ color primaries. used to convert between R'G'B' and CIE XYZ
+
+
+
+ Parse the colorimetry string and update @cinfo with the parsed
+values.
+
+ #TRUE if @color points to valid colorimetry info.
+
+
+
+
+ a #GstVideoColorimetry
+
+
+
+ a colorimetry string
+
+
+
+
+
+ Compare the 2 colorimetry sets for equality
+
+ #TRUE if @cinfo and @other are equal.
+
+
+
+
+ a #GstVideoColorimetry
+
+
+
+ another #GstVideoColorimetry
+
+
+
+
+
+ Check if the colorimetry information in @info matches that of the
+string @color.
+
+ #TRUE if @color conveys the same colorimetry info as the color
+information in @info.
+
+
+
+
+ a #GstVideoInfo
+
+
+
+ a colorimetry string
+
+
+
+
+
+ Make a string representation of @cinfo.
+
+ a string representation of @cinfo.
+
+
+
+
+ a #GstVideoColorimetry
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Convert the pixels of @src into @dest using @convert.
+
+
+
+
+
+ a #GstVideoConverter
+
+
+
+ a #GstVideoFrame
+
+
+
+ a #GstVideoFrame
+
+
+
+
+
+ Free @convert
+
+
+
+
+
+ a #GstVideoConverter
+
+
+
+
+
+ Get the current configuration of @convert.
+
+ a #GstStructure that remains valid for as long as @convert is valid
+ or until gst_video_converter_set_config() is called.
+
+
+
+
+ a #GstVideoConverter
+
+
+
+
+
+ Set @config as extra configuration for @convert.
+
+If the parameters in @config can not be set exactly, this function returns
+%FALSE and will try to update as much state as possible. The new state can
+then be retrieved and refined with gst_video_converter_get_config().
+
+Look at the #GST_VIDEO_CONVERTER_OPT_* fields to check valid configuration
+option and values.
+
+ %TRUE when @config could be set.
+
+
+
+
+ a #GstVideoConverter
+
+
+
+ a #GstStructure
+
+
+
+
+
+ Create a new converter object to convert between @in_info and @out_info
+with @config.
+
+ a #GstVideoConverter or %NULL if conversion is not possible.
+
+
+
+
+ a #GstVideoInfo
+
+
+
+ a #GstVideoInfo
+
+
+
+ a #GstStructure with configuration options
+
+
+
+
+
+
+ Extra buffer metadata describing image cropping.
+
+ parent #GstMeta
+
+
+
+ the horizontal offset
+
+
+
+ the vertical offset
+
+
+
+ the cropped width
+
+
+
+ the cropped height
+
+
+
+
+
+
+
+
+
+ This base class is for video decoders turning encoded data into raw video
+frames.
+
+The GstVideoDecoder base class and derived subclasses should cooperate as
+follows:
+
+## Configuration
+
+ * Initially, GstVideoDecoder calls @start when the decoder element
+ is activated, which allows the subclass to perform any global setup.
+
+ * GstVideoDecoder calls @set_format to inform the subclass of caps
+ describing input video data that it is about to receive, including
+ possibly configuration data.
+ While unlikely, it might be called more than once, if changing input
+ parameters require reconfiguration.
+
+ * Incoming data buffers are processed as needed, described in Data
+ Processing below.
+
+ * GstVideoDecoder calls @stop at end of all processing.
+
+## Data processing
+
+ * The base class gathers input data, and optionally allows subclass
+ to parse this into subsequently manageable chunks, typically
+ corresponding to and referred to as 'frames'.
+
+ * Each input frame is provided in turn to the subclass' @handle_frame
+ callback.
+ The ownership of the frame is given to the @handle_frame callback.
+
+ * If codec processing results in decoded data, the subclass should call
+ @gst_video_decoder_finish_frame to have decoded data pushed.
+ downstream. Otherwise, the subclass must call
+ @gst_video_decoder_drop_frame, to allow the base class to do timestamp
+ and offset tracking, and possibly to requeue the frame for a later
+ attempt in the case of reverse playback.
+
+## Shutdown phase
+
+ * The GstVideoDecoder class calls @stop to inform the subclass that data
+ parsing will be stopped.
+
+## Additional Notes
+
+ * Seeking/Flushing
+
+ * When the pipeline is seeked or otherwise flushed, the subclass is
+ informed via a call to its @reset callback, with the hard parameter
+ set to true. This indicates the subclass should drop any internal data
+ queues and timestamps and prepare for a fresh set of buffers to arrive
+ for parsing and decoding.
+
+ * End Of Stream
+
+ * At end-of-stream, the subclass @parse function may be called some final
+ times with the at_eos parameter set to true, indicating that the element
+ should not expect any more data to be arriving, and it should parse and
+ remaining frames and call gst_video_decoder_have_frame() if possible.
+
+The subclass is responsible for providing pad template caps for
+source and sink pads. The pads need to be named "sink" and "src". It also
+needs to provide information about the output caps, when they are known.
+This may be when the base class calls the subclass' @set_format function,
+though it might be during decoding, before calling
+@gst_video_decoder_finish_frame. This is done via
+@gst_video_decoder_set_output_state
+
+The subclass is also responsible for providing (presentation) timestamps
+(likely based on corresponding input ones). If that is not applicable
+or possible, the base class provides limited framerate based interpolation.
+
+Similarly, the base class provides some limited (legacy) seeking support
+if specifically requested by the subclass, as full-fledged support
+should rather be left to upstream demuxer, parser or alike. This simple
+approach caters for seeking and duration reporting using estimated input
+bitrates. To enable it, a subclass should call
+@gst_video_decoder_set_estimate_rate to enable handling of incoming
+byte-streams.
+
+The base class provides some support for reverse playback, in particular
+in case incoming data is not packetized or upstream does not provide
+fragments on keyframe boundaries. However, the subclass should then be
+prepared for the parsing and frame processing stage to occur separately
+(in normal forward processing, the latter immediately follows the former),
+The subclass also needs to ensure the parsing stage properly marks
+keyframes, unless it knows the upstream elements will do so properly for
+incoming data.
+
+The bare minimum that a functional subclass needs to implement is:
+
+ * Provide pad templates
+ * Inform the base class of output caps via
+ @gst_video_decoder_set_output_state
+
+ * Parse input data, if it is not considered packetized from upstream
+ Data will be provided to @parse which should invoke
+ @gst_video_decoder_add_to_frame and @gst_video_decoder_have_frame to
+ separate the data belonging to each video frame.
+
+ * Accept data in @handle_frame and provide decoded results to
+ @gst_video_decoder_finish_frame, or call @gst_video_decoder_drop_frame.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Negotiate with downstream elements to currently configured #GstVideoCodecState.
+Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
+negotiate fails.
+
+ #TRUE if the negotiation succeeded, else #FALSE.
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Removes next @n_bytes of input data and adds it to currently parsed frame.
+
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ the number of bytes to add
+
+
+
+
+
+ Helper function that allocates a buffer to hold a video frame for @decoder's
+current #GstVideoCodecState.
+
+You should use gst_video_decoder_allocate_output_frame() instead of this
+function, if possible at all.
+
+ allocated buffer, or NULL if no buffer could be
+ allocated (e.g. when downstream is flushing or shutting down)
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+ Helper function that allocates a buffer to hold a video frame for @decoder's
+current #GstVideoCodecState. Subclass should already have configured video
+state and set src pad caps.
+
+The buffer allocated here is owned by the frame and you should only
+keep references to the frame, not the buffer.
+
+ %GST_FLOW_OK if an output buffer could be allocated
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ a #GstVideoCodecFrame
+
+
+
+
+
+ Same as #gst_video_decoder_allocate_output_frame except it allows passing
+#GstBufferPoolAcquireParams to the sub call gst_buffer_pool_acquire_buffer.
+
+ %GST_FLOW_OK if an output buffer could be allocated
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ a #GstVideoCodecFrame
+
+
+
+ a #GstBufferPoolAcquireParams
+
+
+
+
+
+ Similar to gst_video_decoder_finish_frame(), but drops @frame in any
+case and posts a QoS message with the frame's details on the bus.
+In any case, the frame is considered finished and released.
+
+ a #GstFlowReturn, usually GST_FLOW_OK.
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ the #GstVideoCodecFrame to drop
+
+
+
+
+
+ @frame should have a valid decoded data buffer, whose metadata fields
+are then appropriately set according to frame data and pushed downstream.
+If no output data is provided, @frame is considered skipped.
+In any case, the frame is considered finished and released.
+
+After calling this function the output buffer of the frame is to be
+considered read-only. This function will also change the metadata
+of the buffer.
+
+ a #GstFlowReturn resulting from sending data downstream
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ a decoded #GstVideoCodecFrame
+
+
+
+
+
+ Lets #GstVideoDecoder sub-classes to know the memory @allocator
+used by the base class and its @params.
+
+Unref the @allocator after use it.
+
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ the #GstAllocator
+used
+
+
+
+ the
+#GstAllocatorParams of @allocator
+
+
+
+
+
+
+ the instance of the #GstBufferPool used
+by the decoder; free it after use it
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+
+ currently configured byte to time conversion setting
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+ Get a pending unfinished #GstVideoCodecFrame
+
+ pending unfinished #GstVideoCodecFrame identified by @frame_number.
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ system_frame_number of a frame
+
+
+
+
+
+ Get all pending unfinished #GstVideoCodecFrame
+
+ pending unfinished #GstVideoCodecFrame.
+
+
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+ Query the configured decoder latency. Results will be returned via
+@min_latency and @max_latency.
+
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ address of variable in which to store the
+ configured minimum latency, or %NULL
+
+
+
+ address of variable in which to store the
+ configured maximum latency, or %NULL
+
+
+
+
+
+ Determines maximum possible decoding time for @frame that will
+allow it to decode and arrive in time (as determined by QoS events).
+In particular, a negative result means decoding in time is no longer possible
+and should therefore occur as soon/skippy as possible.
+
+ max decoding time.
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ a #GstVideoCodecFrame
+
+
+
+
+
+
+ currently configured decoder tolerated error count.
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+ Queries decoder required format handling.
+
+ %TRUE if required format handling is enabled.
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+ Get the oldest pending unfinished #GstVideoCodecFrame
+
+ oldest pending unfinished #GstVideoCodecFrame.
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+ Get the #GstVideoCodecState currently describing the output stream.
+
+ #GstVideoCodecState describing format of video data.
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+ Queries whether input data is considered packetized or not by the
+base class.
+
+ TRUE if input data is considered packetized.
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+ Returns the number of bytes previously added to the current frame
+by calling gst_video_decoder_add_to_frame().
+
+ The number of bytes pending for the current frame
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+
+ The current QoS proportion.
+
+
+
+
+ a #GstVideoDecoder
+ current QoS proportion, or %NULL
+
+
+
+
+
+ Gathers all data collected for currently parsed frame, gathers corresponding
+metadata and passes it along for further processing, i.e. @handle_frame.
+
+ a #GstFlowReturn
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+ Sets the video decoder tags and how they should be merged with any
+upstream stream tags. This will override any tags previously-set
+with gst_video_decoder_merge_tags().
+
+Note that this is provided for convenience, and the subclass is
+not required to use this and can still do tag handling on its own.
+
+MT safe.
+
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ a #GstTagList to merge, or NULL to unset
+ previously-set tags
+
+
+
+ the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE
+
+
+
+
+
+ Negotiate with downstream elements to currently configured #GstVideoCodecState.
+Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
+negotiate fails.
+
+ #TRUE if the negotiation succeeded, else #FALSE.
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+ Returns caps that express @caps (or sink template caps if @caps == NULL)
+restricted to resolution/format/... combinations supported by downstream
+elements.
+
+ a #GstCaps owned by caller
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ initial caps
+
+
+
+ filter caps
+
+
+
+
+
+ Similar to gst_video_decoder_drop_frame(), but simply releases @frame
+without any processing other than removing it from list of pending frames,
+after which it is considered finished and released.
+
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ the #GstVideoCodecFrame to release
+
+
+
+
+
+ Allows baseclass to perform byte to time estimated conversion.
+
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ whether to enable byte to time conversion
+
+
+
+
+
+ Lets #GstVideoDecoder sub-classes tell the baseclass what the decoder
+latency is. Will also post a LATENCY message on the bus so the pipeline
+can reconfigure its global latency.
+
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ minimum latency
+
+
+
+ maximum latency
+
+
+
+
+
+ Sets numbers of tolerated decoder errors, where a tolerated one is then only
+warned about, but more than tolerated will lead to fatal error. You can set
+-1 for never returning fatal errors. Default is set to
+GST_VIDEO_DECODER_MAX_ERRORS.
+
+The '-1' option was added in 1.4
+
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ max tolerated errors
+
+
+
+
+
+ Configures decoder format needs. If enabled, subclass needs to be
+negotiated with format caps before it can process any data. It will then
+never be handed any data before it has been configured.
+Otherwise, it might be handed data without having been configured and
+is then expected being able to do so either by default
+or based on the input data.
+
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ new state
+
+
+
+
+
+ Creates a new #GstVideoCodecState with the specified @fmt, @width and @height
+as the output state for the decoder.
+Any previously set output state on @decoder will be replaced by the newly
+created one.
+
+If the subclass wishes to copy over existing fields (like pixel aspect ratio,
+or framerate) from an existing #GstVideoCodecState, it can be provided as a
+@reference.
+
+If the subclass wishes to override some fields from the output state (like
+pixel-aspect-ratio or framerate) it can do so on the returned #GstVideoCodecState.
+
+The new output state will only take effect (set on pads and buffers) starting
+from the next call to #gst_video_decoder_finish_frame().
+
+ the newly configured output state.
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ a #GstVideoFormat
+
+
+
+ The width in pixels
+
+
+
+ The height in pixels
+
+
+
+ An optional reference #GstVideoCodecState
+
+
+
+
+
+ Allows baseclass to consider input data as packetized or not. If the
+input is packetized, then the @parse method will not be called.
+
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ whether the input data should be considered as packetized.
+
+
+
+
+
+ Lets #GstVideoDecoder sub-classes decide if they want the sink pad
+to use the default pad query handler to reply to accept-caps queries.
+
+By setting this to true it is possible to further customize the default
+handler with %GST_PAD_SET_ACCEPT_INTERSECT and
+%GST_PAD_SET_ACCEPT_TEMPLATE
+
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+ if the default pad accept-caps query handling should be used
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Subclasses can override any of the available virtual methods or not, as
+needed. At minimum @handle_frame needs to be overridden, and likely
+@set_format as well. If non-packetized input is supported or expected,
+@parse needs to be overridden as well.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #TRUE if the negotiation succeeded, else #FALSE.
+
+
+
+
+ a #GstVideoDecoder
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ The interface allows unified access to control flipping and rotation
+operations of video-sources or operators.
+
+
+
+
+
+ #GstVideoDirectionInterface interface.
+
+ parent interface type.
+
+
+
+
+ GstVideoDither provides implementations of several dithering algorithms
+that can be applied to lines of video pixels to quantize and dither them.
+
+ Free @dither
+
+
+
+
+
+ a #GstVideoDither
+
+
+
+
+
+ Dither @width pixels starting from offset @x in @line using @dither.
+
+@y is the line number of @line in the output image.
+
+
+
+
+
+ a #GstVideoDither
+
+
+
+ pointer to the pixels of the line
+
+
+
+ x coordinate
+
+
+
+ y coordinate
+
+
+
+ the width
+
+
+
+
+
+ Make a new dither object for dithering lines of @format using the
+algorithm described by @method.
+
+Each component will be quantized to a multiple of @quantizer. Better
+performance is achieved when @quantizer is a power of 2.
+
+@width is the width of the lines that this ditherer will handle.
+
+ a new #GstVideoDither
+
+
+
+
+ a #GstVideoDitherMethod
+
+
+
+ a #GstVideoDitherFlags
+
+
+
+ a #GstVideoFormat
+
+
+
+ quantizer
+
+
+
+ the width of the lines
+
+
+
+
+
+
+ Extra flags that influence the result from gst_video_chroma_resample_new().
+
+ no flags
+
+
+ the input is interlaced
+
+
+ quantize values in addition to adding dither.
+
+
+
+ Different dithering methods to use.
+
+ no dithering
+
+
+ propagate rounding errors downwards
+
+
+ Dither with floyd-steinberg error diffusion
+
+
+ Dither with Sierra Lite error diffusion
+
+
+ ordered dither using a bayer pattern
+
+
+
+ This base class is for video encoders turning raw video into
+encoded video data.
+
+GstVideoEncoder and subclass should cooperate as follows.
+
+## Configuration
+
+ * Initially, GstVideoEncoder calls @start when the encoder element
+ is activated, which allows subclass to perform any global setup.
+ * GstVideoEncoder calls @set_format to inform subclass of the format
+ of input video data that it is about to receive. Subclass should
+ setup for encoding and configure base class as appropriate
+ (e.g. latency). While unlikely, it might be called more than once,
+ if changing input parameters require reconfiguration. Baseclass
+ will ensure that processing of current configuration is finished.
+ * GstVideoEncoder calls @stop at end of all processing.
+
+## Data processing
+
+ * Base class collects input data and metadata into a frame and hands
+ this to subclass' @handle_frame.
+
+ * If codec processing results in encoded data, subclass should call
+ @gst_video_encoder_finish_frame to have encoded data pushed
+ downstream.
+
+ * If implemented, baseclass calls subclass @pre_push just prior to
+ pushing to allow subclasses to modify some metadata on the buffer.
+ If it returns GST_FLOW_OK, the buffer is pushed downstream.
+
+ * GstVideoEncoderClass will handle both srcpad and sinkpad events.
+ Sink events will be passed to subclass if @event callback has been
+ provided.
+
+## Shutdown phase
+
+ * GstVideoEncoder class calls @stop to inform the subclass that data
+ parsing will be stopped.
+
+Subclass is responsible for providing pad template caps for
+source and sink pads. The pads need to be named "sink" and "src". It should
+also be able to provide fixed src pad caps in @getcaps by the time it calls
+@gst_video_encoder_finish_frame.
+
+Things that subclass need to take care of:
+
+ * Provide pad templates
+ * Provide source pad caps before pushing the first buffer
+ * Accept data in @handle_frame and provide encoded results to
+ @gst_video_encoder_finish_frame.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Negotiate with downstream elements to currently configured #GstVideoCodecState.
+Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
+negotiate fails.
+
+ #TRUE if the negotiation succeeded, else #FALSE.
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Helper function that allocates a buffer to hold an encoded video frame
+for @encoder's current #GstVideoCodecState.
+
+ allocated buffer
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+ size of the buffer
+
+
+
+
+
+ Helper function that allocates a buffer to hold an encoded video frame for @encoder's
+current #GstVideoCodecState. Subclass should already have configured video
+state and set src pad caps.
+
+The buffer allocated here is owned by the frame and you should only
+keep references to the frame, not the buffer.
+
+ %GST_FLOW_OK if an output buffer could be allocated
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+ a #GstVideoCodecFrame
+
+
+
+ size of the buffer
+
+
+
+
+
+ @frame must have a valid encoded data buffer, whose metadata fields
+are then appropriately set according to frame data or no buffer at
+all if the frame should be dropped.
+It is subsequently pushed downstream or provided to @pre_push.
+In any case, the frame is considered finished and released.
+
+After calling this function the output buffer of the frame is to be
+considered read-only. This function will also change the metadata
+of the buffer.
+
+ a #GstFlowReturn resulting from sending data downstream
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+ an encoded #GstVideoCodecFrame
+
+
+
+
+
+ Lets #GstVideoEncoder sub-classes to know the memory @allocator
+used by the base class and its @params.
+
+Unref the @allocator after use it.
+
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+ the #GstAllocator
+used
+
+
+
+ the
+#GstAllocatorParams of @allocator
+
+
+
+
+
+ Get a pending unfinished #GstVideoCodecFrame
+
+ pending unfinished #GstVideoCodecFrame identified by @frame_number.
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+ system_frame_number of a frame
+
+
+
+
+
+ Get all pending unfinished #GstVideoCodecFrame
+
+ pending unfinished #GstVideoCodecFrame.
+
+
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+
+
+ Query the configured encoding latency. Results will be returned via
+@min_latency and @max_latency.
+
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+ address of variable in which to store the
+ configured minimum latency, or %NULL
+
+
+
+ address of variable in which to store the
+ configured maximum latency, or %NULL
+
+
+
+
+
+ Get the oldest unfinished pending #GstVideoCodecFrame
+
+ oldest unfinished pending #GstVideoCodecFrame
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+
+
+ Get the current #GstVideoCodecState
+
+ #GstVideoCodecState describing format of video data.
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+
+
+ Sets the video encoder tags and how they should be merged with any
+upstream stream tags. This will override any tags previously-set
+with gst_video_encoder_merge_tags().
+
+Note that this is provided for convenience, and the subclass is
+not required to use this and can still do tag handling on its own.
+
+MT safe.
+
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+ a #GstTagList to merge, or NULL to unset
+ previously-set tags
+
+
+
+ the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE
+
+
+
+
+
+ Negotiate with downstream elements to currently configured #GstVideoCodecState.
+Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
+negotiate fails.
+
+ #TRUE if the negotiation succeeded, else #FALSE.
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+
+
+ Returns caps that express @caps (or sink template caps if @caps == NULL)
+restricted to resolution/format/... combinations supported by downstream
+elements (e.g. muxers).
+
+ a #GstCaps owned by caller
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+ initial caps
+
+
+
+ filter caps
+
+
+
+
+
+ Set the codec headers to be sent downstream whenever requested.
+
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+ a list of #GstBuffer containing the codec header
+
+
+
+
+
+
+
+ Informs baseclass of encoding latency.
+
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+ minimum latency
+
+
+
+ maximum latency
+
+
+
+
+
+ Request minimal value for PTS passed to handle_frame.
+
+For streams with reordered frames this can be used to ensure that there
+is enough time to accommodate first DTS, which may be less than first PTS
+
+Since 1.6
+
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+ minimal PTS that will be passed to handle_frame
+
+
+
+
+
+ Creates a new #GstVideoCodecState with the specified caps as the output state
+for the encoder.
+Any previously set output state on @encoder will be replaced by the newly
+created one.
+
+The specified @caps should not contain any resolution, pixel-aspect-ratio,
+framerate, codec-data, .... Those should be specified instead in the returned
+#GstVideoCodecState.
+
+If the subclass wishes to copy over existing fields (like pixel aspect ratio,
+or framerate) from an existing #GstVideoCodecState, it can be provided as a
+@reference.
+
+If the subclass wishes to override some fields from the output state (like
+pixel-aspect-ratio or framerate) it can do so on the returned #GstVideoCodecState.
+
+The new output state will only take effect (set on pads and buffers) starting
+from the next call to #gst_video_encoder_finish_frame().
+
+ the newly configured output state.
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+ the #GstCaps to use for the output
+
+
+
+ An optional reference @GstVideoCodecState
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Subclasses can override any of the available virtual methods or not, as
+needed. At minimum @handle_frame needs to be overridden, and @set_format
+and @get_caps are likely needed as well.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ #TRUE if the negotiation succeeded, else #FALSE.
+
+
+
+
+ a #GstVideoEncoder
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Field order of interlaced content. This is only valid for
+interlace-mode=interleaved and not interlace-mode=mixed. In the case of
+mixed or GST_VIDEO_FIELD_ORDER_UNKOWN, the field order is signalled via
+buffer flags.
+
+ unknown field order for interlaced content.
+ The actual field order is signalled via buffer flags.
+
+
+ top field is first
+
+
+ bottom field is first
+
+
+ Convert @order to a #GstVideoFieldOrder
+
+ the #GstVideoFieldOrder of @order or
+ #GST_VIDEO_FIELD_ORDER_UNKNOWN when @order is not a valid
+ string representation for a #GstVideoFieldOrder.
+
+
+
+
+ a field order
+
+
+
+
+
+ Convert @order to its string representation.
+
+ @order as a string or NULL if @order is invalid.
+
+
+
+
+ a #GstVideoFieldOrder
+
+
+
+
+
+
+ Provides useful functions and a base class for video filters.
+
+The videofilter will by default enable QoS on the parent GstBaseTransform
+to implement frame dropping.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ The video filter class structure.
+
+ the parent class structure
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Extra video flags
+
+ no flags
+
+
+ a variable fps is selected, fps_n and fps_d
+ denote the maximum fps of the video
+
+
+ Each color has been scaled by the alpha
+ value.
+
+
+
+ Enum value describing the most common video formats.
+
+ Unknown or unset video format id
+
+
+ Encoded video format. Only ever use that in caps for
+ special video formats in combination with non-system
+ memory GstCapsFeatures where it does not make sense
+ to specify a real video format.
+
+
+ planar 4:2:0 YUV
+
+
+ planar 4:2:0 YVU (like I420 but UV planes swapped)
+
+
+ packed 4:2:2 YUV (Y0-U0-Y1-V0 Y2-U2-Y3-V2 Y4 ...)
+
+
+ packed 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
+
+
+ packed 4:4:4 YUV with alpha channel (A0-Y0-U0-V0 ...)
+
+
+ sparse rgb packed into 32 bit, space last
+
+
+ sparse reverse rgb packed into 32 bit, space last
+
+
+ sparse rgb packed into 32 bit, space first
+
+
+ sparse reverse rgb packed into 32 bit, space first
+
+
+ rgb with alpha channel last
+
+
+ reverse rgb with alpha channel last
+
+
+ rgb with alpha channel first
+
+
+ reverse rgb with alpha channel first
+
+
+ rgb
+
+
+ reverse rgb
+
+
+ planar 4:1:1 YUV
+
+
+ planar 4:2:2 YUV
+
+
+ packed 4:2:2 YUV (Y0-V0-Y1-U0 Y2-V2-Y3-U2 Y4 ...)
+
+
+ planar 4:4:4 YUV
+
+
+ packed 4:2:2 10-bit YUV, complex format
+
+
+ packed 4:2:2 16-bit YUV, Y0-U0-Y1-V1 order
+
+
+ planar 4:2:0 YUV with interleaved UV plane
+
+
+ planar 4:2:0 YUV with interleaved VU plane
+
+
+ 8-bit grayscale
+
+
+ 16-bit grayscale, most significant byte first
+
+
+ 16-bit grayscale, least significant byte first
+
+
+ packed 4:4:4 YUV (Y-U-V ...)
+
+
+ rgb 5-6-5 bits per component
+
+
+ reverse rgb 5-6-5 bits per component
+
+
+ rgb 5-5-5 bits per component
+
+
+ reverse rgb 5-5-5 bits per component
+
+
+ packed 10-bit 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
+
+
+ planar 4:4:2:0 AYUV
+
+
+ 8-bit paletted RGB
+
+
+ planar 4:1:0 YUV
+
+
+ planar 4:1:0 YUV (like YUV9 but UV planes swapped)
+
+
+ packed 4:1:1 YUV (Cb-Y0-Y1-Cr-Y2-Y3 ...)
+
+
+ rgb with alpha channel first, 16 bits per channel
+
+
+ packed 4:4:4 YUV with alpha channel, 16 bits per channel (A0-Y0-U0-V0 ...)
+
+
+ packed 4:4:4 RGB, 10 bits per channel
+
+
+ planar 4:2:0 YUV, 10 bits per channel
+
+
+ planar 4:2:0 YUV, 10 bits per channel
+
+
+ planar 4:2:2 YUV, 10 bits per channel
+
+
+ planar 4:2:2 YUV, 10 bits per channel
+
+
+ planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
+
+
+ planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
+
+
+ planar 4:4:4 RGB, 8 bits per channel (Since: 1.2)
+
+
+ planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
+
+
+ planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
+
+
+ planar 4:2:2 YUV with interleaved UV plane (Since: 1.2)
+
+
+ planar 4:4:4 YUV with interleaved UV plane (Since: 1.2)
+
+
+ NV12 with 64x32 tiling in zigzag pattern (Since: 1.4)
+
+
+ planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
+
+
+ planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
+
+
+ planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
+
+
+ planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
+
+
+ planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
+
+
+ planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
+
+
+ planar 4:2:2 YUV with interleaved VU plane (Since: 1.6)
+
+
+ planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
+
+
+ planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
+
+
+ packed 4:4:4 YUV (U-Y-V ...) (Since 1.10)
+
+
+ packed 4:2:2 YUV (V0-Y0-U0-Y1 V2-Y2-U2-Y3 V4 ...)
+
+
+ planar 4:4:4:4 ARGB, 8 bits per channel (Since: 1.12)
+
+
+ planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
+
+
+ planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
+
+
+ planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
+
+
+ planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
+
+
+ planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
+
+
+ planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
+
+
+ planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
+
+
+ planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
+
+
+ planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
+
+
+ planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
+
+
+ planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
+
+
+ planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
+
+
+ Converts a FOURCC value into the corresponding #GstVideoFormat.
+If the FOURCC cannot be represented by #GstVideoFormat,
+#GST_VIDEO_FORMAT_UNKNOWN is returned.
+
+ the #GstVideoFormat describing the FOURCC value
+
+
+
+
+ a FOURCC value representing raw YUV video
+
+
+
+
+
+ Find the #GstVideoFormat for the given parameters.
+
+ a #GstVideoFormat or GST_VIDEO_FORMAT_UNKNOWN when the parameters do
+not specify a known format.
+
+
+
+
+ the amount of bits used for a pixel
+
+
+
+ the amount of bits used to store a pixel. This value is bigger than
+ @depth
+
+
+
+ the endianness of the masks, #G_LITTLE_ENDIAN or #G_BIG_ENDIAN
+
+
+
+ the red mask
+
+
+
+ the green mask
+
+
+
+ the blue mask
+
+
+
+ the alpha mask, or 0 if no alpha mask
+
+
+
+
+
+ Convert the @format string to its #GstVideoFormat.
+
+ the #GstVideoFormat for @format or GST_VIDEO_FORMAT_UNKNOWN when the
+string is not a known format.
+
+
+
+
+ a format string
+
+
+
+
+
+ Get the #GstVideoFormatInfo for @format
+
+ The #GstVideoFormatInfo for @format.
+
+
+
+
+ a #GstVideoFormat
+
+
+
+
+
+ Get the default palette of @format. This the palette used in the pack
+function for paletted formats.
+
+ the default palette of @format or %NULL when
+@format does not have a palette.
+
+
+
+
+ a #GstVideoFormat
+
+
+
+ size of the palette in bytes
+
+
+
+
+
+ Converts a #GstVideoFormat value into the corresponding FOURCC. Only
+a few YUV formats have corresponding FOURCC values. If @format has
+no corresponding FOURCC value, 0 is returned.
+
+ the FOURCC corresponding to @format
+
+
+
+
+ a #GstVideoFormat video format
+
+
+
+
+
+ Returns a string containing a descriptive name for
+the #GstVideoFormat if there is one, or NULL otherwise.
+
+ the name corresponding to @format
+
+
+
+
+ a #GstVideoFormat video format
+
+
+
+
+
+
+ The different video flags that a format info can have.
+
+ The video format is YUV, components are numbered
+ 0=Y, 1=U, 2=V.
+
+
+ The video format is RGB, components are numbered
+ 0=R, 1=G, 2=B.
+
+
+ The video is gray, there is one gray component
+ with index 0.
+
+
+ The video format has an alpha components with
+ the number 3.
+
+
+ The video format has data stored in little
+ endianness.
+
+
+ The video format has a palette. The palette
+ is stored in the second plane and indexes are stored in the first plane.
+
+
+ The video format has a complex layout that
+ can't be described with the usual information in the #GstVideoFormatInfo.
+
+
+ This format can be used in a
+ #GstVideoFormatUnpack and #GstVideoFormatPack function.
+
+
+ The format is tiled, there is tiling information
+ in the last plane.
+
+
+
+ Information for a video format.
+
+ #GstVideoFormat
+
+
+
+ string representation of the format
+
+
+
+ user readable description of the format
+
+
+
+ #GstVideoFormatFlags
+
+
+
+ The number of bits used to pack data items. This can be less than 8
+ when multiple pixels are stored in a byte. for values > 8 multiple bytes
+ should be read according to the endianness flag before applying the shift
+ and mask.
+
+
+
+ the number of components in the video format.
+
+
+
+ the number of bits to shift away to get the component data
+
+
+
+
+
+ the depth in bits for each component
+
+
+
+
+
+ the pixel stride of each component. This is the amount of
+ bytes to the pixel immediately to the right. When bits < 8, the stride is
+ expressed in bits. For 24-bit RGB, this would be 3 bytes, for example,
+ while it would be 4 bytes for RGBx or ARGB.
+
+
+
+
+
+ the number of planes for this format. The number of planes can be
+ less than the amount of components when multiple components are packed into
+ one plane.
+
+
+
+ the plane number where a component can be found
+
+
+
+
+
+ the offset in the plane where the first pixel of the components
+ can be found.
+
+
+
+
+
+ subsampling factor of the width for the component. Use
+ GST_VIDEO_SUB_SCALE to scale a width.
+
+
+
+
+
+ subsampling factor of the height for the component. Use
+ GST_VIDEO_SUB_SCALE to scale a height.
+
+
+
+
+
+ the format of the unpacked pixels. This format must have the
+ #GST_VIDEO_FORMAT_FLAG_UNPACK flag set.
+
+
+
+ an unpack function for this format
+
+
+
+ the amount of lines that will be packed
+
+
+
+ a pack function for this format
+
+
+
+ The tiling mode
+
+
+
+ The width of a tile, in bytes, represented as a shift
+
+
+
+ The height of a tile, in bytes, represented as a shift
+
+
+
+
+
+
+
+
+
+ Packs @width pixels from @src to the given planes and strides in the
+format @info. The pixels from source have each component interleaved
+and will be packed into the planes in @data.
+
+This function operates on pack_lines lines, meaning that @src should
+contain at least pack_lines lines with a stride of @sstride and @y
+should be a multiple of pack_lines.
+
+Subsampled formats will use the horizontally and vertically cosited
+component from the source. Subsampling should be performed before
+packing.
+
+Because this function does not have a x coordinate, it is not possible to
+pack pixels starting from an unaligned position. For tiled images this
+means that packing should start from a tile coordinate. For subsampled
+formats this means that a complete pixel needs to be packed.
+
+
+
+
+
+ a #GstVideoFormatInfo
+
+
+
+ flags to control the packing
+
+
+
+ a source array
+
+
+
+ the source array stride
+
+
+
+ pointers to the destination data planes
+
+
+
+ strides of the destination planes
+
+
+
+ the chroma siting of the target when subsampled (not used)
+
+
+
+ the y position in the image to pack to
+
+
+
+ the amount of pixels to pack.
+
+
+
+
+
+ Unpacks @width pixels from the given planes and strides containing data of
+format @info. The pixels will be unpacked into @dest with each component
+interleaved as per @info's unpack_format, which will usually be one of
+#GST_VIDEO_FORMAT_ARGB, #GST_VIDEO_FORMAT_AYUV, #GST_VIDEO_FORMAT_ARGB64 or
+#GST_VIDEO_FORMAT_AYUV64 depending on the format to unpack.
+@dest should at least be big enough to hold @width * bytes_per_pixel bytes
+where bytes_per_pixel relates to the unpack format and will usually be
+either 4 or 8 depending on the unpack format. bytes_per_pixel will be
+the same as the pixel stride for plane 0 for the above formats.
+
+For subsampled formats, the components will be duplicated in the destination
+array. Reconstruction of the missing components can be performed in a
+separate step after unpacking.
+
+
+
+
+
+ a #GstVideoFormatInfo
+
+
+
+ flags to control the unpacking
+
+
+
+ a destination array
+
+
+
+ pointers to the data planes
+
+
+
+ strides of the planes
+
+
+
+ the x position in the image to start from
+
+
+
+ the y position in the image to start from
+
+
+
+ the amount of pixels to unpack.
+
+
+
+
+
+ A video frame obtained from gst_video_frame_map()
+
+ the #GstVideoInfo
+
+
+
+ #GstVideoFrameFlags for the frame
+
+
+
+ the mapped buffer
+
+
+
+ pointer to metadata if any
+
+
+
+ id of the mapped frame. the id can for example be used to
+ identify the frame in case of multiview video.
+
+
+
+ pointers to the plane data
+
+
+
+
+
+ mappings of the planes
+
+
+
+
+
+
+
+
+
+
+ Copy the contents from @src to @dest.
+
+ TRUE if the contents could be copied.
+
+
+
+
+ a #GstVideoFrame
+
+
+
+ a #GstVideoFrame
+
+
+
+
+
+ Copy the plane with index @plane from @src to @dest.
+
+ TRUE if the contents could be copied.
+
+
+
+
+ a #GstVideoFrame
+
+
+
+ a #GstVideoFrame
+
+
+
+ a plane
+
+
+
+
+
+ Use @info and @buffer to fill in the values of @frame. @frame is usually
+allocated on the stack, and you will pass the address to the #GstVideoFrame
+structure allocated on the stack; gst_video_frame_map() will then fill in
+the structures with the various video-specific information you need to access
+the pixels of the video buffer. You can then use accessor macros such as
+GST_VIDEO_FRAME_COMP_DATA(), GST_VIDEO_FRAME_PLANE_DATA(),
+GST_VIDEO_FRAME_COMP_STRIDE(), GST_VIDEO_FRAME_PLANE_STRIDE() etc.
+to get to the pixels.
+
+|[<!-- language="C" -->
+ GstVideoFrame vframe;
+ ...
+ // set RGB pixels to black one at a time
+ if (gst_video_frame_map (&vframe, video_info, video_buffer, GST_MAP_WRITE)) {
+ guint8 *pixels = GST_VIDEO_FRAME_PLANE_DATA (vframe, 0);
+ guint stride = GST_VIDEO_FRAME_PLANE_STRIDE (vframe, 0);
+ guint pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (vframe, 0);
+
+ for (h = 0; h < height; ++h) {
+ for (w = 0; w < width; ++w) {
+ guint8 *pixel = pixels + h * stride + w * pixel_stride;
+
+ memset (pixel, 0, pixel_stride);
+ }
+ }
+
+ gst_video_frame_unmap (&vframe);
+ }
+ ...
+]|
+
+All video planes of @buffer will be mapped and the pointers will be set in
+@frame->data.
+
+The purpose of this function is to make it easy for you to get to the video
+pixels in a generic way, without you having to worry too much about details
+such as whether the video data is allocated in one contiguous memory chunk
+or multiple memory chunks (e.g. one for each plane); or if custom strides
+and custom plane offsets are used or not (as signalled by GstVideoMeta on
+each buffer). This function will just fill the #GstVideoFrame structure
+with the right values and if you use the accessor macros everything will
+just work and you can access the data easily. It also maps the underlying
+memory chunks for you.
+
+ %TRUE on success.
+
+
+
+
+ pointer to #GstVideoFrame
+
+
+
+ a #GstVideoInfo
+
+
+
+ the buffer to map
+
+
+
+ #GstMapFlags
+
+
+
+
+
+ Use @info and @buffer to fill in the values of @frame with the video frame
+information of frame @id.
+
+When @id is -1, the default frame is mapped. When @id != -1, this function
+will return %FALSE when there is no GstVideoMeta with that id.
+
+All video planes of @buffer will be mapped and the pointers will be set in
+@frame->data.
+
+ %TRUE on success.
+
+
+
+
+ pointer to #GstVideoFrame
+
+
+
+ a #GstVideoInfo
+
+
+
+ the buffer to map
+
+
+
+ the frame id to map
+
+
+
+ #GstMapFlags
+
+
+
+
+
+ Unmap the memory previously mapped with gst_video_frame_map.
+
+
+
+
+
+ a #GstVideoFrame
+
+
+
+
+
+
+ Extra video frame flags
+
+ no flags
+
+
+ The video frame is interlaced. In mixed
+ interlace-mode, this flag specifies if the frame is interlaced or
+ progressive.
+
+
+ The video frame has the top field first
+
+
+ The video frame has the repeat flag
+
+
+ The video frame has one field
+
+
+ The video contains one or
+ more non-mono views
+
+
+ The video frame is the first
+ in a set of corresponding views provided as sequential frames.
+
+
+
+ Additional mapping flags for gst_video_frame_map().
+
+ Don't take another reference of the buffer and store it in
+ the GstVideoFrame. This makes sure that the buffer stays
+ writable while the frame is mapped, but requires that the
+ buffer reference stays valid until the frame is unmapped again.
+
+
+ Offset to define more flags
+
+
+
+ The orientation of the GL texture.
+
+ Top line first in memory, left row first
+
+
+ Bottom line first in memory, left row first
+
+
+ Top line first in memory, right row first
+
+
+ Bottom line first in memory, right row first
+
+
+
+ The GL texture type.
+
+ Luminance texture, GL_LUMINANCE
+
+
+ Luminance-alpha texture, GL_LUMINANCE_ALPHA
+
+
+ RGB 565 texture, GL_RGB
+
+
+ RGB texture, GL_RGB
+
+
+ RGBA texture, GL_RGBA
+
+
+ R texture, GL_RED_EXT
+
+
+ RG texture, GL_RG_EXT
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Extra buffer metadata for uploading a buffer to an OpenGL texture
+ID. The caller of gst_video_gl_texture_upload_meta_upload() must
+have OpenGL set up and call this from a thread where it is valid
+to upload something to an OpenGL texture.
+
+ parent #GstMeta
+
+
+
+ Orientation of the textures
+
+
+
+ Number of textures that are generated
+
+
+
+ Type of each texture
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Uploads the buffer which owns the meta to a specific texture ID.
+
+ %TRUE if uploading succeeded, %FALSE otherwise.
+
+
+
+
+ a #GstVideoGLTextureUploadMeta
+
+
+
+ the texture IDs to upload to
+
+
+
+
+
+
+
+
+
+
+
+
+ disable gamma handling
+
+
+ convert between input and output gamma
+Different gamma conversion modes
+
+
+
+ Information describing image properties. This information can be filled
+in from GstCaps with gst_video_info_from_caps(). The information is also used
+to store the specific video info when mapping a video frame with
+gst_video_frame_map().
+
+Use the provided macros to access the info in this structure.
+
+ the format info of the video
+
+
+
+ the interlace mode
+
+
+
+ additional video flags
+
+
+
+ the width of the video
+
+
+
+ the height of the video
+
+
+
+ the default size of one frame
+
+
+
+ the number of views for multiview video
+
+
+
+ a #GstVideoChromaSite.
+
+
+
+ the colorimetry info
+
+
+
+ the pixel-aspect-ratio numerator
+
+
+
+ the pixel-aspect-ratio denominator
+
+
+
+ the framerate numerator
+
+
+
+ the framerate denominator
+
+
+
+ offsets of the planes
+
+
+
+
+
+ strides of the planes
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Allocate a new #GstVideoInfo that is also initialized with
+gst_video_info_init().
+
+ a new #GstVideoInfo. free with gst_video_info_free().
+
+
+
+
+ Adjust the offset and stride fields in @info so that the padding and
+stride alignment in @align is respected.
+
+Extra padding will be added to the right side when stride alignment padding
+is required and @align will be updated with the new padding values.
+
+ %FALSE if alignment could not be applied, e.g. because the
+ size of a frame can't be represented as a 32 bit integer (Since: 1.12)
+
+
+
+
+ a #GstVideoInfo
+
+
+
+ alignment parameters
+
+
+
+
+
+ Converts among various #GstFormat types. This function handles
+GST_FORMAT_BYTES, GST_FORMAT_TIME, and GST_FORMAT_DEFAULT. For
+raw video, GST_FORMAT_DEFAULT corresponds to video frames. This
+function can be used to handle pad queries of the type GST_QUERY_CONVERT.
+
+ TRUE if the conversion was successful.
+
+
+
+
+ a #GstVideoInfo
+
+
+
+ #GstFormat of the @src_value
+
+
+
+ value to convert
+
+
+
+ #GstFormat of the @dest_value
+
+
+
+ pointer to destination value
+
+
+
+
+
+ Copy a GstVideoInfo structure.
+
+ a new #GstVideoInfo. free with gst_video_info_free.
+
+
+
+
+ a #GstVideoInfo
+
+
+
+
+
+ Free a GstVideoInfo structure previously allocated with gst_video_info_new()
+or gst_video_info_copy().
+
+
+
+
+
+ a #GstVideoInfo
+
+
+
+
+
+ Parse @caps and update @info.
+
+ TRUE if @caps could be parsed
+
+
+
+
+ a #GstVideoInfo
+
+
+
+ a #GstCaps
+
+
+
+
+
+ Initialize @info with default values.
+
+
+
+
+
+ a #GstVideoInfo
+
+
+
+
+
+ Compares two #GstVideoInfo and returns whether they are equal or not
+
+ %TRUE if @info and @other are equal, else %FALSE.
+
+
+
+
+ a #GstVideoInfo
+
+
+
+ a #GstVideoInfo
+
+
+
+
+
+ Set the default info for a video frame of @format and @width and @height.
+
+Note: This initializes @info first, no values are preserved. This function
+does not set the offsets correctly for interlaced vertically
+subsampled formats.
+
+ %FALSE if the returned video info is invalid, e.g. because the
+ size of a frame can't be represented as a 32 bit integer (Since: 1.12)
+
+
+
+
+ a #GstVideoInfo
+
+
+
+ the format
+
+
+
+ a width
+
+
+
+ a height
+
+
+
+
+
+ Convert the values of @info into a #GstCaps.
+
+ a new #GstCaps containing the info of @info.
+
+
+
+
+ a #GstVideoInfo
+
+
+
+
+
+
+ The possible values of the #GstVideoInterlaceMode describing the interlace
+mode of the stream.
+
+ all frames are progressive
+
+
+ 2 fields are interleaved in one video
+ frame. Extra buffer flags describe the field order.
+
+
+ frames contains both interlaced and
+ progressive video, the buffer flags describe the frame and fields.
+
+
+ 2 fields are stored in one buffer, use the
+ frame ID to get access to the required field. For multiview (the
+ 'views' property > 1) the fields of view N can be found at frame ID
+ (N * 2) and (N * 2) + 1.
+ Each field has only half the amount of lines as noted in the
+ height property. This mode requires multiple GstVideoMeta metadata
+ to describe the fields.
+
+
+ Convert @mode to a #GstVideoInterlaceMode
+
+ the #GstVideoInterlaceMode of @mode or
+ #GST_VIDEO_INTERLACE_MODE_PROGRESSIVE when @mode is not a valid
+ string representation for a #GstVideoInterlaceMode.
+
+
+
+
+ a mode
+
+
+
+
+
+ Convert @mode to its string representation.
+
+ @mode as a string or NULL if @mode is invalid.
+
+
+
+
+ a #GstVideoInterlaceMode
+
+
+
+
+
+
+ Different color matrix conversion modes
+
+ do conversion between color matrices
+
+
+ use the input color matrix to convert
+ to and from R'G'B
+
+
+ use the output color matrix to convert
+ to and from R'G'B
+
+
+ disable color matrix conversion.
+
+
+
+ Extra buffer metadata describing image properties
+
+ parent #GstMeta
+
+
+
+ the buffer this metadata belongs to
+
+
+
+ additional video flags
+
+
+
+ the video format
+
+
+
+ identifier of the frame
+
+
+
+ the video width
+
+
+
+ the video height
+
+
+
+ the number of planes in the image
+
+
+
+ array of offsets for the planes. This field might not always be
+ valid, it is used by the default implementation of @map.
+
+
+
+
+
+ array of strides for the planes. This field might not always be
+ valid, it is used by the default implementation of @map.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Map the video plane with index @plane in @meta and return a pointer to the
+first byte of the plane and the stride of the plane.
+
+ TRUE if the map operation was successful.
+
+
+
+
+ a #GstVideoMeta
+
+
+
+ a plane
+
+
+
+ a #GstMapInfo
+
+
+
+ the data of @plane
+
+
+
+ the stride of @plane
+
+
+
+ @GstMapFlags
+
+
+
+
+
+ Unmap a previously mapped plane with gst_video_meta_map().
+
+ TRUE if the memory was successfully unmapped.
+
+
+
+
+ a #GstVideoMeta
+
+
+
+ a plane
+
+
+
+ a #GstMapInfo
+
+
+
+
+
+
+
+
+
+
+
+ Extra data passed to a video transform #GstMetaTransformFunction such as:
+"gst-video-scale".
+
+ the input #GstVideoInfo
+
+
+
+ the output #GstVideoInfo
+
+
+
+ Get the #GQuark for the "gst-video-scale" metadata transform operation.
+
+ a #GQuark
+
+
+
+
+
+ GstVideoMultiviewFlags are used to indicate extra properties of a
+stereo/multiview stream beyond the frame layout and buffer mapping
+that is conveyed in the #GstMultiviewMode.
+
+ No flags
+
+
+ For stereo streams, the
+ normal arrangement of left and right views is reversed.
+
+
+ The left view is vertically
+ mirrored.
+
+
+ The left view is horizontally
+ mirrored.
+
+
+ The right view is
+ vertically mirrored.
+
+
+ The right view is
+ horizontally mirrored.
+
+
+ For frame-packed
+ multiview modes, indicates that the individual
+ views have been encoded with half the true width or height
+ and should be scaled back up for display. This flag
+ is used for overriding input layout interpretation
+ by adjusting pixel-aspect-ratio.
+ For side-by-side, column interleaved or checkerboard packings, the
+ pixel width will be doubled. For row interleaved and top-bottom
+ encodings, pixel height will be doubled.
+
+
+ The video stream contains both
+ mono and multiview portions, signalled on each buffer by the
+ absence or presence of the @GST_VIDEO_BUFFER_FLAG_MULTIPLE_VIEW
+ buffer flag.
+
+
+
+
+
+ #GstVideoMultiviewFramePacking represents the subset of #GstVideoMultiviewMode
+values that can be applied to any video frame without needing extra metadata.
+It can be used by elements that provide a property to override the
+multiview interpretation of a video stream when the video doesn't contain
+any markers.
+
+This enum is used (for example) on playbin, to re-interpret a played
+video stream as a stereoscopic video. The individual enum values are
+equivalent to and have the same value as the matching #GstVideoMultiviewMode.
+
+ A special value indicating
+no frame packing info.
+
+
+ All frames are monoscopic.
+
+
+ All frames represent a left-eye view.
+
+
+ All frames represent a right-eye view.
+
+
+ Left and right eye views are
+provided in the left and right half of the frame respectively.
+
+
+ Left and right eye
+views are provided in the left and right half of the frame, but
+have been sampled using quincunx method, with half-pixel offset
+between the 2 views.
+
+
+ Alternating vertical
+columns of pixels represent the left and right eye view respectively.
+
+
+ Alternating horizontal
+rows of pixels represent the left and right eye view respectively.
+
+
+ The top half of the frame
+contains the left eye, and the bottom half the right eye.
+
+
+ Pixels are arranged with
+alternating pixels representing left and right eye views in a
+checkerboard fashion.
+
+
+
+ All possible stereoscopic 3D and multiview representations.
+In conjunction with #GstVideoMultiviewFlags, describes how
+multiview content is being transported in the stream.
+
+ A special value indicating
+no multiview information. Used in GstVideoInfo and other places to
+indicate that no specific multiview handling has been requested or
+provided. This value is never carried on caps.
+
+
+ All frames are monoscopic.
+
+
+ All frames represent a left-eye view.
+
+
+ All frames represent a right-eye view.
+
+
+ Left and right eye views are
+provided in the left and right half of the frame respectively.
+
+
+ Left and right eye
+views are provided in the left and right half of the frame, but
+have been sampled using quincunx method, with half-pixel offset
+between the 2 views.
+
+
+ Alternating vertical
+columns of pixels represent the left and right eye view respectively.
+
+
+ Alternating horizontal
+rows of pixels represent the left and right eye view respectively.
+
+
+ The top half of the frame
+contains the left eye, and the bottom half the right eye.
+
+
+ Pixels are arranged with
+alternating pixels representing left and right eye views in a
+checkerboard fashion.
+
+
+ Left and right eye views
+are provided in separate frames alternately.
+
+
+ Multiple
+independent views are provided in separate frames in sequence.
+This method only applies to raw video buffers at the moment.
+Specific view identification is via the #GstVideoMultiviewMeta
+and #GstVideoMeta(s) on raw video buffers.
+
+
+ Multiple views are
+provided as separate #GstMemory framebuffers attached to each
+#GstBuffer, described by the #GstVideoMultiviewMeta
+and #GstVideoMeta(s)
+
+
+
+ The #GstVideoMultiviewMode value
+
+Given a string from a caps multiview-mode field,
+output the corresponding #GstVideoMultiviewMode
+or #GST_VIDEO_MULTIVIEW_MODE_NONE
+
+
+
+
+ multiview-mode field string from caps
+
+
+
+
+
+
+ The caps string representation of the mode, or NULL if invalid.
+
+Given a #GstVideoMultiviewMode returns the multiview-mode caps string
+for insertion into a caps structure
+
+
+
+
+ A #GstVideoMultiviewMode value
+
+
+
+
+
+
+ The interface allows unified access to control flipping and autocenter
+operation of video-sources or operators.
+
+ Get the horizontal centering offset from the given object.
+
+ %TRUE in case the element supports centering
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ return location for the result
+
+
+
+
+
+ Get the horizontal flipping state (%TRUE for flipped) from the given object.
+
+ %TRUE in case the element supports flipping
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ return location for the result
+
+
+
+
+
+ Get the vertical centering offset from the given object.
+
+ %TRUE in case the element supports centering
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ return location for the result
+
+
+
+
+
+ Get the vertical flipping state (%TRUE for flipped) from the given object.
+
+ %TRUE in case the element supports flipping
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ return location for the result
+
+
+
+
+
+ Set the horizontal centering offset for the given object.
+
+ %TRUE in case the element supports centering
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ centering offset
+
+
+
+
+
+ Set the horizontal flipping state (%TRUE for flipped) for the given object.
+
+ %TRUE in case the element supports flipping
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ use flipping
+
+
+
+
+
+ Set the vertical centering offset for the given object.
+
+ %TRUE in case the element supports centering
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ centering offset
+
+
+
+
+
+ Set the vertical flipping state (%TRUE for flipped) for the given object.
+
+ %TRUE in case the element supports flipping
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ use flipping
+
+
+
+
+
+ Get the horizontal centering offset from the given object.
+
+ %TRUE in case the element supports centering
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ return location for the result
+
+
+
+
+
+ Get the horizontal flipping state (%TRUE for flipped) from the given object.
+
+ %TRUE in case the element supports flipping
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ return location for the result
+
+
+
+
+
+ Get the vertical centering offset from the given object.
+
+ %TRUE in case the element supports centering
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ return location for the result
+
+
+
+
+
+ Get the vertical flipping state (%TRUE for flipped) from the given object.
+
+ %TRUE in case the element supports flipping
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ return location for the result
+
+
+
+
+
+ Set the horizontal centering offset for the given object.
+
+ %TRUE in case the element supports centering
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ centering offset
+
+
+
+
+
+ Set the horizontal flipping state (%TRUE for flipped) for the given object.
+
+ %TRUE in case the element supports flipping
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ use flipping
+
+
+
+
+
+ Set the vertical centering offset for the given object.
+
+ %TRUE in case the element supports centering
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ centering offset
+
+
+
+
+
+ Set the vertical flipping state (%TRUE for flipped) for the given object.
+
+ %TRUE in case the element supports flipping
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ use flipping
+
+
+
+
+
+
+ #GstVideoOrientationInterface interface.
+
+ parent interface type.
+
+
+
+
+
+ %TRUE in case the element supports flipping
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ return location for the result
+
+
+
+
+
+
+
+
+ %TRUE in case the element supports flipping
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ return location for the result
+
+
+
+
+
+
+
+
+ %TRUE in case the element supports centering
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ return location for the result
+
+
+
+
+
+
+
+
+ %TRUE in case the element supports centering
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ return location for the result
+
+
+
+
+
+
+
+
+ %TRUE in case the element supports flipping
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ use flipping
+
+
+
+
+
+
+
+
+ %TRUE in case the element supports flipping
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ use flipping
+
+
+
+
+
+
+
+
+ %TRUE in case the element supports centering
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ centering offset
+
+
+
+
+
+
+
+
+ %TRUE in case the element supports centering
+
+
+
+
+ #GstVideoOrientation interface of a #GstElement
+
+
+
+ centering offset
+
+
+
+
+
+
+
+ The different video orientation methods.
+
+ Identity (no rotation)
+
+
+ Rotate clockwise 90 degrees
+
+
+ Rotate 180 degrees
+
+
+ Rotate counter-clockwise 90 degrees
+
+
+ Flip horizontally
+
+
+ Flip vertically
+
+
+ Flip across upper left/lower right diagonal
+
+
+ Flip across upper right/lower left diagonal
+
+
+ Select flip method based on image-orientation tag
+
+
+ Current status depends on plugin internal setup
+
+
+
+ The #GstVideoOverlay interface is used for 2 main purposes :
+
+* To get a grab on the Window where the video sink element is going to render.
+ This is achieved by either being informed about the Window identifier that
+ the video sink element generated, or by forcing the video sink element to use
+ a specific Window identifier for rendering.
+* To force a redrawing of the latest video frame the video sink element
+ displayed on the Window. Indeed if the #GstPipeline is in #GST_STATE_PAUSED
+ state, moving the Window around will damage its content. Application
+ developers will want to handle the Expose events themselves and force the
+ video sink element to refresh the Window's content.
+
+Using the Window created by the video sink is probably the simplest scenario,
+in some cases, though, it might not be flexible enough for application
+developers if they need to catch events such as mouse moves and button
+clicks.
+
+Setting a specific Window identifier on the video sink element is the most
+flexible solution but it has some issues. Indeed the application needs to set
+its Window identifier at the right time to avoid internal Window creation
+from the video sink element. To solve this issue a #GstMessage is posted on
+the bus to inform the application that it should set the Window identifier
+immediately. Here is an example on how to do that correctly:
+|[
+static GstBusSyncReply
+create_window (GstBus * bus, GstMessage * message, GstPipeline * pipeline)
+{
+ // ignore anything but 'prepare-window-handle' element messages
+ if (!gst_is_video_overlay_prepare_window_handle_message (message))
+ return GST_BUS_PASS;
+
+ win = XCreateSimpleWindow (disp, root, 0, 0, 320, 240, 0, 0, 0);
+
+ XSetWindowBackgroundPixmap (disp, win, None);
+
+ XMapRaised (disp, win);
+
+ XSync (disp, FALSE);
+
+ gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message)),
+ win);
+
+ gst_message_unref (message);
+
+ return GST_BUS_DROP;
+}
+...
+int
+main (int argc, char **argv)
+{
+...
+ bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
+ gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, pipeline,
+ NULL);
+...
+}
+]|
+
+## Two basic usage scenarios
+
+There are two basic usage scenarios: in the simplest case, the application
+uses #playbin or #playsink or knows exactly what particular element is used
+for video output, which is usually the case when the application creates
+the videosink to use (e.g. #xvimagesink, #ximagesink, etc.) itself; in this
+case, the application can just create the videosink element, create and
+realize the window to render the video on and then
+call gst_video_overlay_set_window_handle() directly with the XID or native
+window handle, before starting up the pipeline.
+As #playbin and #playsink implement the video overlay interface and proxy
+it transparently to the actual video sink even if it is created later, this
+case also applies when using these elements.
+
+In the other and more common case, the application does not know in advance
+what GStreamer video sink element will be used for video output. This is
+usually the case when an element such as #autovideosink is used.
+In this case, the video sink element itself is created
+asynchronously from a GStreamer streaming thread some time after the
+pipeline has been started up. When that happens, however, the video sink
+will need to know right then whether to render onto an already existing
+application window or whether to create its own window. This is when it
+posts a prepare-window-handle message, and that is also why this message needs
+to be handled in a sync bus handler which will be called from the streaming
+thread directly (because the video sink will need an answer right then).
+
+As response to the prepare-window-handle element message in the bus sync
+handler, the application may use gst_video_overlay_set_window_handle() to tell
+the video sink to render onto an existing window surface. At this point the
+application should already have obtained the window handle / XID, so it
+just needs to set it. It is generally not advisable to call any GUI toolkit
+functions or window system functions from the streaming thread in which the
+prepare-window-handle message is handled, because most GUI toolkits and
+windowing systems are not thread-safe at all and a lot of care would be
+required to co-ordinate the toolkit and window system calls of the
+different threads (Gtk+ users please note: prior to Gtk+ 2.18
+GDK_WINDOW_XID() was just a simple structure access, so generally fine to do
+within the bus sync handler; this macro was changed to a function call in
+Gtk+ 2.18 and later, which is likely to cause problems when called from a
+sync handler; see below for a better approach without GDK_WINDOW_XID()
+used in the callback).
+
+## GstVideoOverlay and Gtk+
+
+|[
+#include <gst/video/videooverlay.h>
+#include <gtk/gtk.h>
+#ifdef GDK_WINDOWING_X11
+#include <gdk/gdkx.h> // for GDK_WINDOW_XID
+#endif
+#ifdef GDK_WINDOWING_WIN32
+#include <gdk/gdkwin32.h> // for GDK_WINDOW_HWND
+#endif
+...
+static guintptr video_window_handle = 0;
+...
+static GstBusSyncReply
+bus_sync_handler (GstBus * bus, GstMessage * message, gpointer user_data)
+{
+ // ignore anything but 'prepare-window-handle' element messages
+ if (!gst_is_video_overlay_prepare_window_handle_message (message))
+ return GST_BUS_PASS;
+
+ if (video_window_handle != 0) {
+ GstVideoOverlay *overlay;
+
+ // GST_MESSAGE_SRC (message) will be the video sink element
+ overlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message));
+ gst_video_overlay_set_window_handle (overlay, video_window_handle);
+ } else {
+ g_warning ("Should have obtained video_window_handle by now!");
+ }
+
+ gst_message_unref (message);
+ return GST_BUS_DROP;
+}
+...
+static void
+video_widget_realize_cb (GtkWidget * widget, gpointer data)
+{
+#if GTK_CHECK_VERSION(2,18,0)
+ // Tell Gtk+/Gdk to create a native window for this widget instead of
+ // drawing onto the parent widget.
+ // This is here just for pedagogical purposes, GDK_WINDOW_XID will call
+ // it as well in newer Gtk versions
+ if (!gdk_window_ensure_native (widget->window))
+ g_error ("Couldn't create native window needed for GstVideoOverlay!");
+#endif
+
+#ifdef GDK_WINDOWING_X11
+ {
+ gulong xid = GDK_WINDOW_XID (gtk_widget_get_window (video_window));
+ video_window_handle = xid;
+ }
+#endif
+#ifdef GDK_WINDOWING_WIN32
+ {
+ HWND wnd = GDK_WINDOW_HWND (gtk_widget_get_window (video_window));
+ video_window_handle = (guintptr) wnd;
+ }
+#endif
+}
+...
+int
+main (int argc, char **argv)
+{
+ GtkWidget *video_window;
+ GtkWidget *app_window;
+ ...
+ app_window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
+ ...
+ video_window = gtk_drawing_area_new ();
+ g_signal_connect (video_window, "realize",
+ G_CALLBACK (video_widget_realize_cb), NULL);
+ gtk_widget_set_double_buffered (video_window, FALSE);
+ ...
+ // usually the video_window will not be directly embedded into the
+ // application window like this, but there will be many other widgets
+ // and the video window will be embedded in one of them instead
+ gtk_container_add (GTK_CONTAINER (app_window), video_window);
+ ...
+ // show the GUI
+ gtk_widget_show_all (app_window);
+
+ // realize window now so that the video window gets created and we can
+ // obtain its XID/HWND before the pipeline is started up and the videosink
+ // asks for the XID/HWND of the window to render onto
+ gtk_widget_realize (video_window);
+
+ // we should have the XID/HWND now
+ g_assert (video_window_handle != 0);
+ ...
+ // set up sync handler for setting the xid once the pipeline is started
+ bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
+ gst_bus_set_sync_handler (bus, (GstBusSyncHandler) bus_sync_handler, NULL,
+ NULL);
+ gst_object_unref (bus);
+ ...
+ gst_element_set_state (pipeline, GST_STATE_PLAYING);
+ ...
+}
+]|
+
+## GstVideoOverlay and Qt
+
+|[
+#include <glib.h>
+#include <gst/gst.h>
+#include <gst/video/videooverlay.h>
+
+#include <QApplication>
+#include <QTimer>
+#include <QWidget>
+
+int main(int argc, char *argv[])
+{
+ if (!g_thread_supported ())
+ g_thread_init (NULL);
+
+ gst_init (&argc, &argv);
+ QApplication app(argc, argv);
+ app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit ()));
+
+ // prepare the pipeline
+
+ GstElement *pipeline = gst_pipeline_new ("xvoverlay");
+ GstElement *src = gst_element_factory_make ("videotestsrc", NULL);
+ GstElement *sink = gst_element_factory_make ("xvimagesink", NULL);
+ gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
+ gst_element_link (src, sink);
+
+ // prepare the ui
+
+ QWidget window;
+ window.resize(320, 240);
+ window.show();
+
+ WId xwinid = window.winId();
+ gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), xwinid);
+
+ // run the pipeline
+
+ GstStateChangeReturn sret = gst_element_set_state (pipeline,
+ GST_STATE_PLAYING);
+ if (sret == GST_STATE_CHANGE_FAILURE) {
+ gst_element_set_state (pipeline, GST_STATE_NULL);
+ gst_object_unref (pipeline);
+ // Exit application
+ QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
+ }
+
+ int ret = app.exec();
+
+ window.hide();
+ gst_element_set_state (pipeline, GST_STATE_NULL);
+ gst_object_unref (pipeline);
+
+ return ret;
+}
+]|
+
+ Tell an overlay that it has been exposed. This will redraw the current frame
+in the drawable even if the pipeline is PAUSED.
+
+
+
+
+
+ a #GstVideoOverlay to expose.
+
+
+
+
+
+ Tell an overlay that it should handle events from the window system. These
+events are forwarded upstream as navigation events. In some window systems,
+events are not propagated in the window hierarchy if a client is listening
+for them. This method allows you to disable events handling completely
+from the #GstVideoOverlay.
+
+
+
+
+
+ a #GstVideoOverlay to expose.
+
+
+
+ a #gboolean indicating if events should be handled or not.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This will call the video overlay's set_window_handle method. You
+should use this method to tell to an overlay to display video output to a
+specific window (e.g. an XWindow on X11). Passing 0 as the @handle will
+tell the overlay to stop using that window and create an internal one.
+
+
+
+
+
+ a #GstVideoOverlay to set the window on.
+
+
+
+ a handle referencing the window.
+
+
+
+
+
+ Tell an overlay that it has been exposed. This will redraw the current frame
+in the drawable even if the pipeline is PAUSED.
+
+
+
+
+
+ a #GstVideoOverlay to expose.
+
+
+
+
+
+ This will post a "have-window-handle" element message on the bus.
+
+This function should only be used by video overlay plugin developers.
+
+
+
+
+
+ a #GstVideoOverlay which got a window
+
+
+
+ a platform-specific handle referencing the window
+
+
+
+
+
+ Tell an overlay that it should handle events from the window system. These
+events are forwarded upstream as navigation events. In some window systems,
+events are not propagated in the window hierarchy if a client is listening
+for them. This method allows you to disable events handling completely
+from the #GstVideoOverlay.
+
+
+
+
+
+ a #GstVideoOverlay to expose.
+
+
+
+ a #gboolean indicating if events should be handled or not.
+
+
+
+
+
+ This will post a "prepare-window-handle" element message on the bus
+to give applications an opportunity to call
+gst_video_overlay_set_window_handle() before a plugin creates its own
+window.
+
+This function should only be used by video overlay plugin developers.
+
+
+
+
+
+ a #GstVideoOverlay which does not yet have a Window handle set
+
+
+
+
+
+ Configure a subregion as a video target within the window set by
+gst_video_overlay_set_window_handle(). If this is not used or not supported
+the video will fill the area of the window set as the overlay to 100%.
+By specifying the rectangle, the video can be overlaid to a specific region
+of that window only. After setting the new rectangle one should call
+gst_video_overlay_expose() to force a redraw. To unset the region pass -1 for
+the @width and @height parameters.
+
+This method is needed for non fullscreen video overlay in UI toolkits that
+do not support subwindows.
+
+ %FALSE if not supported by the sink.
+
+
+
+
+ a #GstVideoOverlay
+
+
+
+ the horizontal offset of the render area inside the window
+
+
+
+ the vertical offset of the render area inside the window
+
+
+
+ the width of the render area inside the window
+
+
+
+ the height of the render area inside the window
+
+
+
+
+
+ This will call the video overlay's set_window_handle method. You
+should use this method to tell to an overlay to display video output to a
+specific window (e.g. an XWindow on X11). Passing 0 as the @handle will
+tell the overlay to stop using that window and create an internal one.
+
+
+
+
+
+ a #GstVideoOverlay to set the window on.
+
+
+
+ a handle referencing the window.
+
+
+
+
+
+
+ Functions to create and handle overlay compositions on video buffers.
+
+An overlay composition describes one or more overlay rectangles to be
+blended on top of a video buffer.
+
+This API serves two main purposes:
+
+* it can be used to attach overlay information (subtitles or logos)
+ to non-raw video buffers such as GL/VAAPI/VDPAU surfaces. The actual
+ blending of the overlay can then be done by e.g. the video sink that
+ processes these non-raw buffers.
+
+* it can also be used to blend overlay rectangles on top of raw video
+ buffers, thus consolidating blending functionality for raw video in
+ one place.
+
+Together, this allows existing overlay elements to easily handle raw
+and non-raw video as input without major changes (once the overlays
+have been put into a #GstOverlayComposition object anyway) - for raw
+video the overlay can just use the blending function to blend the data
+on top of the video, and for surface buffers it can just attach them to
+the buffer and let the sink render the overlays.
+
+ Creates a new video overlay composition object to hold one or more
+overlay rectangles.
+
+ a new #GstVideoOverlayComposition. Unref with
+ gst_video_overlay_composition_unref() when no longer needed.
+
+
+
+
+ a #GstVideoOverlayRectangle to add to the
+ composition
+
+
+
+
+
+ Adds an overlay rectangle to an existing overlay composition object. This
+must be done right after creating the overlay composition.
+
+
+
+
+
+ a #GstVideoOverlayComposition
+
+
+
+ a #GstVideoOverlayRectangle to add to the
+ composition
+
+
+
+
+
+ Blends the overlay rectangles in @comp on top of the raw video data
+contained in @video_buf. The data in @video_buf must be writable and
+mapped appropriately.
+
+Since @video_buf data is read and will be modified, it ought be
+mapped with flag GST_MAP_READWRITE.
+
+
+
+
+
+ a #GstVideoOverlayComposition
+
+
+
+ a #GstVideoFrame containing raw video data in a
+ supported format. It should be mapped using GST_MAP_READWRITE
+
+
+
+
+
+ Makes a copy of @comp and all contained rectangles, so that it is possible
+to modify the composition and contained rectangles (e.g. add additional
+rectangles or change the render co-ordinates or render dimension). The
+actual overlay pixel data buffers contained in the rectangles are not
+copied.
+
+ a new #GstVideoOverlayComposition equivalent
+ to @comp.
+
+
+
+
+ a #GstVideoOverlayComposition to copy
+
+
+
+
+
+ Returns the @n-th #GstVideoOverlayRectangle contained in @comp.
+
+ the @n-th rectangle, or NULL if @n is out of
+ bounds. Will not return a new reference, the caller will need to
+ obtain her own reference using gst_video_overlay_rectangle_ref()
+ if needed.
+
+
+
+
+ a #GstVideoOverlayComposition
+
+
+
+ number of the rectangle to get
+
+
+
+
+
+ Returns the sequence number of this composition. Sequence numbers are
+monotonically increasing and unique for overlay compositions and rectangles
+(meaning there will never be a rectangle with the same sequence number as
+a composition).
+
+ the sequence number of @comp
+
+
+
+
+ a #GstVideoOverlayComposition
+
+
+
+
+
+ Takes ownership of @comp and returns a version of @comp that is writable
+(i.e. can be modified). Will either return @comp right away, or create a
+new writable copy of @comp and unref @comp itself. All the contained
+rectangles will also be copied, but the actual overlay pixel data buffers
+contained in the rectangles are not copied.
+
+ a writable #GstVideoOverlayComposition
+ equivalent to @comp.
+
+
+
+
+ a #GstVideoOverlayComposition to copy
+
+
+
+
+
+ Returns the number of #GstVideoOverlayRectangle<!-- -->s contained in @comp.
+
+ the number of rectangles
+
+
+
+
+ a #GstVideoOverlayComposition
+
+
+
+
+
+
+ Extra buffer metadata describing image overlay data.
+
+ parent #GstMeta
+
+
+
+ the attached #GstVideoOverlayComposition
+
+
+
+
+
+
+
+
+
+ Overlay format flags.
+
+ no flags
+
+
+ RGB are premultiplied by A/255.
+
+
+ a global-alpha value != 1 is set.
+
+
+
+ #GstVideoOverlay interface
+
+ parent interface type.
+
+
+
+
+
+
+
+
+
+ a #GstVideoOverlay to expose.
+
+
+
+
+
+
+
+
+
+
+
+
+ a #GstVideoOverlay to expose.
+
+
+
+ a #gboolean indicating if events should be handled or not.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ a #GstVideoOverlay to set the window on.
+
+
+
+ a handle referencing the window.
+
+
+
+
+
+
+
+ An opaque video overlay rectangle object. A rectangle contains a single
+overlay rectangle which can be added to a composition.
+
+ Creates a new video overlay rectangle with ARGB or AYUV pixel data.
+The layout in case of ARGB of the components in memory is B-G-R-A
+on little-endian platforms
+(corresponding to #GST_VIDEO_FORMAT_BGRA) and A-R-G-B on big-endian
+platforms (corresponding to #GST_VIDEO_FORMAT_ARGB). In other words,
+pixels are treated as 32-bit words and the lowest 8 bits then contain
+the blue component value and the highest 8 bits contain the alpha
+component value. Unless specified in the flags, the RGB values are
+non-premultiplied. This is the format that is used by most hardware,
+and also many rendering libraries such as Cairo, for example.
+The pixel data buffer must have #GstVideoMeta set.
+
+ a new #GstVideoOverlayRectangle. Unref with
+ gst_video_overlay_rectangle_unref() when no longer needed.
+
+
+
+
+ a #GstBuffer pointing to the pixel memory
+
+
+
+ the X co-ordinate on the video where the top-left corner of this
+ overlay rectangle should be rendered to
+
+
+
+ the Y co-ordinate on the video where the top-left corner of this
+ overlay rectangle should be rendered to
+
+
+
+ the render width of this rectangle on the video
+
+
+
+ the render height of this rectangle on the video
+
+
+
+ flags
+
+
+
+
+
+ Makes a copy of @rectangle, so that it is possible to modify it
+(e.g. to change the render co-ordinates or render dimension). The
+actual overlay pixel data buffers contained in the rectangle are not
+copied.
+
+ a new #GstVideoOverlayRectangle equivalent
+ to @rectangle.
+
+
+
+
+ a #GstVideoOverlayRectangle to copy
+
+
+
+
+
+ Retrieves the flags associated with a #GstVideoOverlayRectangle.
+This is useful if the caller can handle both premultiplied alpha and
+non premultiplied alpha, for example. By knowing whether the rectangle
+uses premultiplied or not, it can request the pixel data in the format
+it is stored in, to avoid unnecessary conversion.
+
+ the #GstVideoOverlayFormatFlags associated with the rectangle.
+
+
+
+
+ a #GstVideoOverlayRectangle
+
+
+
+
+
+ Retrieves the global-alpha value associated with a #GstVideoOverlayRectangle.
+
+ the global-alpha value associated with the rectangle.
+
+
+
+
+ a #GstVideoOverlayRectangle
+
+
+
+
+
+
+ a #GstBuffer holding the ARGB pixel data with
+ width and height of the render dimensions as per
+ gst_video_overlay_rectangle_get_render_rectangle(). This function does
+ not return a reference, the caller should obtain a reference of her own
+ with gst_buffer_ref() if needed.
+
+
+
+
+ a #GstVideoOverlayRectangle
+
+
+
+ flags
+ If a global_alpha value != 1 is set for the rectangle, the caller
+ should set the #GST_VIDEO_OVERLAY_FORMAT_FLAG_GLOBAL_ALPHA flag
+ if he wants to apply global-alpha himself. If the flag is not set
+ global_alpha is applied internally before returning the pixel-data.
+
+
+
+
+
+
+ a #GstBuffer holding the AYUV pixel data with
+ width and height of the render dimensions as per
+ gst_video_overlay_rectangle_get_render_rectangle(). This function does
+ not return a reference, the caller should obtain a reference of her own
+ with gst_buffer_ref() if needed.
+
+
+
+
+ a #GstVideoOverlayRectangle
+
+
+
+ flags
+ If a global_alpha value != 1 is set for the rectangle, the caller
+ should set the #GST_VIDEO_OVERLAY_FORMAT_FLAG_GLOBAL_ALPHA flag
+ if he wants to apply global-alpha himself. If the flag is not set
+ global_alpha is applied internally before returning the pixel-data.
+
+
+
+
+
+
+ a #GstBuffer holding the pixel data with
+ format as originally provided and specified in video meta with
+ width and height of the render dimensions as per
+ gst_video_overlay_rectangle_get_render_rectangle(). This function does
+ not return a reference, the caller should obtain a reference of her own
+ with gst_buffer_ref() if needed.
+
+
+
+
+ a #GstVideoOverlayRectangle
+
+
+
+ flags
+ If a global_alpha value != 1 is set for the rectangle, the caller
+ should set the #GST_VIDEO_OVERLAY_FORMAT_FLAG_GLOBAL_ALPHA flag
+ if he wants to apply global-alpha himself. If the flag is not set
+ global_alpha is applied internally before returning the pixel-data.
+
+
+
+
+
+ Retrieves the pixel data as it is. This is useful if the caller can
+do the scaling itself when handling the overlaying. The rectangle will
+need to be scaled to the render dimensions, which can be retrieved using
+gst_video_overlay_rectangle_get_render_rectangle().
+
+ a #GstBuffer holding the ARGB pixel data with
+ #GstVideoMeta set. This function does not return a reference, the caller
+ should obtain a reference of her own with gst_buffer_ref() if needed.
+
+
+
+
+ a #GstVideoOverlayRectangle
+
+
+
+ flags.
+ If a global_alpha value != 1 is set for the rectangle, the caller
+ should set the #GST_VIDEO_OVERLAY_FORMAT_FLAG_GLOBAL_ALPHA flag
+ if he wants to apply global-alpha himself. If the flag is not set
+ global_alpha is applied internally before returning the pixel-data.
+
+
+
+
+
+ Retrieves the pixel data as it is. This is useful if the caller can
+do the scaling itself when handling the overlaying. The rectangle will
+need to be scaled to the render dimensions, which can be retrieved using
+gst_video_overlay_rectangle_get_render_rectangle().
+
+ a #GstBuffer holding the AYUV pixel data with
+ #GstVideoMeta set. This function does not return a reference, the caller
+ should obtain a reference of her own with gst_buffer_ref() if needed.
+
+
+
+
+ a #GstVideoOverlayRectangle
+
+
+
+ flags.
+ If a global_alpha value != 1 is set for the rectangle, the caller
+ should set the #GST_VIDEO_OVERLAY_FORMAT_FLAG_GLOBAL_ALPHA flag
+ if he wants to apply global-alpha himself. If the flag is not set
+ global_alpha is applied internally before returning the pixel-data.
+
+
+
+
+
+ Retrieves the pixel data as it is. This is useful if the caller can
+do the scaling itself when handling the overlaying. The rectangle will
+need to be scaled to the render dimensions, which can be retrieved using
+gst_video_overlay_rectangle_get_render_rectangle().
+
+ a #GstBuffer holding the pixel data with
+ #GstVideoMeta set. This function does not return a reference, the caller
+ should obtain a reference of her own with gst_buffer_ref() if needed.
+
+
+
+
+ a #GstVideoOverlayRectangle
+
+
+
+ flags.
+ If a global_alpha value != 1 is set for the rectangle, the caller
+ should set the #GST_VIDEO_OVERLAY_FORMAT_FLAG_GLOBAL_ALPHA flag
+ if he wants to apply global-alpha himself. If the flag is not set
+ global_alpha is applied internally before returning the pixel-data.
+
+
+
+
+
+ Retrieves the render position and render dimension of the overlay
+rectangle on the video.
+
+ TRUE if valid render dimensions were retrieved.
+
+
+
+
+ a #GstVideoOverlayRectangle
+
+
+
+ address where to store the X render offset
+
+
+
+ address where to store the Y render offset
+
+
+
+ address where to store the render width
+
+
+
+ address where to store the render height
+
+
+
+
+
+ Returns the sequence number of this rectangle. Sequence numbers are
+monotonically increasing and unique for overlay compositions and rectangles
+(meaning there will never be a rectangle with the same sequence number as
+a composition).
+
+Using the sequence number of a rectangle as an indicator for changed
+pixel-data of a rectangle is dangerous. Some API calls, like e.g.
+gst_video_overlay_rectangle_set_global_alpha(), automatically update
+the per rectangle sequence number, which is misleading for renderers/
+consumers, that handle global-alpha themselves. For them the
+pixel-data returned by gst_video_overlay_rectangle_get_pixels_*()
+won't be different for different global-alpha values. In this case a
+renderer could also use the GstBuffer pointers as a hint for changed
+pixel-data.
+
+ the sequence number of @rectangle
+
+
+
+
+ a #GstVideoOverlayRectangle
+
+
+
+
+
+ Sets the global alpha value associated with a #GstVideoOverlayRectangle. Per-
+pixel alpha values are multiplied with this value. Valid
+values: 0 <= global_alpha <= 1; 1 to deactivate.
+
+@rectangle must be writable, meaning its refcount must be 1. You can
+make the rectangles inside a #GstVideoOverlayComposition writable using
+gst_video_overlay_composition_make_writable() or
+gst_video_overlay_composition_copy().
+
+
+
+
+
+ a #GstVideoOverlayRectangle
+
+
+
+ Global alpha value (0 to 1.0)
+
+
+
+
+
+ Sets the render position and dimensions of the rectangle on the video.
+This function is mainly for elements that modify the size of the video
+in some way (e.g. through scaling or cropping) and need to adjust the
+details of any overlays to match the operation that changed the size.
+
+@rectangle must be writable, meaning its refcount must be 1. You can
+make the rectangles inside a #GstVideoOverlayComposition writable using
+gst_video_overlay_composition_make_writable() or
+gst_video_overlay_composition_copy().
+
+
+
+
+
+ a #GstVideoOverlayRectangle
+
+
+
+ render X position of rectangle on video
+
+
+
+ render Y position of rectangle on video
+
+
+
+ render width of rectangle
+
+
+
+ render height of rectangle
+
+
+
+
+
+
+ The different flags that can be used when packing and unpacking.
+
+ No flag
+
+
+ When the source has a smaller depth
+ than the target format, set the least significant bits of the target
+ to 0. This is likely slightly faster but less accurate. When this flag
+ is not specified, the most significant bits of the source are duplicated
+ in the least significant bits of the destination.
+
+
+ The source is interlaced. The unpacked
+ format will be interlaced as well with each line containing
+ information from alternating fields. (Since 1.2)
+
+
+
+ Different primaries conversion modes
+
+ disable conversion between primaries
+
+
+ do conversion between primaries only
+ when it can be merged with color matrix conversion.
+
+
+ fast conversion between primaries
+
+
+
+ Helper structure representing a rectangular area.
+
+ X coordinate of rectangle's top-left point
+
+
+
+ Y coordinate of rectangle's top-left point
+
+
+
+ width of the rectangle
+
+
+
+ height of the rectangle
+
+
+
+
+ Extra buffer metadata describing an image region of interest
+
+ parent #GstMeta
+
+
+
+ GQuark describing the semantic of the Roi (f.i. a face, a pedestrian)
+
+
+
+ identifier of this particular ROI
+
+
+
+ identifier of its parent ROI, used f.i. for ROI hierarchisation.
+
+
+
+ x component of upper-left corner
+
+
+
+ y component of upper-left corner
+
+
+
+ bounding box width
+
+
+
+ bounding box height
+
+
+
+
+
+
+
+
+
+ #GstVideoResampler is a structure which holds the information
+required to perform various kinds of resampling filtering.
+
+ the input size
+
+
+
+ the output size
+
+
+
+ the maximum number of taps
+
+
+
+ the number of phases
+
+
+
+ array with the source offset for each output element
+
+
+
+ array with the phase to use for each output element
+
+
+
+ array with new number of taps for each phase
+
+
+
+ the taps for all phases
+
+
+
+
+
+
+
+
+ Clear a previously initialized #GstVideoResampler @resampler.
+
+
+
+
+
+ a #GstVideoResampler
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Different resampler flags.
+
+ no flags
+
+
+ when no taps are given, half the
+ number of calculated taps. This can be used when making scalers
+ for the different fields of an interlaced picture. Since 1.10
+
+
+
+ Different subsampling and upsampling methods
+
+ Duplicates the samples when
+ upsampling and drops when downsampling
+
+
+ Uses linear interpolation to reconstruct
+ missing samples and averaging to downsample
+
+
+ Uses cubic interpolation
+
+
+ Uses sinc interpolation
+
+
+ Uses lanczos interpolation
+
+
+
+ #GstVideoScaler is a utility object for rescaling and resampling
+video frames using various interpolation / sampling methods.
+
+ Scale a rectangle of pixels in @src with @src_stride to @dest with
+@dest_stride using the horizontal scaler @hscaler and the vertical
+scaler @vscale.
+
+One or both of @hscale and @vscale can be NULL to only perform scaling in
+one dimension or do a copy without scaling.
+
+@x and @y are the coordinates in the destination image to process.
+
+
+
+
+
+ a horizontal #GstVideoScaler
+
+
+
+ a vertical #GstVideoScaler
+
+
+
+ a #GstVideoFormat for @srcs and @dest
+
+
+
+ source pixels
+
+
+
+ source pixels stride
+
+
+
+ destination pixels
+
+
+
+ destination pixels stride
+
+
+
+ the horizontal destination offset
+
+
+
+ the vertical destination offset
+
+
+
+ the number of output pixels to scale
+
+
+
+ the number of output lines to scale
+
+
+
+
+
+ Combine a scaler for Y and UV into one scaler for the packed @format.
+
+ a new horizontal videoscaler for @format.
+
+
+
+
+ a scaler for the Y component
+
+
+
+ a scaler for the U and V components
+
+
+
+ the input video format
+
+
+
+ the output video format
+
+
+
+
+
+ Free a previously allocated #GstVideoScaler @scale.
+
+
+
+
+
+ a #GstVideoScaler
+
+
+
+
+
+ For a given pixel at @out_offset, get the first required input pixel at
+@in_offset and the @n_taps filter coefficients.
+
+Note that for interlaced content, @in_offset needs to be incremented with
+2 to get the next input line.
+
+ an array of @n_tap gdouble values with filter coefficients.
+
+
+
+
+ a #GstVideoScaler
+
+
+
+ an output offset
+
+
+
+ result input offset
+
+
+
+ result n_taps
+
+
+
+
+
+ Get the maximum number of taps for @scale.
+
+ the maximum number of taps
+
+
+
+
+ a #GstVideoScaler
+
+
+
+
+
+ Horizontally scale the pixels in @src to @dest, starting from @dest_offset
+for @width samples.
+
+
+
+
+
+ a #GstVideoScaler
+
+
+
+ a #GstVideoFormat for @src and @dest
+
+
+
+ source pixels
+
+
+
+ destination pixels
+
+
+
+ the horizontal destination offset
+
+
+
+ the number of pixels to scale
+
+
+
+
+
+ Vertically combine @width pixels in the lines in @src_lines to @dest.
+@dest is the location of the target line at @dest_offset and
+@srcs are the input lines for @dest_offset, as obtained with
+gst_video_scaler_get_info().
+
+
+
+
+
+ a #GstVideoScaler
+
+
+
+ a #GstVideoFormat for @srcs and @dest
+
+
+
+ source pixels lines
+
+
+
+ destination pixels
+
+
+
+ the vertical destination offset
+
+
+
+ the number of pixels to scale
+
+
+
+
+
+ Make a new @method video scaler. @in_size source lines/pixels will
+be scaled to @out_size destination lines/pixels.
+
+@n_taps specifies the amount of pixels to use from the source for one output
+pixel. If n_taps is 0, this function chooses a good value automatically based
+on the @method and @in_size/@out_size.
+
+ a #GstVideoScaler
+
+
+
+
+ a #GstVideoResamplerMethod
+
+
+
+ #GstVideoScalerFlags
+
+
+
+ number of taps to use
+
+
+
+ number of source elements
+
+
+
+ number of destination elements
+
+
+
+ extra options
+
+
+
+
+
+
+ Different scale flags.
+
+ no flags
+
+
+ Set up a scaler for interlaced content
+
+
+
+ Provides useful functions and a base class for video sinks.
+
+GstVideoSink will configure the default base sink to drop frames that
+arrive later than 20ms as this is considered the default threshold for
+observing out-of-sync frames.
+
+ Takes @src rectangle and position it at the center of @dst rectangle with or
+without @scaling. It handles clipping if the @src rectangle is bigger than
+the @dst one and @scaling is set to FALSE.
+
+
+
+
+
+ the #GstVideoRectangle describing the source area
+
+
+
+ the #GstVideoRectangle describing the destination area
+
+
+
+ a pointer to a #GstVideoRectangle which will receive the result area
+
+
+
+ a #gboolean indicating if scaling should be applied or not
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Whether to show video frames during preroll. If set to #FALSE, video
+frames will only be rendered in PLAYING state.
+
+
+
+
+
+
+ video width (derived class needs to set this)
+
+
+
+ video height (derived class needs to set this)
+
+
+
+
+
+
+
+
+
+
+
+
+ The video sink class structure. Derived classes should override the
+@show_frame virtual function.
+
+ the parent class structure
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Enum value describing the available tiling modes.
+
+ Unknown or unset tile mode
+
+
+ Every four adjacent blocks - two
+ horizontally and two vertically are grouped together and are located
+ in memory in Z or flipped Z order. In case of odd rows, the last row
+ of blocks is arranged in linear order.
+
+
+
+ Enum value describing the most common tiling types.
+
+ Tiles are indexed. Use
+ gst_video_tile_get_index () to retrieve the tile at the requested
+ coordinates.
+
+
+
+ @field_count must be 0 for progressive video and 1 or 2 for interlaced.
+
+A representation of a SMPTE time code.
+
+@hours must be positive and less than 24. Will wrap around otherwise.
+@minutes and @seconds must be positive and less than 60.
+@frames must be less than or equal to @config.fps_n / @config.fps_d
+These values are *NOT* automatically normalized.
+
+ the corresponding #GstVideoTimeCodeConfig
+
+
+
+ the hours field of #GstVideoTimeCode
+
+
+
+ the minutes field of #GstVideoTimeCode
+
+
+
+ the seconds field of #GstVideoTimeCode
+
+
+
+ the frames field of #GstVideoTimeCode
+
+
+
+ Interlaced video field count
+
+
+
+ @field_count is 0 for progressive, 1 or 2 for interlaced.
+@latest_daily_jam reference is stolen from caller.
+
+ a new #GstVideoTimeCode with the given values.
+The values are not checked for being in a valid range. To see if your
+timecode actually has valid content, use #gst_video_time_code_is_valid.
+
+
+
+
+ Numerator of the frame rate
+
+
+
+ Denominator of the frame rate
+
+
+
+ The latest daily jam of the #GstVideoTimeCode
+
+
+
+ #GstVideoTimeCodeFlags
+
+
+
+ the hours field of #GstVideoTimeCode
+
+
+
+ the minutes field of #GstVideoTimeCode
+
+
+
+ the seconds field of #GstVideoTimeCode
+
+
+
+ the frames field of #GstVideoTimeCode
+
+
+
+ Interlaced video field count
+
+
+
+
+
+
+ a new empty #GstVideoTimeCode
+
+
+
+
+ The resulting config->latest_daily_jam is set to
+midnight, and timecode is set to the given time.
+
+ the #GstVideoTimeCode representation of @dt.
+
+
+
+
+ Numerator of the frame rate
+
+
+
+ Denominator of the frame rate
+
+
+
+ #GDateTime to convert
+
+
+
+ #GstVideoTimeCodeFlags
+
+
+
+ Interlaced video field count
+
+
+
+
+
+
+ a new #GstVideoTimeCode from the given string
+
+
+
+
+ The string that represents the #GstVideoTimeCode
+
+
+
+
+
+ Adds or subtracts @frames amount of frames to @tc. tc needs to
+contain valid data, as verified by #gst_video_time_code_is_valid.
+
+
+
+
+
+ a valid #GstVideoTimeCode
+
+
+
+ How many frames to add or subtract
+
+
+
+
+
+ This makes a component-wise addition of @tc_inter to @tc. For example,
+adding ("01:02:03:04", "00:01:00:00") will return "01:03:03:04".
+When it comes to drop-frame timecodes,
+adding ("00:00:00;00", "00:01:00:00") will return "00:01:00;02"
+because of drop-frame oddities. However,
+adding ("00:09:00;02", "00:01:00:00") will return "00:10:00;00"
+because this time we can have an exact minute.
+
+ A new #GstVideoTimeCode with @tc_inter added.
+
+
+
+
+ The #GstVideoTimeCode where the diff should be added. This
+must contain valid timecode values.
+
+
+
+ The #GstVideoTimeCodeInterval to add to @tc.
+The interval must contain valid values, except that for drop-frame
+timecode, it may also contain timecodes which would normally
+be dropped. These are then corrected to the next reasonable timecode.
+
+
+
+
+
+ Initializes @tc with empty/zero/NULL values.
+
+
+
+
+
+ a #GstVideoTimeCode
+
+
+
+
+
+ Compares @tc1 and @tc2 . If both have latest daily jam information, it is
+taken into account. Otherwise, it is assumed that the daily jam of both
+@tc1 and @tc2 was at the same time. Both time codes must be valid.
+
+ 1 if @tc1 is after @tc2, -1 if @tc1 is before @tc2, 0 otherwise.
+
+
+
+
+ a #GstVideoTimeCode
+
+
+
+ another #GstVideoTimeCode
+
+
+
+
+
+
+ a new #GstVideoTimeCode with the same values as @tc .
+
+
+
+
+ a #GstVideoTimeCode
+
+
+
+
+
+
+ how many frames have passed since the daily jam of @tc .
+
+
+
+
+ a valid #GstVideoTimeCode
+
+
+
+
+
+ Frees @tc .
+
+
+
+
+
+ a #GstVideoTimeCode
+
+
+
+
+
+ Adds one frame to @tc .
+
+
+
+
+
+ a valid #GstVideoTimeCode
+
+
+
+
+
+ @field_count is 0 for progressive, 1 or 2 for interlaced.
+@latest_daily_jam reference is stolen from caller.
+
+Initializes @tc with the given values.
+The values are not checked for being in a valid range. To see if your
+timecode actually has valid content, use #gst_video_time_code_is_valid.
+
+
+
+
+
+ a #GstVideoTimeCode
+
+
+
+ Numerator of the frame rate
+
+
+
+ Denominator of the frame rate
+
+
+
+ The latest daily jam of the #GstVideoTimeCode
+
+
+
+ #GstVideoTimeCodeFlags
+
+
+
+ the hours field of #GstVideoTimeCode
+
+
+
+ the minutes field of #GstVideoTimeCode
+
+
+
+ the seconds field of #GstVideoTimeCode
+
+
+
+ the frames field of #GstVideoTimeCode
+
+
+
+ Interlaced video field count
+
+
+
+
+
+ The resulting config->latest_daily_jam is set to
+midnight, and timecode is set to the given time.
+
+
+
+
+
+ a #GstVideoTimeCode
+
+
+
+ Numerator of the frame rate
+
+
+
+ Denominator of the frame rate
+
+
+
+ #GDateTime to convert
+
+
+
+ #GstVideoTimeCodeFlags
+
+
+
+ Interlaced video field count
+
+
+
+
+
+
+ whether @tc is a valid timecode (supported frame rate,
+hours/minutes/seconds/frames not overflowing)
+
+
+
+
+ #GstVideoTimeCode to check
+
+
+
+
+
+
+ how many nsec have passed since the daily jam of @tc .
+
+
+
+
+ a valid #GstVideoTimeCode
+
+
+
+
+
+ The @tc.config->latest_daily_jam is required to be non-NULL.
+
+ the #GDateTime representation of @tc.
+
+
+
+
+ A valid #GstVideoTimeCode to convert
+
+
+
+
+
+
+ the SMPTE ST 2059-1:2015 string representation of @tc. That will
+take the form hh:mm:ss:ff . The last separator (between seconds and frames)
+may vary:
+
+';' for drop-frame, non-interlaced content and for drop-frame interlaced
+field 2
+',' for drop-frame interlaced field 1
+':' for non-drop-frame, non-interlaced content and for non-drop-frame
+interlaced field 2
+'.' for non-drop-frame interlaced field 1
+
+
+
+
+ #GstVideoTimeCode to convert
+
+
+
+
+
+
+ Supported frame rates: 30000/1001, 60000/1001 (both with and without drop
+frame), and integer frame rates e.g. 25/1, 30/1, 50/1, 60/1.
+
+The configuration of the time code.
+
+ Numerator of the frame rate
+
+
+
+ Denominator of the frame rate
+
+
+
+ the corresponding #GstVideoTimeCodeFlags
+
+
+
+ The latest daily jam information, if present, or NULL
+
+
+
+
+ Flags related to the time code information.
+For drop frame, only 30000/1001 and 60000/1001 frame rates are supported.
+
+ No flags
+
+
+ Whether we have drop frame rate
+
+
+ Whether we have interlaced video
+
+
+
+ A representation of a difference between two #GstVideoTimeCode instances.
+Will not necessarily correspond to a real timecode (e.g. 00:00:10;00)
+
+ the hours field of #GstVideoTimeCodeInterval
+
+
+
+ the minutes field of #GstVideoTimeCodeInterval
+
+
+
+ the seconds field of #GstVideoTimeCodeInterval
+
+
+
+ the frames field of #GstVideoTimeCodeInterval
+
+
+
+
+ a new #GstVideoTimeCodeInterval with the given values.
+
+
+
+
+ the hours field of #GstVideoTimeCodeInterval
+
+
+
+ the minutes field of #GstVideoTimeCodeInterval
+
+
+
+ the seconds field of #GstVideoTimeCodeInterval
+
+
+
+ the frames field of #GstVideoTimeCodeInterval
+
+
+
+
+
+ @tc_inter_str must only have ":" as separators.
+
+ a new #GstVideoTimeCodeInterval from the given string
+
+
+
+
+ The string that represents the #GstVideoTimeCodeInterval
+
+
+
+
+
+ Initializes @tc with empty/zero/NULL values.
+
+
+
+
+
+ a #GstVideoTimeCodeInterval
+
+
+
+
+
+
+ a new #GstVideoTimeCodeInterval with the same values as @tc .
+
+
+
+
+ a #GstVideoTimeCodeInterval
+
+
+
+
+
+ Frees @tc .
+
+
+
+
+
+ a #GstVideoTimeCodeInterval
+
+
+
+
+
+ Initializes @tc with the given values.
+
+
+
+
+
+ a #GstVideoTimeCodeInterval
+
+
+
+ the hours field of #GstVideoTimeCodeInterval
+
+
+
+ the minutes field of #GstVideoTimeCodeInterval
+
+
+
+ the seconds field of #GstVideoTimeCodeInterval
+
+
+
+ the frames field of #GstVideoTimeCodeInterval
+
+
+
+
+
+
+ Extra buffer metadata describing the GstVideoTimeCode of the frame.
+
+Each frame is assumed to have its own timecode, i.e. they are not
+automatically incremented/interpolated.
+
+ parent #GstMeta
+
+
+
+ the GstVideoTimeCode to attach
+
+
+
+
+
+
+
+
+
+ The video transfer function defines the formula for converting between
+non-linear RGB (R'G'B') and linear RGB
+
+ unknown transfer function
+
+
+ linear RGB, gamma 1.0 curve
+
+
+ Gamma 1.8 curve
+
+
+ Gamma 2.0 curve
+
+
+ Gamma 2.2 curve
+
+
+ Gamma 2.2 curve with a linear segment in the lower
+ range
+
+
+ Gamma 2.2 curve with a linear segment in the
+ lower range
+
+
+ Gamma 2.4 curve with a linear segment in the lower
+ range
+
+
+ Gamma 2.8 curve
+
+
+ Logarithmic transfer characteristic
+ 100:1 range
+
+
+ Logarithmic transfer characteristic
+ 316.22777:1 range
+
+
+ Gamma 2.2 curve with a linear segment in the lower
+ range. Used for BT.2020 with 12 bits per
+ component. Since: 1.6.
+
+
+ Gamma 2.19921875. Since: 1.8
+
+
+
+ Attaches GstVideoAffineTransformationMeta metadata to @buffer with
+the given parameters.
+
+ the #GstVideoAffineTransformationMeta on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+
+
+ Attaches GstVideoGLTextureUploadMeta metadata to @buffer with the given
+parameters.
+
+ the #GstVideoGLTextureUploadMeta on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+ the #GstVideoGLTextureOrientation
+
+
+
+ the number of textures
+
+
+
+ array of #GstVideoGLTextureType
+
+
+
+ the function to upload the buffer to a specific texture ID
+
+
+
+ user data for the implementor of @upload
+
+
+
+ function to copy @user_data
+
+
+
+ function to free @user_data
+
+
+
+
+
+ Attaches GstVideoMeta metadata to @buffer with the given parameters and the
+default offsets and strides for @format and @width x @height.
+
+This function calculates the default offsets and strides and then calls
+gst_buffer_add_video_meta_full() with them.
+
+ the #GstVideoMeta on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+ #GstVideoFrameFlags
+
+
+
+ a #GstVideoFormat
+
+
+
+ the width
+
+
+
+ the height
+
+
+
+
+
+ Attaches GstVideoMeta metadata to @buffer with the given parameters.
+
+ the #GstVideoMeta on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+ #GstVideoFrameFlags
+
+
+
+ a #GstVideoFormat
+
+
+
+ the width
+
+
+
+ the height
+
+
+
+ number of planes
+
+
+
+ offset of each plane
+
+
+
+ stride of each plane
+
+
+
+
+
+ Sets an overlay composition on a buffer. The buffer will obtain its own
+reference to the composition, meaning this function does not take ownership
+of @comp.
+
+ a #GstVideoOverlayCompositionMeta
+
+
+
+
+ a #GstBuffer
+
+
+
+ a #GstVideoOverlayComposition
+
+
+
+
+
+ Attaches #GstVideoRegionOfInterestMeta metadata to @buffer with the given
+parameters.
+
+ the #GstVideoRegionOfInterestMeta on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+ Type of the region of interest (e.g. "face")
+
+
+
+ X position
+
+
+
+ Y position
+
+
+
+ width
+
+
+
+ height
+
+
+
+
+
+ Attaches #GstVideoRegionOfInterestMeta metadata to @buffer with the given
+parameters.
+
+ the #GstVideoRegionOfInterestMeta on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+ Type of the region of interest (e.g. "face")
+
+
+
+ X position
+
+
+
+ Y position
+
+
+
+ width
+
+
+
+ height
+
+
+
+
+
+ Attaches #GstVideoTimeCodeMeta metadata to @buffer with the given
+parameters.
+
+ the #GstVideoTimeCodeMeta on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+ a #GstVideoTimeCode
+
+
+
+
+
+ Attaches #GstVideoTimeCodeMeta metadata to @buffer with the given
+parameters.
+
+ the #GstVideoTimeCodeMeta on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+ framerate numerator
+
+
+
+ framerate denominator
+
+
+
+ a #GDateTime for the latest daily jam
+
+
+
+ a #GstVideoTimeCodeFlags
+
+
+
+ hours since the daily jam
+
+
+
+ minutes since the daily jam
+
+
+
+ seconds since the daily jam
+
+
+
+ frames since the daily jam
+
+
+
+ fields since the daily jam
+
+
+
+
+
+ Find the #GstVideoMeta on @buffer with the lowest @id.
+
+Buffers can contain multiple #GstVideoMeta metadata items when dealing with
+multiview buffers.
+
+ the #GstVideoMeta with lowest id (usually 0) or %NULL when there
+is no such metadata on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+
+
+ Find the #GstVideoMeta on @buffer with the given @id.
+
+Buffers can contain multiple #GstVideoMeta metadata items when dealing with
+multiview buffers.
+
+ the #GstVideoMeta with @id or %NULL when there is no such metadata
+on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+ a metadata id
+
+
+
+
+
+ Find the #GstVideoRegionOfInterestMeta on @buffer with the given @id.
+
+Buffers can contain multiple #GstVideoRegionOfInterestMeta metadata items if
+multiple regions of interests are marked on a frame.
+
+ the #GstVideoRegionOfInterestMeta with @id or %NULL when there is
+no such metadata on @buffer.
+
+
+
+
+ a #GstBuffer
+
+
+
+ a metadata id
+
+
+
+
+
+ Get the video alignment from the bufferpool configuration @config in
+@align
+
+ #TRUE if @config could be parsed correctly.
+
+
+
+
+ a #GstStructure
+
+
+
+ a #GstVideoAlignment
+
+
+
+
+
+ Set the video alignment in @align to the bufferpool configuration
+@config
+
+
+
+
+
+ a #GstStructure
+
+
+
+ a #GstVideoAlignment
+
+
+
+
+
+ Convenience function to check if the given message is a
+"prepare-window-handle" message from a #GstVideoOverlay.
+
+ whether @msg is a "prepare-window-handle" message
+
+
+
+
+ a #GstMessage
+
+
+
+
+
+ Inspect a #GstEvent and return the #GstNavigationEventType of the event, or
+#GST_NAVIGATION_EVENT_INVALID if the event is not a #GstNavigation event.
+
+
+
+
+
+ A #GstEvent to inspect.
+
+
+
+
+
+ Inspect a #GstNavigation command event and retrieve the enum value of the
+associated command.
+
+ TRUE if the navigation command could be extracted, otherwise FALSE.
+
+
+
+
+ A #GstEvent to inspect.
+
+
+
+ Pointer to GstNavigationCommand to receive the type of the
+navigation event.
+
+
+
+
+
+
+
+
+
+
+ A #GstEvent to inspect.
+
+
+
+ A pointer to a location to receive the string identifying the key
+press. The returned string is owned by the event, and valid only until the
+event is unreffed.
+
+
+
+
+
+ Retrieve the details of either a #GstNavigation mouse button press event or
+a mouse button release event. Determine which type the event is using
+gst_navigation_event_get_type() to retrieve the #GstNavigationEventType.
+
+ TRUE if the button number and both coordinates could be extracted,
+ otherwise FALSE.
+
+
+
+
+ A #GstEvent to inspect.
+
+
+
+ Pointer to a gint that will receive the button number associated
+with the event.
+
+
+
+ Pointer to a gdouble to receive the x coordinate of the mouse button
+event.
+
+
+
+ Pointer to a gdouble to receive the y coordinate of the mouse button
+event.
+
+
+
+
+
+ Inspect a #GstNavigation mouse movement event and extract the coordinates
+of the event.
+
+ TRUE if both coordinates could be extracted, otherwise FALSE.
+
+
+
+
+ A #GstEvent to inspect.
+
+
+
+ Pointer to a gdouble to receive the x coordinate of the mouse movement.
+
+
+
+ Pointer to a gdouble to receive the y coordinate of the mouse movement.
+
+
+
+
+
+ Check a bus message to see if it is a #GstNavigation event, and return
+the #GstNavigationMessageType identifying the type of the message if so.
+
+ The type of the #GstMessage, or
+#GST_NAVIGATION_MESSAGE_INVALID if the message is not a #GstNavigation
+notification.
+
+
+
+
+ A #GstMessage to inspect.
+
+
+
+
+
+ Creates a new #GstNavigation message with type
+#GST_NAVIGATION_MESSAGE_ANGLES_CHANGED for notifying an application
+that the current angle, or current number of angles available in a
+multiangle video has changed.
+
+ The new #GstMessage.
+
+
+
+
+ A #GstObject to set as source of the new message.
+
+
+
+ The currently selected angle.
+
+
+
+ The number of viewing angles now available.
+
+
+
+
+
+ Creates a new #GstNavigation message with type
+#GST_NAVIGATION_MESSAGE_COMMANDS_CHANGED
+
+ The new #GstMessage.
+
+
+
+
+ A #GstObject to set as source of the new message.
+
+
+
+
+
+ Creates a new #GstNavigation message with type
+#GST_NAVIGATION_MESSAGE_EVENT.
+
+ The new #GstMessage.
+
+
+
+
+ A #GstObject to set as source of the new message.
+
+
+
+ A navigation #GstEvent
+
+
+
+
+
+ Creates a new #GstNavigation message with type
+#GST_NAVIGATION_MESSAGE_MOUSE_OVER.
+
+ The new #GstMessage.
+
+
+
+
+ A #GstObject to set as source of the new message.
+
+
+
+ %TRUE if the mouse has entered a clickable area of the display.
+%FALSE if it is over a non-clickable area.
+
+
+
+
+
+ Parse a #GstNavigation message of type GST_NAVIGATION_MESSAGE_ANGLES_CHANGED
+and extract the @cur_angle and @n_angles parameters.
+
+ %TRUE if the message could be successfully parsed. %FALSE if not.
+
+
+
+
+ A #GstMessage to inspect.
+
+
+
+ A pointer to a #guint to receive the new current angle number,
+or NULL
+
+
+
+ A pointer to a #guint to receive the new angle count, or NULL.
+
+
+
+
+
+ Parse a #GstNavigation message of type #GST_NAVIGATION_MESSAGE_EVENT
+and extract contained #GstEvent. The caller must unref the @event when done
+with it.
+
+ %TRUE if the message could be successfully parsed. %FALSE if not.
+
+
+
+
+ A #GstMessage to inspect.
+
+
+
+ a pointer to a #GstEvent to receive the
+ contained navigation event.
+
+
+
+
+
+ Parse a #GstNavigation message of type #GST_NAVIGATION_MESSAGE_MOUSE_OVER
+and extract the active/inactive flag. If the mouse over event is marked
+active, it indicates that the mouse is over a clickable area.
+
+ %TRUE if the message could be successfully parsed. %FALSE if not.
+
+
+
+
+ A #GstMessage to inspect.
+
+
+
+ A pointer to a gboolean to receive the active/inactive state,
+or NULL.
+
+
+
+
+
+ Inspect a #GstQuery and return the #GstNavigationQueryType associated with
+it if it is a #GstNavigation query.
+
+ The #GstNavigationQueryType of the query, or
+#GST_NAVIGATION_QUERY_INVALID
+
+
+
+
+ The query to inspect
+
+
+
+
+
+ Create a new #GstNavigation angles query. When executed, it will
+query the pipeline for the set of currently available angles, which may be
+greater than one in a multiangle video.
+
+ The new query.
+
+
+
+
+ Create a new #GstNavigation commands query. When executed, it will
+query the pipeline for the set of currently available commands.
+
+ The new query.
+
+
+
+
+ Parse the current angle number in the #GstNavigation angles @query into the
+#guint pointed to by the @cur_angle variable, and the number of available
+angles into the #guint pointed to by the @n_angles variable.
+
+ %TRUE if the query could be successfully parsed. %FALSE if not.
+
+
+
+
+ a #GstQuery
+
+
+
+ Pointer to a #guint into which to store the currently selected
+angle value from the query, or NULL
+
+
+
+ Pointer to a #guint into which to store the number of angles
+value from the query, or NULL
+
+
+
+
+
+ Parse the number of commands in the #GstNavigation commands @query.
+
+ %TRUE if the query could be successfully parsed. %FALSE if not.
+
+
+
+
+ a #GstQuery
+
+
+
+ the number of commands in this query.
+
+
+
+
+
+ Parse the #GstNavigation command query and retrieve the @nth command from
+it into @cmd. If the list contains less elements than @nth, @cmd will be
+set to #GST_NAVIGATION_COMMAND_INVALID.
+
+ %TRUE if the query could be successfully parsed. %FALSE if not.
+
+
+
+
+ a #GstQuery
+
+
+
+ the nth command to retrieve.
+
+
+
+ a pointer to store the nth command into.
+
+
+
+
+
+ Set the #GstNavigation angles query result field in @query.
+
+
+
+
+
+ a #GstQuery
+
+
+
+ the current viewing angle to set.
+
+
+
+ the number of viewing angles to set.
+
+
+
+
+
+ Set the #GstNavigation command query result fields in @query. The number
+of commands passed must be equal to @n_commands.
+
+
+
+
+
+ a #GstQuery
+
+
+
+ the number of commands to set.
+
+
+
+ An array containing @n_cmds @GstNavigationCommand values.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Lets you blend the @src image into the @dest image
+
+
+
+
+
+ The #GstVideoFrame where to blend @src in
+
+
+
+ the #GstVideoFrame that we want to blend into
+
+
+
+ The x offset in pixel where the @src image should be blended
+
+
+
+ the y offset in pixel where the @src image should be blended
+
+
+
+ the global_alpha each per-pixel alpha value is multiplied
+ with
+
+
+
+
+
+ Scales a buffer containing RGBA (or AYUV) video. This is an internal
+helper function which is used to scale subtitle overlays, and may be
+deprecated in the near future. Use #GstVideoScaler to scale video buffers
+instead.
+
+
+
+
+
+ the #GstVideoInfo describing the video data in @src_buffer
+
+
+
+ the source buffer containing video pixels to scale
+
+
+
+ the height in pixels to scale the video data in @src_buffer to
+
+
+
+ the width in pixels to scale the video data in @src_buffer to
+
+
+
+ pointer to a #GstVideoInfo structure that will be filled in
+ with the details for @dest_buffer
+
+
+
+ a pointer to a #GstBuffer variable, which will be
+ set to a newly-allocated buffer containing the scaled pixels.
+
+
+
+
+
+ Given the Pixel Aspect Ratio and size of an input video frame, and the
+pixel aspect ratio of the intended display device, calculates the actual
+display ratio the video will be rendered with.
+
+ A boolean indicating success and a calculated Display Ratio in the
+dar_n and dar_d parameters.
+The return value is FALSE in the case of integer overflow or other error.
+
+
+
+
+ Numerator of the calculated display_ratio
+
+
+
+ Denominator of the calculated display_ratio
+
+
+
+ Width of the video frame in pixels
+
+
+
+ Height of the video frame in pixels
+
+
+
+ Numerator of the pixel aspect ratio of the input video.
+
+
+
+ Denominator of the pixel aspect ratio of the input video.
+
+
+
+ Numerator of the pixel aspect ratio of the display device
+
+
+
+ Denominator of the pixel aspect ratio of the display device
+
+
+
+
+
+ Convert @s to a #GstVideoChromaSite
+
+ a #GstVideoChromaSite or %GST_VIDEO_CHROMA_SITE_UNKNOWN when @s does
+not contain a valid chroma description.
+
+
+
+
+ a chromasite string
+
+
+
+
+
+ Perform resampling of @width chroma pixels in @lines.
+
+
+
+
+
+ a #GstVideoChromaResample
+
+
+
+ pixel lines
+
+
+
+ the number of pixels on one line
+
+
+
+
+
+ Create a new resampler object for the given parameters. When @h_factor or
+@v_factor is > 0, upsampling will be used, otherwise subsampling is
+performed.
+
+ a new #GstVideoChromaResample that should be freed with
+ gst_video_chroma_resample_free() after usage.
+
+
+
+
+ a #GstVideoChromaMethod
+
+
+
+ a #GstVideoChromaSite
+
+
+
+ #GstVideoChromaFlags
+
+
+
+ the #GstVideoFormat
+
+
+
+ horizontal resampling factor
+
+
+
+ vertical resampling factor
+
+
+
+
+
+ Converts @site to its string representation.
+
+ a string describing @site.
+
+
+
+
+ a #GstVideoChromaSite
+
+
+
+
+
+ Get the coefficients used to convert between Y'PbPr and R'G'B' using @matrix.
+
+When:
+
+|[
+ 0.0 <= [Y',R',G',B'] <= 1.0)
+ (-0.5 <= [Pb,Pr] <= 0.5)
+]|
+
+the general conversion is given by:
+
+|[
+ Y' = Kr*R' + (1-Kr-Kb)*G' + Kb*B'
+ Pb = (B'-Y')/(2*(1-Kb))
+ Pr = (R'-Y')/(2*(1-Kr))
+]|
+
+and the other way around:
+
+|[
+ R' = Y' + Cr*2*(1-Kr)
+ G' = Y' - Cb*2*(1-Kb)*Kb/(1-Kr-Kb) - Cr*2*(1-Kr)*Kr/(1-Kr-Kb)
+ B' = Y' + Cb*2*(1-Kb)
+]|
+
+ TRUE if @matrix was a YUV color format and @Kr and @Kb contain valid
+ values.
+
+
+
+
+ a #GstVideoColorMatrix
+
+
+
+ result red channel coefficient
+
+
+
+ result blue channel coefficient
+
+
+
+
+
+ Get information about the chromaticity coordinates of @primaries.
+
+ a #GstVideoColorPrimariesInfo for @primaries.
+
+
+
+
+ a #GstVideoColorPrimaries
+
+
+
+
+
+ Compute the offset and scale values for each component of @info. For each
+component, (c[i] - offset[i]) / scale[i] will scale the component c[i] to the
+range [0.0 .. 1.0].
+
+The reverse operation (c[i] * scale[i]) + offset[i] can be used to convert
+the component values in range [0.0 .. 1.0] back to their representation in
+@info and @range.
+
+
+
+
+
+ a #GstVideoColorRange
+
+
+
+ a #GstVideoFormatInfo
+
+
+
+ output offsets
+
+
+
+ output scale
+
+
+
+
+
+ Convert @val to its gamma decoded value. This is the inverse operation of
+@gst_video_color_transfer_encode().
+
+For a non-linear value L' in the range [0..1], conversion to the linear
+L is in general performed with a power function like:
+
+|[
+ L = L' ^ gamma
+]|
+
+Depending on @func, different formulas might be applied. Some formulas
+encode a linear segment in the lower range.
+
+ the gamma decoded value of @val
+
+
+
+
+ a #GstVideoTransferFunction
+
+
+
+ a value
+
+
+
+
+
+ Convert @val to its gamma encoded value.
+
+For a linear value L in the range [0..1], conversion to the non-linear
+(gamma encoded) L' is in general performed with a power function like:
+
+|[
+ L' = L ^ (1 / gamma)
+]|
+
+Depending on @func, different formulas might be applied. Some formulas
+encode a linear segment in the lower range.
+
+ the gamma encoded value of @val
+
+
+
+
+ a #GstVideoTransferFunction
+
+
+
+ a value
+
+
+
+
+
+ Converts a raw video buffer into the specified output caps.
+
+The output caps can be any raw video formats or any image formats (jpeg, png, ...).
+
+The width, height and pixel-aspect-ratio can also be specified in the output caps.
+
+ The converted #GstSample, or %NULL if an error happened (in which case @err
+will point to the #GError).
+
+
+
+
+ a #GstSample
+
+
+
+ the #GstCaps to convert to
+
+
+
+ the maximum amount of time allowed for the processing.
+
+
+
+
+
+ Converts a raw video buffer into the specified output caps.
+
+The output caps can be any raw video formats or any image formats (jpeg, png, ...).
+
+The width, height and pixel-aspect-ratio can also be specified in the output caps.
+
+@callback will be called after conversion, when an error occurred or if conversion didn't
+finish after @timeout. @callback will always be called from the thread default
+%GMainContext, see g_main_context_get_thread_default(). If GLib before 2.22 is used,
+this will always be the global default main context.
+
+@destroy_notify will be called after the callback was called and @user_data is not needed
+anymore.
+
+
+
+
+
+ a #GstSample
+
+
+
+ the #GstCaps to convert to
+
+
+
+ the maximum amount of time allowed for the processing.
+
+
+
+ %GstVideoConvertSampleCallback that will be called after conversion.
+
+
+
+ extra data that will be passed to the @callback
+
+
+
+ %GDestroyNotify to be called after @user_data is not needed anymore
+
+
+
+
+
+ Create a new converter object to convert between @in_info and @out_info
+with @config.
+
+ a #GstVideoConverter or %NULL if conversion is not possible.
+
+
+
+
+ a #GstVideoInfo
+
+
+
+ a #GstVideoInfo
+
+
+
+ a #GstStructure with configuration options
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Make a new dither object for dithering lines of @format using the
+algorithm described by @method.
+
+Each component will be quantized to a multiple of @quantizer. Better
+performance is achieved when @quantizer is a power of 2.
+
+@width is the width of the lines that this ditherer will handle.
+
+ a new #GstVideoDither
+
+
+
+
+ a #GstVideoDitherMethod
+
+
+
+ a #GstVideoDitherFlags
+
+
+
+ a #GstVideoFormat
+
+
+
+ quantizer
+
+
+
+ the width of the lines
+
+
+
+
+
+ Checks if an event is a force key unit event. Returns true for both upstream
+and downstream force key unit events.
+
+ %TRUE if the event is a valid force key unit event
+
+
+
+
+ A #GstEvent to check
+
+
+
+
+
+ Creates a new downstream force key unit event. A downstream force key unit
+event can be sent down the pipeline to request downstream elements to produce
+a key unit. A downstream force key unit event must also be sent when handling
+an upstream force key unit event to notify downstream that the latter has been
+handled.
+
+To parse an event created by gst_video_event_new_downstream_force_key_unit() use
+gst_video_event_parse_downstream_force_key_unit().
+
+ The new GstEvent
+
+
+
+
+ the timestamp of the buffer that starts a new key unit
+
+
+
+ the stream_time of the buffer that starts a new key unit
+
+
+
+ the running_time of the buffer that starts a new key unit
+
+
+
+ %TRUE to produce headers when starting a new key unit
+
+
+
+ integer that can be used to number key units
+
+
+
+
+
+ Creates a new Still Frame event. If @in_still is %TRUE, then the event
+represents the start of a still frame sequence. If it is %FALSE, then
+the event ends a still frame sequence.
+
+To parse an event created by gst_video_event_new_still_frame() use
+gst_video_event_parse_still_frame().
+
+ The new GstEvent
+
+
+
+
+ boolean value for the still-frame state of the event.
+
+
+
+
+
+ Creates a new upstream force key unit event. An upstream force key unit event
+can be sent to request upstream elements to produce a key unit.
+
+@running_time can be set to request a new key unit at a specific
+running_time. If set to GST_CLOCK_TIME_NONE, upstream elements will produce a
+new key unit as soon as possible.
+
+To parse an event created by gst_video_event_new_downstream_force_key_unit() use
+gst_video_event_parse_downstream_force_key_unit().
+
+ The new GstEvent
+
+
+
+
+ the running_time at which a new key unit should be produced
+
+
+
+ %TRUE to produce headers when starting a new key unit
+
+
+
+ integer that can be used to number key units
+
+
+
+
+
+ Get timestamp, stream-time, running-time, all-headers and count in the force
+key unit event. See gst_video_event_new_downstream_force_key_unit() for a
+full description of the downstream force key unit event.
+
+@running_time will be adjusted for any pad offsets of pads it was passing through.
+
+ %TRUE if the event is a valid downstream force key unit event.
+
+
+
+
+ A #GstEvent to parse
+
+
+
+ A pointer to the timestamp in the event
+
+
+
+ A pointer to the stream-time in the event
+
+
+
+ A pointer to the running-time in the event
+
+
+
+ A pointer to the all_headers flag in the event
+
+
+
+ A pointer to the count field of the event
+
+
+
+
+
+ Parse a #GstEvent, identify if it is a Still Frame event, and
+return the still-frame state from the event if it is.
+If the event represents the start of a still frame, the in_still
+variable will be set to TRUE, otherwise FALSE. It is OK to pass NULL for the
+in_still variable in order to just check whether the event is a valid still-frame
+event.
+
+Create a still frame event using gst_video_event_new_still_frame()
+
+ %TRUE if the event is a valid still-frame event. %FALSE if not
+
+
+
+
+ A #GstEvent to parse
+
+
+
+ A boolean to receive the still-frame status from the event, or NULL
+
+
+
+
+
+ Get running-time, all-headers and count in the force key unit event. See
+gst_video_event_new_upstream_force_key_unit() for a full description of the
+upstream force key unit event.
+
+Create an upstream force key unit event using gst_video_event_new_upstream_force_key_unit()
+
+@running_time will be adjusted for any pad offsets of pads it was passing through.
+
+ %TRUE if the event is a valid upstream force-key-unit event. %FALSE if not
+
+
+
+
+ A #GstEvent to parse
+
+
+
+ A pointer to the running_time in the event
+
+
+
+ A pointer to the all_headers flag in the event
+
+
+
+ A pointer to the count field in the event
+
+
+
+
+
+ Convert @order to a #GstVideoFieldOrder
+
+ the #GstVideoFieldOrder of @order or
+ #GST_VIDEO_FIELD_ORDER_UNKNOWN when @order is not a valid
+ string representation for a #GstVideoFieldOrder.
+
+
+
+
+ a field order
+
+
+
+
+
+ Convert @order to its string representation.
+
+ @order as a string or NULL if @order is invalid.
+
+
+
+
+ a #GstVideoFieldOrder
+
+
+
+
+
+ Converts a FOURCC value into the corresponding #GstVideoFormat.
+If the FOURCC cannot be represented by #GstVideoFormat,
+#GST_VIDEO_FORMAT_UNKNOWN is returned.
+
+ the #GstVideoFormat describing the FOURCC value
+
+
+
+
+ a FOURCC value representing raw YUV video
+
+
+
+
+
+ Find the #GstVideoFormat for the given parameters.
+
+ a #GstVideoFormat or GST_VIDEO_FORMAT_UNKNOWN when the parameters do
+not specify a known format.
+
+
+
+
+ the amount of bits used for a pixel
+
+
+
+ the amount of bits used to store a pixel. This value is bigger than
+ @depth
+
+
+
+ the endianness of the masks, #G_LITTLE_ENDIAN or #G_BIG_ENDIAN
+
+
+
+ the red mask
+
+
+
+ the green mask
+
+
+
+ the blue mask
+
+
+
+ the alpha mask, or 0 if no alpha mask
+
+
+
+
+
+ Convert the @format string to its #GstVideoFormat.
+
+ the #GstVideoFormat for @format or GST_VIDEO_FORMAT_UNKNOWN when the
+string is not a known format.
+
+
+
+
+ a format string
+
+
+
+
+
+ Get the #GstVideoFormatInfo for @format
+
+ The #GstVideoFormatInfo for @format.
+
+
+
+
+ a #GstVideoFormat
+
+
+
+
+
+ Get the default palette of @format. This the palette used in the pack
+function for paletted formats.
+
+ the default palette of @format or %NULL when
+@format does not have a palette.
+
+
+
+
+ a #GstVideoFormat
+
+
+
+ size of the palette in bytes
+
+
+
+
+
+ Converts a #GstVideoFormat value into the corresponding FOURCC. Only
+a few YUV formats have corresponding FOURCC values. If @format has
+no corresponding FOURCC value, 0 is returned.
+
+ the FOURCC corresponding to @format
+
+
+
+
+ a #GstVideoFormat video format
+
+
+
+
+
+ Returns a string containing a descriptive name for
+the #GstVideoFormat if there is one, or NULL otherwise.
+
+ the name corresponding to @format
+
+
+
+
+ a #GstVideoFormat video format
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Given the nominal duration of one video frame,
+this function will check some standard framerates for
+a close match (within 0.1%) and return one if possible.
+
+It will calculate an arbitrary framerate if no close
+match was found, and return %FALSE.
+
+It returns %FALSE if a duration of 0 is passed.
+
+ %TRUE if a close "standard" framerate was
+recognised, and %FALSE otherwise.
+
+
+
+
+ Nominal duration of one frame
+
+
+
+ Numerator of the calculated framerate
+
+
+
+ Denominator of the calculated framerate
+
+
+
+
+
+ Convert @mode to a #GstVideoInterlaceMode
+
+ the #GstVideoInterlaceMode of @mode or
+ #GST_VIDEO_INTERLACE_MODE_PROGRESSIVE when @mode is not a valid
+ string representation for a #GstVideoInterlaceMode.
+
+
+
+
+ a mode
+
+
+
+
+
+ Convert @mode to its string representation.
+
+ @mode as a string or NULL if @mode is invalid.
+
+
+
+
+ a #GstVideoInterlaceMode
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Get the #GQuark for the "gst-video-scale" metadata transform operation.
+
+ a #GQuark
+
+
+
+
+
+ A const #GValue containing a list of stereo video modes
+
+Utility function that returns a #GValue with a GstList of packed stereo
+video modes with double the height of a single view for use in
+caps negotiations. Currently this is top-bottom and row-interleaved.
+
+
+
+
+
+ A const #GValue containing a list of stereo video modes
+
+Utility function that returns a #GValue with a GstList of packed
+stereo video modes that have double the width/height of a single
+view for use in caps negotiation. Currently this is just
+'checkerboard' layout.
+
+
+
+
+
+ A const #GValue containing a list of stereo video modes
+
+Utility function that returns a #GValue with a GstList of packed stereo
+video modes with double the width of a single view for use in
+caps negotiations. Currently this is side-by-side, side-by-side-quincunx
+and column-interleaved.
+
+
+
+
+
+ A const #GValue containing a list of mono video modes
+
+Utility function that returns a #GValue with a GstList of mono video
+modes (mono/left/right) for use in caps negotiations.
+
+
+
+
+
+ A const #GValue containing a list of 'unpacked' stereo video modes
+
+Utility function that returns a #GValue with a GstList of unpacked
+stereo video modes (separated/frame-by-frame/frame-by-frame-multiview)
+for use in caps negotiations.
+
+
+
+
+
+ A boolean indicating whether the
+ #GST_VIDEO_MULTIVIEW_FLAG_HALF_ASPECT flag should be set.
+
+Utility function that heuristically guess whether a
+frame-packed stereoscopic video contains half width/height
+encoded views, or full-frame views by looking at the
+overall display aspect ratio.
+
+
+
+
+ A #GstVideoMultiviewMode
+
+
+
+ Video frame width in pixels
+
+
+
+ Video frame height in pixels
+
+
+
+ Numerator of the video pixel-aspect-ratio
+
+
+
+ Denominator of the video pixel-aspect-ratio
+
+
+
+
+
+
+ The #GstVideoMultiviewMode value
+
+Given a string from a caps multiview-mode field,
+output the corresponding #GstVideoMultiviewMode
+or #GST_VIDEO_MULTIVIEW_MODE_NONE
+
+
+
+
+ multiview-mode field string from caps
+
+
+
+
+
+
+ The caps string representation of the mode, or NULL if invalid.
+
+Given a #GstVideoMultiviewMode returns the multiview-mode caps string
+for insertion into a caps structure
+
+
+
+
+ A #GstVideoMultiviewMode value
+
+
+
+
+
+ Utility function that transforms the width/height/PAR
+and multiview mode and flags of a #GstVideoInfo into
+the requested mode.
+
+
+
+
+
+ A #GstVideoInfo structure to operate on
+
+
+
+ A #GstVideoMultiviewMode value
+
+
+
+ A set of #GstVideoMultiviewFlags
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Make a new @method video scaler. @in_size source lines/pixels will
+be scaled to @out_size destination lines/pixels.
+
+@n_taps specifies the amount of pixels to use from the source for one output
+pixel. If n_taps is 0, this function chooses a good value automatically based
+on the @method and @in_size/@out_size.
+
+ a #GstVideoResample
+
+
+
+
+ a #GstVideoResamplerMethod
+
+
+
+ #GstVideoScalerFlags
+
+
+
+ number of taps to use
+
+
+
+ number of source elements
+
+
+
+ number of destination elements
+
+
+
+ extra options
+
+
+
+
+
+ Get the tile index of the tile at coordinates @x and @y in the tiled
+image of @x_tiles by @y_tiles.
+
+Use this method when @mode is of type %GST_VIDEO_TILE_MODE_INDEXED.
+
+ the index of the tile at @x and @y in the tiled image of
+ @x_tiles by @y_tiles.
+
+
+
+
+ a #GstVideoTileMode
+
+
+
+ x coordinate
+
+
+
+ y coordinate
+
+
+
+ number of horizontal tiles
+
+
+
+ number of vertical tiles
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/gstreamer-audio-sys/Cargo.toml b/gstreamer-audio-sys/Cargo.toml
new file mode 100644
index 000000000..98ef94ac9
--- /dev/null
+++ b/gstreamer-audio-sys/Cargo.toml
@@ -0,0 +1,52 @@
+[build-dependencies]
+pkg-config = "0.3.7"
+
+[dependencies]
+bitflags = "0.8"
+glib-sys = "0.3"
+gobject-sys = "0.3"
+libc = "0.2"
+
+[dependencies.gstreamer-sys]
+path = "../gstreamer-sys"
+version = "0.1.0"
+
+[dependencies.gstreamer-base-sys]
+path = "../gstreamer-base-sys"
+version = "0.1.0"
+
+[dependencies.gstreamer-tag-sys]
+path = "../gstreamer-tag-sys"
+version = "0.1.0"
+
+[features]
+v1_0_10 = []
+v1_10 = ["v1_8"]
+v1_12 = ["v1_10"]
+v1_2 = ["v1_0_10"]
+v1_2_3 = ["v1_2"]
+v1_4 = ["v1_2_3"]
+v1_6 = ["v1_4"]
+v1_8 = ["v1_6"]
+
+[lib]
+name = "gstreamer_audio_sys"
+
+[package]
+build = "build.rs"
+links = "gstaudio-1.0"
+name = "gstreamer-audio-sys"
+version = "0.1.0"
+authors = ["Sebastian Dröge "]
+description = "FFI bindings to libgstaudio-1.0"
+homepage = "https://gstreamer.freedesktop.org"
+keywords = ["ffi", "gstreamer", "gnome", "multimedia"]
+repository = "https://github.com/sdroege/gstreamer-sys"
+license = "MIT"
+
+include = [
+ "src/*.rs",
+ "Cargo.toml",
+ "build.rs",
+ "LICENSE",
+]
diff --git a/gstreamer-audio-sys/LICENSE b/gstreamer-audio-sys/LICENSE
new file mode 100644
index 000000000..3d76f6e2f
--- /dev/null
+++ b/gstreamer-audio-sys/LICENSE
@@ -0,0 +1,23 @@
+The MIT License (MIT)
+
+Copyright (c) 2017 Sebastian Dröge .
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
diff --git a/gstreamer-audio-sys/build.rs b/gstreamer-audio-sys/build.rs
new file mode 100644
index 000000000..5a879ca27
--- /dev/null
+++ b/gstreamer-audio-sys/build.rs
@@ -0,0 +1,68 @@
+extern crate pkg_config;
+
+use pkg_config::{Config, Error};
+use std::env;
+use std::io::prelude::*;
+use std::io;
+use std::process;
+
+fn main() {
+ if let Err(s) = find() {
+ let _ = writeln!(io::stderr(), "{}", s);
+ process::exit(1);
+ }
+}
+
+fn find() -> Result<(), Error> {
+ let package_name = "gstreamer-audio-1.0";
+ let shared_libs = ["gstaudio-1.0"];
+ let version = if cfg!(feature = "v1_12") {
+ "1.12"
+ } else if cfg!(feature = "v1_10") {
+ "1.10"
+ } else if cfg!(feature = "v1_8") {
+ "1.8"
+ } else if cfg!(feature = "v1_6") {
+ "1.6"
+ } else {
+ "1.0"
+ };
+
+ if let Ok(lib_dir) = env::var("GTK_LIB_DIR") {
+ for lib_ in shared_libs.iter() {
+ println!("cargo:rustc-link-lib=dylib={}", lib_);
+ }
+ println!("cargo:rustc-link-search=native={}", lib_dir);
+ return Ok(())
+ }
+
+ let target = env::var("TARGET").unwrap();
+ let hardcode_shared_libs = target.contains("windows");
+
+ let mut config = Config::new();
+ config.atleast_version(version);
+ if hardcode_shared_libs {
+ config.cargo_metadata(false);
+ }
+ match config.probe(package_name) {
+ Ok(library) => {
+ if hardcode_shared_libs {
+ for lib_ in shared_libs.iter() {
+ println!("cargo:rustc-link-lib=dylib={}", lib_);
+ }
+ for path in library.link_paths.iter() {
+ println!("cargo:rustc-link-search=native={}", path.to_str().unwrap());
+ }
+ }
+ Ok(())
+ }
+ Err(Error::EnvNoPkgConfig(_)) | Err(Error::Command { .. }) => {
+ for lib_ in shared_libs.iter() {
+ println!("cargo:rustc-link-lib=dylib={}", lib_);
+ }
+ Ok(())
+ }
+ Err(err) => Err(err),
+ }
+}
+
diff --git a/gstreamer-audio-sys/src/lib.rs b/gstreamer-audio-sys/src/lib.rs
new file mode 100644
index 000000000..6fb1da49f
--- /dev/null
+++ b/gstreamer-audio-sys/src/lib.rs
@@ -0,0 +1,1111 @@
+// This file was generated by gir (10e1d4f) from gir-files (???)
+// DO NOT EDIT
+
+#![allow(non_camel_case_types, non_upper_case_globals)]
+
+extern crate libc;
+#[macro_use] extern crate bitflags;
+extern crate glib_sys as glib;
+extern crate gobject_sys as gobject;
+extern crate gstreamer_sys as gst;
+extern crate gstreamer_base_sys as gst_base;
+extern crate gstreamer_tag_sys as gst_tag;
+
+#[allow(unused_imports)]
+use libc::{c_int, c_char, c_uchar, c_float, c_uint, c_double,
+ c_short, c_ushort, c_long, c_ulong,
+ c_void, size_t, ssize_t, time_t, FILE};
+
+#[allow(unused_imports)]
+use glib::{gboolean, gconstpointer, gpointer, GType, Volatile};
+
+// Enums
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioBaseSinkDiscontReason {
+ NoDiscont = 0,
+ NewCaps = 1,
+ Flush = 2,
+ SyncLatency = 3,
+ Alignment = 4,
+ DeviceFailure = 5,
+}
+pub const GST_AUDIO_BASE_SINK_DISCONT_REASON_NO_DISCONT: GstAudioBaseSinkDiscontReason = GstAudioBaseSinkDiscontReason::NoDiscont;
+pub const GST_AUDIO_BASE_SINK_DISCONT_REASON_NEW_CAPS: GstAudioBaseSinkDiscontReason = GstAudioBaseSinkDiscontReason::NewCaps;
+pub const GST_AUDIO_BASE_SINK_DISCONT_REASON_FLUSH: GstAudioBaseSinkDiscontReason = GstAudioBaseSinkDiscontReason::Flush;
+pub const GST_AUDIO_BASE_SINK_DISCONT_REASON_SYNC_LATENCY: GstAudioBaseSinkDiscontReason = GstAudioBaseSinkDiscontReason::SyncLatency;
+pub const GST_AUDIO_BASE_SINK_DISCONT_REASON_ALIGNMENT: GstAudioBaseSinkDiscontReason = GstAudioBaseSinkDiscontReason::Alignment;
+pub const GST_AUDIO_BASE_SINK_DISCONT_REASON_DEVICE_FAILURE: GstAudioBaseSinkDiscontReason = GstAudioBaseSinkDiscontReason::DeviceFailure;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioBaseSinkSlaveMethod {
+ Resample = 0,
+ Skew = 1,
+ None = 2,
+ Custom = 3,
+}
+pub const GST_AUDIO_BASE_SINK_SLAVE_RESAMPLE: GstAudioBaseSinkSlaveMethod = GstAudioBaseSinkSlaveMethod::Resample;
+pub const GST_AUDIO_BASE_SINK_SLAVE_SKEW: GstAudioBaseSinkSlaveMethod = GstAudioBaseSinkSlaveMethod::Skew;
+pub const GST_AUDIO_BASE_SINK_SLAVE_NONE: GstAudioBaseSinkSlaveMethod = GstAudioBaseSinkSlaveMethod::None;
+pub const GST_AUDIO_BASE_SINK_SLAVE_CUSTOM: GstAudioBaseSinkSlaveMethod = GstAudioBaseSinkSlaveMethod::Custom;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioBaseSrcSlaveMethod {
+ Resample = 0,
+ ReTimestamp = 1,
+ Skew = 2,
+ None = 3,
+}
+pub const GST_AUDIO_BASE_SRC_SLAVE_RESAMPLE: GstAudioBaseSrcSlaveMethod = GstAudioBaseSrcSlaveMethod::Resample;
+pub const GST_AUDIO_BASE_SRC_SLAVE_RE_TIMESTAMP: GstAudioBaseSrcSlaveMethod = GstAudioBaseSrcSlaveMethod::ReTimestamp;
+pub const GST_AUDIO_BASE_SRC_SLAVE_SKEW: GstAudioBaseSrcSlaveMethod = GstAudioBaseSrcSlaveMethod::Skew;
+pub const GST_AUDIO_BASE_SRC_SLAVE_NONE: GstAudioBaseSrcSlaveMethod = GstAudioBaseSrcSlaveMethod::None;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioCdSrcMode {
+ Normal = 0,
+ Continuous = 1,
+}
+pub const GST_AUDIO_CD_SRC_MODE_NORMAL: GstAudioCdSrcMode = GstAudioCdSrcMode::Normal;
+pub const GST_AUDIO_CD_SRC_MODE_CONTINUOUS: GstAudioCdSrcMode = GstAudioCdSrcMode::Continuous;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioChannelPosition {
+ None = -3,
+ Mono = -2,
+ Invalid = -1,
+ FrontLeft = 0,
+ FrontRight = 1,
+ FrontCenter = 2,
+ Lfe1 = 3,
+ RearLeft = 4,
+ RearRight = 5,
+ FrontLeftOfCenter = 6,
+ FrontRightOfCenter = 7,
+ RearCenter = 8,
+ Lfe2 = 9,
+ SideLeft = 10,
+ SideRight = 11,
+ TopFrontLeft = 12,
+ TopFrontRight = 13,
+ TopFrontCenter = 14,
+ TopCenter = 15,
+ TopRearLeft = 16,
+ TopRearRight = 17,
+ TopSideLeft = 18,
+ TopSideRight = 19,
+ TopRearCenter = 20,
+ BottomFrontCenter = 21,
+ BottomFrontLeft = 22,
+ BottomFrontRight = 23,
+ WideLeft = 24,
+ WideRight = 25,
+ SurroundLeft = 26,
+ SurroundRight = 27,
+}
+pub const GST_AUDIO_CHANNEL_POSITION_NONE: GstAudioChannelPosition = GstAudioChannelPosition::None;
+pub const GST_AUDIO_CHANNEL_POSITION_MONO: GstAudioChannelPosition = GstAudioChannelPosition::Mono;
+pub const GST_AUDIO_CHANNEL_POSITION_INVALID: GstAudioChannelPosition = GstAudioChannelPosition::Invalid;
+pub const GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT: GstAudioChannelPosition = GstAudioChannelPosition::FrontLeft;
+pub const GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT: GstAudioChannelPosition = GstAudioChannelPosition::FrontRight;
+pub const GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER: GstAudioChannelPosition = GstAudioChannelPosition::FrontCenter;
+pub const GST_AUDIO_CHANNEL_POSITION_LFE1: GstAudioChannelPosition = GstAudioChannelPosition::Lfe1;
+pub const GST_AUDIO_CHANNEL_POSITION_REAR_LEFT: GstAudioChannelPosition = GstAudioChannelPosition::RearLeft;
+pub const GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT: GstAudioChannelPosition = GstAudioChannelPosition::RearRight;
+pub const GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER: GstAudioChannelPosition = GstAudioChannelPosition::FrontLeftOfCenter;
+pub const GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER: GstAudioChannelPosition = GstAudioChannelPosition::FrontRightOfCenter;
+pub const GST_AUDIO_CHANNEL_POSITION_REAR_CENTER: GstAudioChannelPosition = GstAudioChannelPosition::RearCenter;
+pub const GST_AUDIO_CHANNEL_POSITION_LFE2: GstAudioChannelPosition = GstAudioChannelPosition::Lfe2;
+pub const GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT: GstAudioChannelPosition = GstAudioChannelPosition::SideLeft;
+pub const GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT: GstAudioChannelPosition = GstAudioChannelPosition::SideRight;
+pub const GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_LEFT: GstAudioChannelPosition = GstAudioChannelPosition::TopFrontLeft;
+pub const GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_RIGHT: GstAudioChannelPosition = GstAudioChannelPosition::TopFrontRight;
+pub const GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_CENTER: GstAudioChannelPosition = GstAudioChannelPosition::TopFrontCenter;
+pub const GST_AUDIO_CHANNEL_POSITION_TOP_CENTER: GstAudioChannelPosition = GstAudioChannelPosition::TopCenter;
+pub const GST_AUDIO_CHANNEL_POSITION_TOP_REAR_LEFT: GstAudioChannelPosition = GstAudioChannelPosition::TopRearLeft;
+pub const GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT: GstAudioChannelPosition = GstAudioChannelPosition::TopRearRight;
+pub const GST_AUDIO_CHANNEL_POSITION_TOP_SIDE_LEFT: GstAudioChannelPosition = GstAudioChannelPosition::TopSideLeft;
+pub const GST_AUDIO_CHANNEL_POSITION_TOP_SIDE_RIGHT: GstAudioChannelPosition = GstAudioChannelPosition::TopSideRight;
+pub const GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER: GstAudioChannelPosition = GstAudioChannelPosition::TopRearCenter;
+pub const GST_AUDIO_CHANNEL_POSITION_BOTTOM_FRONT_CENTER: GstAudioChannelPosition = GstAudioChannelPosition::BottomFrontCenter;
+pub const GST_AUDIO_CHANNEL_POSITION_BOTTOM_FRONT_LEFT: GstAudioChannelPosition = GstAudioChannelPosition::BottomFrontLeft;
+pub const GST_AUDIO_CHANNEL_POSITION_BOTTOM_FRONT_RIGHT: GstAudioChannelPosition = GstAudioChannelPosition::BottomFrontRight;
+pub const GST_AUDIO_CHANNEL_POSITION_WIDE_LEFT: GstAudioChannelPosition = GstAudioChannelPosition::WideLeft;
+pub const GST_AUDIO_CHANNEL_POSITION_WIDE_RIGHT: GstAudioChannelPosition = GstAudioChannelPosition::WideRight;
+pub const GST_AUDIO_CHANNEL_POSITION_SURROUND_LEFT: GstAudioChannelPosition = GstAudioChannelPosition::SurroundLeft;
+pub const GST_AUDIO_CHANNEL_POSITION_SURROUND_RIGHT: GstAudioChannelPosition = GstAudioChannelPosition::SurroundRight;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioDitherMethod {
+ None = 0,
+ Rpdf = 1,
+ Tpdf = 2,
+ TpdfHf = 3,
+}
+pub const GST_AUDIO_DITHER_NONE: GstAudioDitherMethod = GstAudioDitherMethod::None;
+pub const GST_AUDIO_DITHER_RPDF: GstAudioDitherMethod = GstAudioDitherMethod::Rpdf;
+pub const GST_AUDIO_DITHER_TPDF: GstAudioDitherMethod = GstAudioDitherMethod::Tpdf;
+pub const GST_AUDIO_DITHER_TPDF_HF: GstAudioDitherMethod = GstAudioDitherMethod::TpdfHf;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioFormat {
+ Unknown = 0,
+ Encoded = 1,
+ S8 = 2,
+ U8 = 3,
+ S16le = 4,
+ S16be = 5,
+ U16le = 6,
+ U16be = 7,
+ S2432le = 8,
+ S2432be = 9,
+ U2432le = 10,
+ U2432be = 11,
+ S32le = 12,
+ S32be = 13,
+ U32le = 14,
+ U32be = 15,
+ S24le = 16,
+ S24be = 17,
+ U24le = 18,
+ U24be = 19,
+ S20le = 20,
+ S20be = 21,
+ U20le = 22,
+ U20be = 23,
+ S18le = 24,
+ S18be = 25,
+ U18le = 26,
+ U18be = 27,
+ F32le = 28,
+ F32be = 29,
+ F64le = 30,
+ F64be = 31,
+}
+pub const GST_AUDIO_FORMAT_UNKNOWN: GstAudioFormat = GstAudioFormat::Unknown;
+pub const GST_AUDIO_FORMAT_ENCODED: GstAudioFormat = GstAudioFormat::Encoded;
+pub const GST_AUDIO_FORMAT_S8: GstAudioFormat = GstAudioFormat::S8;
+pub const GST_AUDIO_FORMAT_U8: GstAudioFormat = GstAudioFormat::U8;
+pub const GST_AUDIO_FORMAT_S16LE: GstAudioFormat = GstAudioFormat::S16le;
+pub const GST_AUDIO_FORMAT_S16BE: GstAudioFormat = GstAudioFormat::S16be;
+pub const GST_AUDIO_FORMAT_U16LE: GstAudioFormat = GstAudioFormat::U16le;
+pub const GST_AUDIO_FORMAT_U16BE: GstAudioFormat = GstAudioFormat::U16be;
+pub const GST_AUDIO_FORMAT_S24_32LE: GstAudioFormat = GstAudioFormat::S2432le;
+pub const GST_AUDIO_FORMAT_S24_32BE: GstAudioFormat = GstAudioFormat::S2432be;
+pub const GST_AUDIO_FORMAT_U24_32LE: GstAudioFormat = GstAudioFormat::U2432le;
+pub const GST_AUDIO_FORMAT_U24_32BE: GstAudioFormat = GstAudioFormat::U2432be;
+pub const GST_AUDIO_FORMAT_S32LE: GstAudioFormat = GstAudioFormat::S32le;
+pub const GST_AUDIO_FORMAT_S32BE: GstAudioFormat = GstAudioFormat::S32be;
+pub const GST_AUDIO_FORMAT_U32LE: GstAudioFormat = GstAudioFormat::U32le;
+pub const GST_AUDIO_FORMAT_U32BE: GstAudioFormat = GstAudioFormat::U32be;
+pub const GST_AUDIO_FORMAT_S24LE: GstAudioFormat = GstAudioFormat::S24le;
+pub const GST_AUDIO_FORMAT_S24BE: GstAudioFormat = GstAudioFormat::S24be;
+pub const GST_AUDIO_FORMAT_U24LE: GstAudioFormat = GstAudioFormat::U24le;
+pub const GST_AUDIO_FORMAT_U24BE: GstAudioFormat = GstAudioFormat::U24be;
+pub const GST_AUDIO_FORMAT_S20LE: GstAudioFormat = GstAudioFormat::S20le;
+pub const GST_AUDIO_FORMAT_S20BE: GstAudioFormat = GstAudioFormat::S20be;
+pub const GST_AUDIO_FORMAT_U20LE: GstAudioFormat = GstAudioFormat::U20le;
+pub const GST_AUDIO_FORMAT_U20BE: GstAudioFormat = GstAudioFormat::U20be;
+pub const GST_AUDIO_FORMAT_S18LE: GstAudioFormat = GstAudioFormat::S18le;
+pub const GST_AUDIO_FORMAT_S18BE: GstAudioFormat = GstAudioFormat::S18be;
+pub const GST_AUDIO_FORMAT_U18LE: GstAudioFormat = GstAudioFormat::U18le;
+pub const GST_AUDIO_FORMAT_U18BE: GstAudioFormat = GstAudioFormat::U18be;
+pub const GST_AUDIO_FORMAT_F32LE: GstAudioFormat = GstAudioFormat::F32le;
+pub const GST_AUDIO_FORMAT_F32BE: GstAudioFormat = GstAudioFormat::F32be;
+pub const GST_AUDIO_FORMAT_F64LE: GstAudioFormat = GstAudioFormat::F64le;
+pub const GST_AUDIO_FORMAT_F64BE: GstAudioFormat = GstAudioFormat::F64be;
+pub const GST_AUDIO_FORMAT_S16: GstAudioFormat = GstAudioFormat::S16le;
+pub const GST_AUDIO_FORMAT_U16: GstAudioFormat = GstAudioFormat::U16le;
+pub const GST_AUDIO_FORMAT_S24_32: GstAudioFormat = GstAudioFormat::S2432le;
+pub const GST_AUDIO_FORMAT_U24_32: GstAudioFormat = GstAudioFormat::U2432le;
+pub const GST_AUDIO_FORMAT_S32: GstAudioFormat = GstAudioFormat::S32le;
+pub const GST_AUDIO_FORMAT_U32: GstAudioFormat = GstAudioFormat::U32le;
+pub const GST_AUDIO_FORMAT_S24: GstAudioFormat = GstAudioFormat::S24le;
+pub const GST_AUDIO_FORMAT_U24: GstAudioFormat = GstAudioFormat::U24le;
+pub const GST_AUDIO_FORMAT_S20: GstAudioFormat = GstAudioFormat::S20le;
+pub const GST_AUDIO_FORMAT_U20: GstAudioFormat = GstAudioFormat::U20le;
+pub const GST_AUDIO_FORMAT_S18: GstAudioFormat = GstAudioFormat::S18le;
+pub const GST_AUDIO_FORMAT_U18: GstAudioFormat = GstAudioFormat::U18le;
+pub const GST_AUDIO_FORMAT_F32: GstAudioFormat = GstAudioFormat::F32le;
+pub const GST_AUDIO_FORMAT_F64: GstAudioFormat = GstAudioFormat::F64le;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioLayout {
+ Interleaved = 0,
+ NonInterleaved = 1,
+}
+pub const GST_AUDIO_LAYOUT_INTERLEAVED: GstAudioLayout = GstAudioLayout::Interleaved;
+pub const GST_AUDIO_LAYOUT_NON_INTERLEAVED: GstAudioLayout = GstAudioLayout::NonInterleaved;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioNoiseShapingMethod {
+ None = 0,
+ ErrorFeedback = 1,
+ Simple = 2,
+ Medium = 3,
+ High = 4,
+}
+pub const GST_AUDIO_NOISE_SHAPING_NONE: GstAudioNoiseShapingMethod = GstAudioNoiseShapingMethod::None;
+pub const GST_AUDIO_NOISE_SHAPING_ERROR_FEEDBACK: GstAudioNoiseShapingMethod = GstAudioNoiseShapingMethod::ErrorFeedback;
+pub const GST_AUDIO_NOISE_SHAPING_SIMPLE: GstAudioNoiseShapingMethod = GstAudioNoiseShapingMethod::Simple;
+pub const GST_AUDIO_NOISE_SHAPING_MEDIUM: GstAudioNoiseShapingMethod = GstAudioNoiseShapingMethod::Medium;
+pub const GST_AUDIO_NOISE_SHAPING_HIGH: GstAudioNoiseShapingMethod = GstAudioNoiseShapingMethod::High;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioResamplerFilterInterpolation {
+ None = 0,
+ Linear = 1,
+ Cubic = 2,
+}
+pub const GST_AUDIO_RESAMPLER_FILTER_INTERPOLATION_NONE: GstAudioResamplerFilterInterpolation = GstAudioResamplerFilterInterpolation::None;
+pub const GST_AUDIO_RESAMPLER_FILTER_INTERPOLATION_LINEAR: GstAudioResamplerFilterInterpolation = GstAudioResamplerFilterInterpolation::Linear;
+pub const GST_AUDIO_RESAMPLER_FILTER_INTERPOLATION_CUBIC: GstAudioResamplerFilterInterpolation = GstAudioResamplerFilterInterpolation::Cubic;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioResamplerFilterMode {
+ Interpolated = 0,
+ Full = 1,
+ Auto = 2,
+}
+pub const GST_AUDIO_RESAMPLER_FILTER_MODE_INTERPOLATED: GstAudioResamplerFilterMode = GstAudioResamplerFilterMode::Interpolated;
+pub const GST_AUDIO_RESAMPLER_FILTER_MODE_FULL: GstAudioResamplerFilterMode = GstAudioResamplerFilterMode::Full;
+pub const GST_AUDIO_RESAMPLER_FILTER_MODE_AUTO: GstAudioResamplerFilterMode = GstAudioResamplerFilterMode::Auto;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioResamplerMethod {
+ Nearest = 0,
+ Linear = 1,
+ Cubic = 2,
+ BlackmanNuttall = 3,
+ Kaiser = 4,
+}
+pub const GST_AUDIO_RESAMPLER_METHOD_NEAREST: GstAudioResamplerMethod = GstAudioResamplerMethod::Nearest;
+pub const GST_AUDIO_RESAMPLER_METHOD_LINEAR: GstAudioResamplerMethod = GstAudioResamplerMethod::Linear;
+pub const GST_AUDIO_RESAMPLER_METHOD_CUBIC: GstAudioResamplerMethod = GstAudioResamplerMethod::Cubic;
+pub const GST_AUDIO_RESAMPLER_METHOD_BLACKMAN_NUTTALL: GstAudioResamplerMethod = GstAudioResamplerMethod::BlackmanNuttall;
+pub const GST_AUDIO_RESAMPLER_METHOD_KAISER: GstAudioResamplerMethod = GstAudioResamplerMethod::Kaiser;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioRingBufferFormatType {
+ Raw = 0,
+ MuLaw = 1,
+ ALaw = 2,
+ ImaAdpcm = 3,
+ Mpeg = 4,
+ Gsm = 5,
+ Iec958 = 6,
+ Ac3 = 7,
+ Eac3 = 8,
+ Dts = 9,
+ Mpeg2Aac = 10,
+ Mpeg4Aac = 11,
+ Mpeg2AacRaw = 12,
+ Mpeg4AacRaw = 13,
+ Flac = 14,
+}
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_RAW: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::Raw;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MU_LAW: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::MuLaw;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_A_LAW: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::ALaw;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_IMA_ADPCM: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::ImaAdpcm;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::Mpeg;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_GSM: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::Gsm;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_IEC958: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::Iec958;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_AC3: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::Ac3;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_EAC3: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::Eac3;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_DTS: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::Dts;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG2_AAC: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::Mpeg2Aac;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG4_AAC: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::Mpeg4Aac;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG2_AAC_RAW: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::Mpeg2AacRaw;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG4_AAC_RAW: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::Mpeg4AacRaw;
+pub const GST_AUDIO_RING_BUFFER_FORMAT_TYPE_FLAC: GstAudioRingBufferFormatType = GstAudioRingBufferFormatType::Flac;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioRingBufferState {
+ Stopped = 0,
+ Paused = 1,
+ Started = 2,
+ Error = 3,
+}
+pub const GST_AUDIO_RING_BUFFER_STATE_STOPPED: GstAudioRingBufferState = GstAudioRingBufferState::Stopped;
+pub const GST_AUDIO_RING_BUFFER_STATE_PAUSED: GstAudioRingBufferState = GstAudioRingBufferState::Paused;
+pub const GST_AUDIO_RING_BUFFER_STATE_STARTED: GstAudioRingBufferState = GstAudioRingBufferState::Started;
+pub const GST_AUDIO_RING_BUFFER_STATE_ERROR: GstAudioRingBufferState = GstAudioRingBufferState::Error;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstStreamVolumeFormat {
+ Linear = 0,
+ Cubic = 1,
+ Db = 2,
+}
+pub const GST_STREAM_VOLUME_FORMAT_LINEAR: GstStreamVolumeFormat = GstStreamVolumeFormat::Linear;
+pub const GST_STREAM_VOLUME_FORMAT_CUBIC: GstStreamVolumeFormat = GstStreamVolumeFormat::Cubic;
+pub const GST_STREAM_VOLUME_FORMAT_DB: GstStreamVolumeFormat = GstStreamVolumeFormat::Db;
+
+// Constants
+pub const GST_AUDIO_CHANNELS_RANGE: *const c_char = b"(int) [ 1, max ]\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_CONVERTER_OPT_DITHER_METHOD: *const c_char = b"GstAudioConverter.dither-method\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_CONVERTER_OPT_NOISE_SHAPING_METHOD: *const c_char = b"GstAudioConverter.noise-shaping-method\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_CONVERTER_OPT_QUANTIZATION: *const c_char = b"GstAudioConverter.quantization\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_CONVERTER_OPT_RESAMPLER_METHOD: *const c_char = b"GstAudioConverter.resampler-method\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_DECODER_MAX_ERRORS: c_int = 10;
+pub const GST_AUDIO_DECODER_SINK_NAME: *const c_char = b"sink\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_DECODER_SRC_NAME: *const c_char = b"src\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_DEF_CHANNELS: c_int = 2;
+pub const GST_AUDIO_DEF_FORMAT: *const c_char = b"S16LE\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_DEF_RATE: c_int = 44100;
+pub const GST_AUDIO_ENCODER_SINK_NAME: *const c_char = b"sink\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_ENCODER_SRC_NAME: *const c_char = b"src\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_FORMATS_ALL: *const c_char = b" { S8, U8, S16LE, S16BE, U16LE, U16BE, S24_32LE, S24_32BE, U24_32LE, U24_32BE, S32LE, S32BE, U32LE, U32BE, S24LE, S24BE, U24LE, U24BE, S20LE, S20BE, U20LE, U20BE, S18LE, S18BE, U18LE, U18BE, F32LE, F32BE, F64LE, F64BE }\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RATE_RANGE: *const c_char = b"(int) [ 1, max ]\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RESAMPLER_OPT_CUBIC_B: *const c_char = b"GstAudioResampler.cubic-b\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RESAMPLER_OPT_CUBIC_C: *const c_char = b"GstAudioResampler.cubic-c\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RESAMPLER_OPT_CUTOFF: *const c_char = b"GstAudioResampler.cutoff\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RESAMPLER_OPT_FILTER_INTERPOLATION: *const c_char = b"GstAudioResampler.filter-interpolation\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RESAMPLER_OPT_FILTER_MODE: *const c_char = b"GstAudioResampler.filter-mode\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RESAMPLER_OPT_FILTER_MODE_THRESHOLD: *const c_char = b"GstAudioResampler.filter-mode-threshold\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RESAMPLER_OPT_FILTER_OVERSAMPLE: *const c_char = b"GstAudioResampler.filter-oversample\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RESAMPLER_OPT_MAX_PHASE_ERROR: *const c_char = b"GstAudioResampler.max-phase-error\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RESAMPLER_OPT_N_TAPS: *const c_char = b"GstAudioResampler.n-taps\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RESAMPLER_OPT_STOP_ATTENUATION: *const c_char = b"GstAudioResampler.stop-attenutation\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RESAMPLER_OPT_TRANSITION_BANDWIDTH: *const c_char = b"GstAudioResampler.transition-bandwidth\0" as *const u8 as *const c_char;
+pub const GST_AUDIO_RESAMPLER_QUALITY_DEFAULT: c_int = 4;
+pub const GST_AUDIO_RESAMPLER_QUALITY_MAX: c_int = 10;
+pub const GST_AUDIO_RESAMPLER_QUALITY_MIN: c_int = 0;
+pub const GST_META_TAG_AUDIO_CHANNELS_STR: *const c_char = b"channels\0" as *const u8 as *const c_char;
+pub const GST_META_TAG_AUDIO_RATE_STR: *const c_char = b"rate\0" as *const u8 as *const c_char;
+pub const GST_META_TAG_AUDIO_STR: *const c_char = b"audio\0" as *const u8 as *const c_char;
+
+// Flags
+bitflags! {
+ #[repr(C)]
+ pub flags GstAudioChannelMixerFlags: c_uint {
+ const GST_AUDIO_CHANNEL_MIXER_FLAGS_NONE = 0,
+ const GST_AUDIO_CHANNEL_MIXER_FLAGS_NON_INTERLEAVED_IN = 1,
+ const GST_AUDIO_CHANNEL_MIXER_FLAGS_NON_INTERLEAVED_OUT = 2,
+ const GST_AUDIO_CHANNEL_MIXER_FLAGS_UNPOSITIONED_IN = 4,
+ const GST_AUDIO_CHANNEL_MIXER_FLAGS_UNPOSITIONED_OUT = 8,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstAudioConverterFlags: c_uint {
+ const GST_AUDIO_CONVERTER_FLAG_NONE = 0,
+ const GST_AUDIO_CONVERTER_FLAG_IN_WRITABLE = 1,
+ const GST_AUDIO_CONVERTER_FLAG_VARIABLE_RATE = 2,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstAudioFlags: c_uint {
+ const GST_AUDIO_FLAG_NONE = 0,
+ const GST_AUDIO_FLAG_UNPOSITIONED = 1,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstAudioFormatFlags: c_uint {
+ const GST_AUDIO_FORMAT_FLAG_INTEGER = 1,
+ const GST_AUDIO_FORMAT_FLAG_FLOAT = 2,
+ const GST_AUDIO_FORMAT_FLAG_SIGNED = 4,
+ const GST_AUDIO_FORMAT_FLAG_COMPLEX = 16,
+ const GST_AUDIO_FORMAT_FLAG_UNPACK = 32,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstAudioPackFlags: c_uint {
+ const GST_AUDIO_PACK_FLAG_NONE = 0,
+ const GST_AUDIO_PACK_FLAG_TRUNCATE_RANGE = 1,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstAudioQuantizeFlags: c_uint {
+ const GST_AUDIO_QUANTIZE_FLAG_NONE = 0,
+ const GST_AUDIO_QUANTIZE_FLAG_NON_INTERLEAVED = 1,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstAudioResamplerFlags: c_uint {
+ const GST_AUDIO_RESAMPLER_FLAG_NONE = 0,
+ const GST_AUDIO_RESAMPLER_FLAG_NON_INTERLEAVED_IN = 1,
+ const GST_AUDIO_RESAMPLER_FLAG_NON_INTERLEAVED_OUT = 2,
+ const GST_AUDIO_RESAMPLER_FLAG_VARIABLE_RATE = 4,
+ }
+}
+
+// Callbacks
+//
+// NOTE(review): the generic parameters of these `Option<...>` aliases were
+// stripped by the tool that extracted this patch (every `<...>` span in the
+// generated file is missing; the same damage appears in struct fields
+// below).  The signatures here are reconstructed from the corresponding C
+// typedefs in gst-plugins-base (gstaudiobasesink.h, gstaudioclock.h,
+// audio-format.h, gstaudioringbuffer.h) — verify against the original
+// gir-generated output before relying on them.
+pub type GstAudioBaseSinkCustomSlavingCallback = Option<unsafe extern "C" fn(sink: *mut GstAudioBaseSink, etime: gst::GstClockTime, itime: gst::GstClockTime, requested_skew: *mut gst::GstClockTimeDiff, discont_reason: GstAudioBaseSinkDiscontReason, user_data: gpointer)>;
+pub type GstAudioClockGetTimeFunc = Option<unsafe extern "C" fn(clock: *mut gst::GstClock, user_data: gpointer) -> gst::GstClockTime>;
+pub type GstAudioFormatPack = Option<unsafe extern "C" fn(info: *const GstAudioFormatInfo, flags: GstAudioPackFlags, src: gpointer, data: gpointer, length: c_int)>;
+pub type GstAudioFormatUnpack = Option<unsafe extern "C" fn(info: *const GstAudioFormatInfo, flags: GstAudioPackFlags, dest: gpointer, data: gpointer, length: c_int)>;
+pub type GstAudioRingBufferCallback = Option<unsafe extern "C" fn(rbuf: *mut GstAudioRingBuffer, data: *mut u8, len: c_uint, user_data: gpointer)>;
+
+// Records
+#[repr(C)]
+pub struct GstAudioBaseSinkClass {
+ pub parent_class: gst_base::GstBaseSinkClass,
+ pub create_ringbuffer: Option *mut GstAudioRingBuffer>,
+ pub payload: Option *mut gst::GstBuffer>,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioBaseSinkPrivate(c_void);
+
+#[repr(C)]
+pub struct GstAudioBaseSrcClass {
+ pub parent_class: gst_base::GstPushSrcClass,
+ pub create_ringbuffer: Option *mut GstAudioRingBuffer>,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioBaseSrcPrivate(c_void);
+
+#[repr(C)]
+pub struct GstAudioCdSrcClass {
+ pub pushsrc_class: gst_base::GstPushSrcClass,
+ pub open: Option gboolean>,
+ pub close: Option,
+ pub read_sector: Option *mut gst::GstBuffer>,
+ _gst_reserved: [gpointer; 20],
+}
+
+#[repr(C)]
+pub struct GstAudioCdSrcPrivate(c_void);
+
+#[repr(C)]
+pub struct GstAudioCdSrcTrack {
+ pub is_audio: gboolean,
+ pub num: c_uint,
+ pub start: c_uint,
+ pub end: c_uint,
+ pub tags: *mut gst::GstTagList,
+ _gst_reserved1: [c_uint; 2],
+ _gst_reserved2: [gpointer; 2],
+}
+
+#[repr(C)]
+pub struct GstAudioChannelMixer(c_void);
+
+#[repr(C)]
+pub struct GstAudioClippingMeta {
+ pub meta: gst::GstMeta,
+ pub format: gst::GstFormat,
+ pub start: u64,
+ pub end: u64,
+}
+
+#[repr(C)]
+pub struct GstAudioClockClass {
+ pub parent_class: gst::GstSystemClockClass,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioConverter(c_void);
+
+#[repr(C)]
+pub struct GstAudioDecoderClass {
+ pub element_class: gst::GstElementClass,
+ pub start: Option gboolean>,
+ pub stop: Option gboolean>,
+ pub set_format: Option gboolean>,
+ pub parse: Option gst::GstFlowReturn>,
+ pub handle_frame: Option gst::GstFlowReturn>,
+ pub flush: Option,
+ pub pre_push: Option gst::GstFlowReturn>,
+ pub sink_event: Option gboolean>,
+ pub src_event: Option gboolean>,
+ pub open: Option gboolean>,
+ pub close: Option gboolean>,
+ pub negotiate: Option gboolean>,
+ pub decide_allocation: Option gboolean>,
+ pub propose_allocation: Option gboolean>,
+ pub sink_query: Option gboolean>,
+ pub src_query: Option gboolean>,
+ pub getcaps: Option *mut gst::GstCaps>,
+ pub transform_meta: Option gboolean>,
+ _gst_reserved: [gpointer; 16],
+}
+
+#[repr(C)]
+pub struct GstAudioDecoderPrivate(c_void);
+
+#[repr(C)]
+pub struct GstAudioDownmixMeta {
+ pub meta: gst::GstMeta,
+ pub from_position: *mut GstAudioChannelPosition,
+ pub to_position: *mut GstAudioChannelPosition,
+ pub from_channels: c_int,
+ pub to_channels: c_int,
+ pub matrix: *mut *mut c_float,
+}
+
+#[repr(C)]
+pub struct GstAudioEncoderClass {
+ pub element_class: gst::GstElementClass,
+ pub start: Option gboolean>,
+ pub stop: Option gboolean>,
+ pub set_format: Option gboolean>,
+ pub handle_frame: Option gst::GstFlowReturn>,
+ pub flush: Option,
+ pub pre_push: Option gst::GstFlowReturn>,
+ pub sink_event: Option gboolean>,
+ pub src_event: Option gboolean>,
+ pub getcaps: Option *mut gst::GstCaps>,
+ pub open: Option gboolean>,
+ pub close: Option gboolean>,
+ pub negotiate: Option gboolean>,
+ pub decide_allocation: Option gboolean>,
+ pub propose_allocation: Option gboolean>,
+ pub transform_meta: Option gboolean>,
+ pub sink_query: Option gboolean>,
+ pub src_query: Option gboolean>,
+ _gst_reserved: [gpointer; 17],
+}
+
+#[repr(C)]
+pub struct GstAudioEncoderPrivate(c_void);
+
+#[repr(C)]
+pub struct GstAudioFilterClass {
+ pub basetransformclass: gst_base::GstBaseTransformClass,
+ pub setup: Option gboolean>,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioFormatInfo {
+ pub format: GstAudioFormat,
+ pub name: *const c_char,
+ pub description: *const c_char,
+ pub flags: GstAudioFormatFlags,
+ pub endianness: c_int,
+ pub width: c_int,
+ pub depth: c_int,
+ pub silence: [u8; 8],
+ pub unpack_format: GstAudioFormat,
+ pub unpack_func: GstAudioFormatUnpack,
+ pub pack_func: GstAudioFormatPack,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioInfo {
+ pub finfo: *const GstAudioFormatInfo,
+ pub flags: GstAudioFlags,
+ pub layout: GstAudioLayout,
+ pub rate: c_int,
+ pub channels: c_int,
+ pub bpf: c_int,
+ pub position: [GstAudioChannelPosition; 64],
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioQuantize(c_void);
+
+#[repr(C)]
+pub struct GstAudioResampler(c_void);
+
+#[repr(C)]
+pub struct GstAudioRingBufferClass {
+ pub parent_class: gst::GstObjectClass,
+ pub open_device: Option gboolean>,
+ pub acquire: Option gboolean>,
+ pub release: Option gboolean>,
+ pub close_device: Option gboolean>,
+ pub start: Option gboolean>,
+ pub pause: Option gboolean>,
+ pub resume: Option gboolean>,
+ pub stop: Option gboolean>,
+ pub delay: Option c_uint>,
+ pub activate: Option gboolean>,
+ pub commit: Option c_uint>,
+ pub clear_all: Option,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioRingBufferSpec {
+ pub caps: *mut gst::GstCaps,
+ pub type_: GstAudioRingBufferFormatType,
+ pub info: GstAudioInfo,
+ pub latency_time: u64,
+ pub buffer_time: u64,
+ pub segsize: c_int,
+ pub segtotal: c_int,
+ pub seglatency: c_int,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioSinkClass {
+ pub parent_class: GstAudioBaseSinkClass,
+ pub open: Option gboolean>,
+ pub prepare: Option gboolean>,
+ pub unprepare: Option gboolean>,
+ pub close: Option gboolean>,
+ pub write: Option c_int>,
+ pub delay: Option c_uint>,
+ pub reset: Option,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioSrcClass {
+ pub parent_class: GstAudioBaseSrcClass,
+ pub open: Option gboolean>,
+ pub prepare: Option gboolean>,
+ pub unprepare: Option gboolean>,
+ pub close: Option gboolean>,
+ pub read: Option c_uint>,
+ pub delay: Option c_uint>,
+ pub reset: Option,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstStreamVolumeInterface {
+ pub iface: gobject::GTypeInterface,
+}
+
+// Classes
+#[repr(C)]
+pub struct GstAudioBaseSink {
+ pub element: gst_base::GstBaseSink,
+ pub ringbuffer: *mut GstAudioRingBuffer,
+ pub buffer_time: u64,
+ pub latency_time: u64,
+ pub next_sample: u64,
+ pub provided_clock: *mut gst::GstClock,
+ pub eos_rendering: gboolean,
+ priv_: *mut GstAudioBaseSinkPrivate,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioBaseSrc {
+ pub element: gst_base::GstPushSrc,
+ pub ringbuffer: *mut GstAudioRingBuffer,
+ pub buffer_time: gst::GstClockTime,
+ pub latency_time: gst::GstClockTime,
+ pub next_sample: u64,
+ pub clock: *mut gst::GstClock,
+ priv_: *mut GstAudioBaseSrcPrivate,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioCdSrc {
+ pub pushsrc: gst_base::GstPushSrc,
+ pub tags: *mut gst::GstTagList,
+ priv_: *mut GstAudioCdSrcPrivate,
+ _gst_reserved1: [c_uint; 2],
+ _gst_reserved2: [gpointer; 2],
+}
+
+#[repr(C)]
+pub struct GstAudioClock {
+ pub clock: gst::GstSystemClock,
+ pub func: GstAudioClockGetTimeFunc,
+ pub user_data: gpointer,
+ pub destroy_notify: glib::GDestroyNotify,
+ last_time: gst::GstClockTime,
+ time_offset: gst::GstClockTimeDiff,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioDecoder {
+ pub element: gst::GstElement,
+ pub sinkpad: *mut gst::GstPad,
+ pub srcpad: *mut gst::GstPad,
+ pub stream_lock: glib::GRecMutex,
+ pub input_segment: gst::GstSegment,
+ pub output_segment: gst::GstSegment,
+ priv_: *mut GstAudioDecoderPrivate,
+ _gst_reserved: [gpointer; 20],
+}
+
+#[repr(C)]
+pub struct GstAudioEncoder {
+ pub element: gst::GstElement,
+ pub sinkpad: *mut gst::GstPad,
+ pub srcpad: *mut gst::GstPad,
+ pub stream_lock: glib::GRecMutex,
+ pub input_segment: gst::GstSegment,
+ pub output_segment: gst::GstSegment,
+ priv_: *mut GstAudioEncoderPrivate,
+ _gst_reserved: [gpointer; 20],
+}
+
+#[repr(C)]
+pub struct GstAudioFilter {
+ pub basetransform: gst_base::GstBaseTransform,
+ pub info: GstAudioInfo,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioRingBuffer {
+ pub object: gst::GstObject,
+ pub cond: glib::GCond,
+ pub open: gboolean,
+ pub acquired: gboolean,
+ pub memory: *mut u8,
+ pub size: size_t,
+ pub timestamps: *mut gst::GstClockTime,
+ pub spec: GstAudioRingBufferSpec,
+ pub samples_per_seg: c_int,
+ pub empty_seg: *mut u8,
+ pub state: c_int,
+ pub segdone: c_int,
+ pub segbase: c_int,
+ pub waiting: c_int,
+ callback: GstAudioRingBufferCallback,
+ cb_data: gpointer,
+ need_reorder: gboolean,
+ channel_reorder_map: [c_int; 64],
+ flushing: gboolean,
+ may_start: c_int,
+ active: gboolean,
+ cb_data_notify: glib::GDestroyNotify,
+ _gst_reserved: [gpointer; 3],
+}
+
+#[repr(C)]
+pub struct GstAudioSink {
+ pub element: GstAudioBaseSink,
+ thread: *mut glib::GThread,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstAudioSrc {
+ pub element: GstAudioBaseSrc,
+ thread: *mut glib::GThread,
+ _gst_reserved: [gpointer; 4],
+}
+
+// Interfaces
+#[repr(C)]
+pub struct GstStreamVolume(c_void);
+
+extern "C" {
+
+ //=========================================================================
+ // GstAudioFormat
+ //=========================================================================
+ pub fn gst_audio_format_build_integer(sign: gboolean, endianness: c_int, width: c_int, depth: c_int) -> GstAudioFormat;
+ pub fn gst_audio_format_fill_silence(info: *const GstAudioFormatInfo, dest: gpointer, length: size_t);
+ pub fn gst_audio_format_from_string(format: *const c_char) -> GstAudioFormat;
+ pub fn gst_audio_format_get_info(format: GstAudioFormat) -> *const GstAudioFormatInfo;
+ pub fn gst_audio_format_to_string(format: GstAudioFormat) -> *const c_char;
+
+ //=========================================================================
+ // GstAudioChannelMixer
+ //=========================================================================
+ pub fn gst_audio_channel_mixer_free(mix: *mut GstAudioChannelMixer);
+ pub fn gst_audio_channel_mixer_is_passthrough(mix: *mut GstAudioChannelMixer) -> gboolean;
+ pub fn gst_audio_channel_mixer_samples(mix: *mut GstAudioChannelMixer, in_: gpointer, out: gpointer, samples: c_int);
+ pub fn gst_audio_channel_mixer_new(flags: GstAudioChannelMixerFlags, format: GstAudioFormat, in_channels: c_int, in_position: *mut GstAudioChannelPosition, out_channels: c_int, out_position: *mut GstAudioChannelPosition) -> *mut GstAudioChannelMixer;
+
+ //=========================================================================
+ // GstAudioClippingMeta
+ //=========================================================================
+ pub fn gst_audio_clipping_meta_get_info() -> *const gst::GstMetaInfo;
+
+ //=========================================================================
+ // GstAudioConverter
+ //=========================================================================
+ pub fn gst_audio_converter_free(convert: *mut GstAudioConverter);
+ pub fn gst_audio_converter_get_config(convert: *mut GstAudioConverter, in_rate: *mut c_int, out_rate: *mut c_int) -> *const gst::GstStructure;
+ pub fn gst_audio_converter_get_in_frames(convert: *mut GstAudioConverter, out_frames: size_t) -> size_t;
+ pub fn gst_audio_converter_get_max_latency(convert: *mut GstAudioConverter) -> size_t;
+ pub fn gst_audio_converter_get_out_frames(convert: *mut GstAudioConverter, in_frames: size_t) -> size_t;
+ pub fn gst_audio_converter_reset(convert: *mut GstAudioConverter);
+ pub fn gst_audio_converter_samples(convert: *mut GstAudioConverter, flags: GstAudioConverterFlags, in_: gpointer, in_frames: size_t, out: gpointer, out_frames: size_t) -> gboolean;
+ pub fn gst_audio_converter_supports_inplace(convert: *mut GstAudioConverter) -> gboolean;
+ pub fn gst_audio_converter_update_config(convert: *mut GstAudioConverter, in_rate: c_int, out_rate: c_int, config: *mut gst::GstStructure) -> gboolean;
+ pub fn gst_audio_converter_new(flags: GstAudioConverterFlags, in_info: *mut GstAudioInfo, out_info: *mut GstAudioInfo, config: *mut gst::GstStructure) -> *mut GstAudioConverter;
+
+ //=========================================================================
+ // GstAudioDownmixMeta
+ //=========================================================================
+ pub fn gst_audio_downmix_meta_get_info() -> *const gst::GstMetaInfo;
+
+ //=========================================================================
+ // GstAudioFilterClass
+ //=========================================================================
+ pub fn gst_audio_filter_class_add_pad_templates(klass: *mut GstAudioFilterClass, allowed_caps: *mut gst::GstCaps);
+
+ //=========================================================================
+ // GstAudioInfo
+ //=========================================================================
+ pub fn gst_audio_info_get_type() -> GType;
+ pub fn gst_audio_info_new() -> *mut GstAudioInfo;
+ pub fn gst_audio_info_convert(info: *const GstAudioInfo, src_fmt: gst::GstFormat, src_val: i64, dest_fmt: gst::GstFormat, dest_val: *mut i64) -> gboolean;
+ pub fn gst_audio_info_copy(info: *const GstAudioInfo) -> *mut GstAudioInfo;
+ pub fn gst_audio_info_free(info: *mut GstAudioInfo);
+ pub fn gst_audio_info_from_caps(info: *mut GstAudioInfo, caps: *const gst::GstCaps) -> gboolean;
+ pub fn gst_audio_info_init(info: *mut GstAudioInfo);
+ #[cfg(feature = "v1_2")]
+ pub fn gst_audio_info_is_equal(info: *const GstAudioInfo, other: *const GstAudioInfo) -> gboolean;
+ pub fn gst_audio_info_set_format(info: *mut GstAudioInfo, format: GstAudioFormat, rate: c_int, channels: c_int, position: *const GstAudioChannelPosition);
+ pub fn gst_audio_info_to_caps(info: *const GstAudioInfo) -> *mut gst::GstCaps;
+
+ //=========================================================================
+ // GstAudioQuantize
+ //=========================================================================
+ pub fn gst_audio_quantize_free(quant: *mut GstAudioQuantize);
+ pub fn gst_audio_quantize_reset(quant: *mut GstAudioQuantize);
+ pub fn gst_audio_quantize_samples(quant: *mut GstAudioQuantize, in_: gpointer, out: gpointer, samples: c_uint);
+ pub fn gst_audio_quantize_new(dither: GstAudioDitherMethod, ns: GstAudioNoiseShapingMethod, flags: GstAudioQuantizeFlags, format: GstAudioFormat, channels: c_uint, quantizer: c_uint) -> *mut GstAudioQuantize;
+
+ //=========================================================================
+ // GstAudioResampler
+ //=========================================================================
+ #[cfg(feature = "v1_6")]
+ pub fn gst_audio_resampler_free(resampler: *mut GstAudioResampler);
+ pub fn gst_audio_resampler_get_in_frames(resampler: *mut GstAudioResampler, out_frames: size_t) -> size_t;
+ pub fn gst_audio_resampler_get_max_latency(resampler: *mut GstAudioResampler) -> size_t;
+ pub fn gst_audio_resampler_get_out_frames(resampler: *mut GstAudioResampler, in_frames: size_t) -> size_t;
+ pub fn gst_audio_resampler_resample(resampler: *mut GstAudioResampler, in_: gpointer, in_frames: size_t, out: gpointer, out_frames: size_t);
+ pub fn gst_audio_resampler_reset(resampler: *mut GstAudioResampler);
+ pub fn gst_audio_resampler_update(resampler: *mut GstAudioResampler, in_rate: c_int, out_rate: c_int, options: *mut gst::GstStructure) -> gboolean;
+ pub fn gst_audio_resampler_new(method: GstAudioResamplerMethod, flags: GstAudioResamplerFlags, format: GstAudioFormat, channels: c_int, in_rate: c_int, out_rate: c_int, options: *mut gst::GstStructure) -> *mut GstAudioResampler;
+ pub fn gst_audio_resampler_options_set_quality(method: GstAudioResamplerMethod, quality: c_uint, in_rate: c_int, out_rate: c_int, options: *mut gst::GstStructure);
+
+ //=========================================================================
+ // GstAudioBaseSink
+ //=========================================================================
+ pub fn gst_audio_base_sink_get_type() -> GType;
+ pub fn gst_audio_base_sink_create_ringbuffer(sink: *mut GstAudioBaseSink) -> *mut GstAudioRingBuffer;
+ pub fn gst_audio_base_sink_get_alignment_threshold(sink: *mut GstAudioBaseSink) -> gst::GstClockTime;
+ pub fn gst_audio_base_sink_get_discont_wait(sink: *mut GstAudioBaseSink) -> gst::GstClockTime;
+ pub fn gst_audio_base_sink_get_drift_tolerance(sink: *mut GstAudioBaseSink) -> i64;
+ pub fn gst_audio_base_sink_get_provide_clock(sink: *mut GstAudioBaseSink) -> gboolean;
+ pub fn gst_audio_base_sink_get_slave_method(sink: *mut GstAudioBaseSink) -> GstAudioBaseSinkSlaveMethod;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_audio_base_sink_report_device_failure(sink: *mut GstAudioBaseSink);
+ pub fn gst_audio_base_sink_set_alignment_threshold(sink: *mut GstAudioBaseSink, alignment_threshold: gst::GstClockTime);
+ #[cfg(feature = "v1_6")]
+ pub fn gst_audio_base_sink_set_custom_slaving_callback(sink: *mut GstAudioBaseSink, callback: GstAudioBaseSinkCustomSlavingCallback, user_data: gpointer, notify: glib::GDestroyNotify);
+ pub fn gst_audio_base_sink_set_discont_wait(sink: *mut GstAudioBaseSink, discont_wait: gst::GstClockTime);
+ pub fn gst_audio_base_sink_set_drift_tolerance(sink: *mut GstAudioBaseSink, drift_tolerance: i64);
+ pub fn gst_audio_base_sink_set_provide_clock(sink: *mut GstAudioBaseSink, provide: gboolean);
+ pub fn gst_audio_base_sink_set_slave_method(sink: *mut GstAudioBaseSink, method: GstAudioBaseSinkSlaveMethod);
+
+ //=========================================================================
+ // GstAudioBaseSrc
+ //=========================================================================
+ pub fn gst_audio_base_src_get_type() -> GType;
+ pub fn gst_audio_base_src_create_ringbuffer(src: *mut GstAudioBaseSrc) -> *mut GstAudioRingBuffer;
+ pub fn gst_audio_base_src_get_provide_clock(src: *mut GstAudioBaseSrc) -> gboolean;
+ pub fn gst_audio_base_src_get_slave_method(src: *mut GstAudioBaseSrc) -> GstAudioBaseSrcSlaveMethod;
+ pub fn gst_audio_base_src_set_provide_clock(src: *mut GstAudioBaseSrc, provide: gboolean);
+ pub fn gst_audio_base_src_set_slave_method(src: *mut GstAudioBaseSrc, method: GstAudioBaseSrcSlaveMethod);
+
+ //=========================================================================
+ // GstAudioCdSrc
+ //=========================================================================
+ pub fn gst_audio_cd_src_get_type() -> GType;
+ pub fn gst_audio_cd_src_add_track(src: *mut GstAudioCdSrc, track: *mut GstAudioCdSrcTrack) -> gboolean;
+
+ //=========================================================================
+ // GstAudioClock
+ //=========================================================================
+ pub fn gst_audio_clock_get_type() -> GType;
+ pub fn gst_audio_clock_new(name: *const c_char, func: GstAudioClockGetTimeFunc, user_data: gpointer, destroy_notify: glib::GDestroyNotify) -> *mut gst::GstClock;
+ pub fn gst_audio_clock_adjust(clock: *mut GstAudioClock, time: gst::GstClockTime) -> gst::GstClockTime;
+ pub fn gst_audio_clock_get_time(clock: *mut GstAudioClock) -> gst::GstClockTime;
+ pub fn gst_audio_clock_invalidate(clock: *mut GstAudioClock);
+ pub fn gst_audio_clock_reset(clock: *mut GstAudioClock, time: gst::GstClockTime);
+
+ //=========================================================================
+ // GstAudioDecoder
+ //=========================================================================
+ pub fn gst_audio_decoder_get_type() -> GType;
+ pub fn gst_audio_decoder_allocate_output_buffer(dec: *mut GstAudioDecoder, size: size_t) -> *mut gst::GstBuffer;
+ pub fn gst_audio_decoder_finish_frame(dec: *mut GstAudioDecoder, buf: *mut gst::GstBuffer, frames: c_int) -> gst::GstFlowReturn;
+ pub fn gst_audio_decoder_get_allocator(dec: *mut GstAudioDecoder, allocator: *mut *mut gst::GstAllocator, params: *mut gst::GstAllocationParams);
+ pub fn gst_audio_decoder_get_audio_info(dec: *mut GstAudioDecoder) -> *mut GstAudioInfo;
+ pub fn gst_audio_decoder_get_delay(dec: *mut GstAudioDecoder) -> c_int;
+ pub fn gst_audio_decoder_get_drainable(dec: *mut GstAudioDecoder) -> gboolean;
+ pub fn gst_audio_decoder_get_estimate_rate(dec: *mut GstAudioDecoder) -> c_int;
+ pub fn gst_audio_decoder_get_latency(dec: *mut GstAudioDecoder, min: *mut gst::GstClockTime, max: *mut gst::GstClockTime);
+ pub fn gst_audio_decoder_get_max_errors(dec: *mut GstAudioDecoder) -> c_int;
+ pub fn gst_audio_decoder_get_min_latency(dec: *mut GstAudioDecoder) -> gst::GstClockTime;
+ pub fn gst_audio_decoder_get_needs_format(dec: *mut GstAudioDecoder) -> gboolean;
+ pub fn gst_audio_decoder_get_parse_state(dec: *mut GstAudioDecoder, sync: *mut gboolean, eos: *mut gboolean);
+ pub fn gst_audio_decoder_get_plc(dec: *mut GstAudioDecoder) -> gboolean;
+ pub fn gst_audio_decoder_get_plc_aware(dec: *mut GstAudioDecoder) -> c_int;
+ pub fn gst_audio_decoder_get_tolerance(dec: *mut GstAudioDecoder) -> gst::GstClockTime;
+ pub fn gst_audio_decoder_merge_tags(dec: *mut GstAudioDecoder, tags: *const gst::GstTagList, mode: gst::GstTagMergeMode);
+ pub fn gst_audio_decoder_negotiate(dec: *mut GstAudioDecoder) -> gboolean;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_audio_decoder_proxy_getcaps(decoder: *mut GstAudioDecoder, caps: *mut gst::GstCaps, filter: *mut gst::GstCaps) -> *mut gst::GstCaps;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_audio_decoder_set_allocation_caps(dec: *mut GstAudioDecoder, allocation_caps: *mut gst::GstCaps);
+ pub fn gst_audio_decoder_set_drainable(dec: *mut GstAudioDecoder, enabled: gboolean);
+ pub fn gst_audio_decoder_set_estimate_rate(dec: *mut GstAudioDecoder, enabled: gboolean);
+ pub fn gst_audio_decoder_set_latency(dec: *mut GstAudioDecoder, min: gst::GstClockTime, max: gst::GstClockTime);
+ pub fn gst_audio_decoder_set_max_errors(dec: *mut GstAudioDecoder, num: c_int);
+ pub fn gst_audio_decoder_set_min_latency(dec: *mut GstAudioDecoder, num: gst::GstClockTime);
+ pub fn gst_audio_decoder_set_needs_format(dec: *mut GstAudioDecoder, enabled: gboolean);
+ pub fn gst_audio_decoder_set_output_format(dec: *mut GstAudioDecoder, info: *const GstAudioInfo) -> gboolean;
+ pub fn gst_audio_decoder_set_plc(dec: *mut GstAudioDecoder, enabled: gboolean);
+ pub fn gst_audio_decoder_set_plc_aware(dec: *mut GstAudioDecoder, plc: gboolean);
+ pub fn gst_audio_decoder_set_tolerance(dec: *mut GstAudioDecoder, tolerance: gst::GstClockTime);
+ #[cfg(feature = "v1_6")]
+ pub fn gst_audio_decoder_set_use_default_pad_acceptcaps(decoder: *mut GstAudioDecoder, use_: gboolean);
+
+ //=========================================================================
+ // GstAudioEncoder
+ //=========================================================================
+ pub fn gst_audio_encoder_get_type() -> GType;
+ pub fn gst_audio_encoder_allocate_output_buffer(enc: *mut GstAudioEncoder, size: size_t) -> *mut gst::GstBuffer;
+ pub fn gst_audio_encoder_finish_frame(enc: *mut GstAudioEncoder, buffer: *mut gst::GstBuffer, samples: c_int) -> gst::GstFlowReturn;
+ pub fn gst_audio_encoder_get_allocator(enc: *mut GstAudioEncoder, allocator: *mut *mut gst::GstAllocator, params: *mut gst::GstAllocationParams);
+ pub fn gst_audio_encoder_get_audio_info(enc: *mut GstAudioEncoder) -> *mut GstAudioInfo;
+ pub fn gst_audio_encoder_get_drainable(enc: *mut GstAudioEncoder) -> gboolean;
+ pub fn gst_audio_encoder_get_frame_max(enc: *mut GstAudioEncoder) -> c_int;
+ pub fn gst_audio_encoder_get_frame_samples_max(enc: *mut GstAudioEncoder) -> c_int;
+ pub fn gst_audio_encoder_get_frame_samples_min(enc: *mut GstAudioEncoder) -> c_int;
+ pub fn gst_audio_encoder_get_hard_min(enc: *mut GstAudioEncoder) -> gboolean;
+ pub fn gst_audio_encoder_get_hard_resync(enc: *mut GstAudioEncoder) -> gboolean;
+ pub fn gst_audio_encoder_get_latency(enc: *mut GstAudioEncoder, min: *mut gst::GstClockTime, max: *mut gst::GstClockTime);
+ pub fn gst_audio_encoder_get_lookahead(enc: *mut GstAudioEncoder) -> c_int;
+ pub fn gst_audio_encoder_get_mark_granule(enc: *mut GstAudioEncoder) -> gboolean;
+ pub fn gst_audio_encoder_get_perfect_timestamp(enc: *mut GstAudioEncoder) -> gboolean;
+ pub fn gst_audio_encoder_get_tolerance(enc: *mut GstAudioEncoder) -> gst::GstClockTime;
+ pub fn gst_audio_encoder_merge_tags(enc: *mut GstAudioEncoder, tags: *const gst::GstTagList, mode: gst::GstTagMergeMode);
+ pub fn gst_audio_encoder_negotiate(enc: *mut GstAudioEncoder) -> gboolean;
+ pub fn gst_audio_encoder_proxy_getcaps(enc: *mut GstAudioEncoder, caps: *mut gst::GstCaps, filter: *mut gst::GstCaps) -> *mut gst::GstCaps;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_audio_encoder_set_allocation_caps(enc: *mut GstAudioEncoder, allocation_caps: *mut gst::GstCaps);
+ pub fn gst_audio_encoder_set_drainable(enc: *mut GstAudioEncoder, enabled: gboolean);
+ pub fn gst_audio_encoder_set_frame_max(enc: *mut GstAudioEncoder, num: c_int);
+ pub fn gst_audio_encoder_set_frame_samples_max(enc: *mut GstAudioEncoder, num: c_int);
+ pub fn gst_audio_encoder_set_frame_samples_min(enc: *mut GstAudioEncoder, num: c_int);
+ pub fn gst_audio_encoder_set_hard_min(enc: *mut GstAudioEncoder, enabled: gboolean);
+ pub fn gst_audio_encoder_set_hard_resync(enc: *mut GstAudioEncoder, enabled: gboolean);
+ pub fn gst_audio_encoder_set_headers(enc: *mut GstAudioEncoder, headers: *mut glib::GList);
+ pub fn gst_audio_encoder_set_latency(enc: *mut GstAudioEncoder, min: gst::GstClockTime, max: gst::GstClockTime);
+ pub fn gst_audio_encoder_set_lookahead(enc: *mut GstAudioEncoder, num: c_int);
+ pub fn gst_audio_encoder_set_mark_granule(enc: *mut GstAudioEncoder, enabled: gboolean);
+ pub fn gst_audio_encoder_set_output_format(enc: *mut GstAudioEncoder, caps: *mut gst::GstCaps) -> gboolean;
+ pub fn gst_audio_encoder_set_perfect_timestamp(enc: *mut GstAudioEncoder, enabled: gboolean);
+ pub fn gst_audio_encoder_set_tolerance(enc: *mut GstAudioEncoder, tolerance: gst::GstClockTime);
+
+ //=========================================================================
+ // GstAudioFilter
+ //=========================================================================
+ pub fn gst_audio_filter_get_type() -> GType;
+
+ //=========================================================================
+ // GstAudioRingBuffer
+ //=========================================================================
+ pub fn gst_audio_ring_buffer_get_type() -> GType;
+ pub fn gst_audio_ring_buffer_debug_spec_buff(spec: *mut GstAudioRingBufferSpec);
+ pub fn gst_audio_ring_buffer_debug_spec_caps(spec: *mut GstAudioRingBufferSpec);
+ pub fn gst_audio_ring_buffer_parse_caps(spec: *mut GstAudioRingBufferSpec, caps: *mut gst::GstCaps) -> gboolean;
+ pub fn gst_audio_ring_buffer_acquire(buf: *mut GstAudioRingBuffer, spec: *mut GstAudioRingBufferSpec) -> gboolean;
+ pub fn gst_audio_ring_buffer_activate(buf: *mut GstAudioRingBuffer, active: gboolean) -> gboolean;
+ pub fn gst_audio_ring_buffer_advance(buf: *mut GstAudioRingBuffer, advance: c_uint);
+ pub fn gst_audio_ring_buffer_clear(buf: *mut GstAudioRingBuffer, segment: c_int);
+ pub fn gst_audio_ring_buffer_clear_all(buf: *mut GstAudioRingBuffer);
+ pub fn gst_audio_ring_buffer_close_device(buf: *mut GstAudioRingBuffer) -> gboolean;
+ pub fn gst_audio_ring_buffer_commit(buf: *mut GstAudioRingBuffer, sample: *mut u64, data: *mut u8, in_samples: c_int, out_samples: c_int, accum: *mut c_int) -> c_uint;
+ pub fn gst_audio_ring_buffer_convert(buf: *mut GstAudioRingBuffer, src_fmt: gst::GstFormat, src_val: i64, dest_fmt: gst::GstFormat, dest_val: *mut i64) -> gboolean;
+ pub fn gst_audio_ring_buffer_delay(buf: *mut GstAudioRingBuffer) -> c_uint;
+ pub fn gst_audio_ring_buffer_device_is_open(buf: *mut GstAudioRingBuffer) -> gboolean;
+ pub fn gst_audio_ring_buffer_is_acquired(buf: *mut GstAudioRingBuffer) -> gboolean;
+ pub fn gst_audio_ring_buffer_is_active(buf: *mut GstAudioRingBuffer) -> gboolean;
+ pub fn gst_audio_ring_buffer_is_flushing(buf: *mut GstAudioRingBuffer) -> gboolean;
+ pub fn gst_audio_ring_buffer_may_start(buf: *mut GstAudioRingBuffer, allowed: gboolean);
+ pub fn gst_audio_ring_buffer_open_device(buf: *mut GstAudioRingBuffer) -> gboolean;
+ pub fn gst_audio_ring_buffer_pause(buf: *mut GstAudioRingBuffer) -> gboolean;
+ pub fn gst_audio_ring_buffer_prepare_read(buf: *mut GstAudioRingBuffer, segment: *mut c_int, readptr: *mut *mut u8, len: *mut c_int) -> gboolean;
+ pub fn gst_audio_ring_buffer_read(buf: *mut GstAudioRingBuffer, sample: u64, data: *mut u8, len: c_uint, timestamp: *mut gst::GstClockTime) -> c_uint;
+ pub fn gst_audio_ring_buffer_release(buf: *mut GstAudioRingBuffer) -> gboolean;
+ pub fn gst_audio_ring_buffer_samples_done(buf: *mut GstAudioRingBuffer) -> u64;
+ pub fn gst_audio_ring_buffer_set_callback(buf: *mut GstAudioRingBuffer, cb: GstAudioRingBufferCallback, user_data: gpointer);
+ #[cfg(feature = "v1_12")]
+ pub fn gst_audio_ring_buffer_set_callback_full(buf: *mut GstAudioRingBuffer, cb: GstAudioRingBufferCallback, user_data: gpointer, notify: glib::GDestroyNotify);
+ pub fn gst_audio_ring_buffer_set_channel_positions(buf: *mut GstAudioRingBuffer, position: *const GstAudioChannelPosition);
+ pub fn gst_audio_ring_buffer_set_flushing(buf: *mut GstAudioRingBuffer, flushing: gboolean);
+ pub fn gst_audio_ring_buffer_set_sample(buf: *mut GstAudioRingBuffer, sample: u64);
+ pub fn gst_audio_ring_buffer_set_timestamp(buf: *mut GstAudioRingBuffer, readseg: c_int, timestamp: gst::GstClockTime);
+ pub fn gst_audio_ring_buffer_start(buf: *mut GstAudioRingBuffer) -> gboolean;
+ pub fn gst_audio_ring_buffer_stop(buf: *mut GstAudioRingBuffer) -> gboolean;
+
+ //=========================================================================
+ // GstAudioSink
+ //=========================================================================
+ pub fn gst_audio_sink_get_type() -> GType;
+
+ //=========================================================================
+ // GstAudioSrc
+ //=========================================================================
+ pub fn gst_audio_src_get_type() -> GType;
+
+ //=========================================================================
+ // GstStreamVolume
+ //=========================================================================
+ pub fn gst_stream_volume_get_type() -> GType;
+ pub fn gst_stream_volume_convert_volume(from: GstStreamVolumeFormat, to: GstStreamVolumeFormat, val: c_double) -> c_double;
+ pub fn gst_stream_volume_get_mute(volume: *mut GstStreamVolume) -> gboolean;
+ pub fn gst_stream_volume_get_volume(volume: *mut GstStreamVolume, format: GstStreamVolumeFormat) -> c_double;
+ pub fn gst_stream_volume_set_mute(volume: *mut GstStreamVolume, mute: gboolean);
+ pub fn gst_stream_volume_set_volume(volume: *mut GstStreamVolume, format: GstStreamVolumeFormat, val: c_double);
+
+ //=========================================================================
+ // Other functions
+ //=========================================================================
+ pub fn gst_audio_buffer_clip(buffer: *mut gst::GstBuffer, segment: *const gst::GstSegment, rate: c_int, bpf: c_int) -> *mut gst::GstBuffer;
+ pub fn gst_audio_buffer_reorder_channels(buffer: *mut gst::GstBuffer, format: GstAudioFormat, channels: c_int, from: *mut GstAudioChannelPosition, to: *mut GstAudioChannelPosition) -> gboolean;
+ #[cfg(feature = "v1_8")]
+ pub fn gst_audio_channel_get_fallback_mask(channels: c_int) -> u64;
+ pub fn gst_audio_channel_positions_from_mask(channels: c_int, channel_mask: u64, position: *mut GstAudioChannelPosition) -> gboolean;
+ pub fn gst_audio_channel_positions_to_mask(position: *mut GstAudioChannelPosition, channels: c_int, force_order: gboolean, channel_mask: *mut u64) -> gboolean;
+ pub fn gst_audio_channel_positions_to_string(position: *mut GstAudioChannelPosition, channels: c_int) -> *mut c_char;
+ pub fn gst_audio_channel_positions_to_valid_order(position: *mut GstAudioChannelPosition, channels: c_int) -> gboolean;
+ pub fn gst_audio_check_valid_channel_positions(position: *mut GstAudioChannelPosition, channels: c_int, force_order: gboolean) -> gboolean;
+ pub fn gst_audio_clipping_meta_api_get_type() -> GType;
+ pub fn gst_audio_downmix_meta_api_get_type() -> GType;
+ pub fn gst_audio_format_info_get_type() -> GType;
+ pub fn gst_audio_get_channel_reorder_map(channels: c_int, from: *mut GstAudioChannelPosition, to: *mut GstAudioChannelPosition, reorder_map: *mut c_int) -> gboolean;
+ pub fn gst_audio_iec61937_frame_size(spec: *const GstAudioRingBufferSpec) -> c_uint;
+ pub fn gst_audio_iec61937_payload(src: *mut u8, src_n: c_uint, dst: *mut u8, dst_n: c_uint, spec: *const GstAudioRingBufferSpec, endianness: c_int) -> gboolean;
+ pub fn gst_audio_reorder_channels(data: gpointer, size: size_t, format: GstAudioFormat, channels: c_int, from: *mut GstAudioChannelPosition, to: *mut GstAudioChannelPosition) -> gboolean;
+ #[cfg(feature = "v1_8")]
+ pub fn gst_buffer_add_audio_clipping_meta(buffer: *mut gst::GstBuffer, format: gst::GstFormat, start: u64, end: u64) -> *mut GstAudioClippingMeta;
+ pub fn gst_buffer_add_audio_downmix_meta(buffer: *mut gst::GstBuffer, from_position: *mut GstAudioChannelPosition, from_channels: c_int, to_position: *mut GstAudioChannelPosition, to_channels: c_int, matrix: *mut *const c_float) -> *mut GstAudioDownmixMeta;
+ pub fn gst_buffer_get_audio_downmix_meta_for_channels(buffer: *mut gst::GstBuffer, to_position: *mut GstAudioChannelPosition, to_channels: c_int) -> *mut GstAudioDownmixMeta;
+
+}
diff --git a/gstreamer-base-sys/Cargo.toml b/gstreamer-base-sys/Cargo.toml
new file mode 100644
index 000000000..a477fa7a9
--- /dev/null
+++ b/gstreamer-base-sys/Cargo.toml
@@ -0,0 +1,44 @@
+[build-dependencies]
+pkg-config = "0.3.7"
+
+[dependencies]
+bitflags = "0.8"
+glib-sys = "0.3"
+gobject-sys = "0.3"
+libc = "0.2"
+
+[dependencies.gstreamer-sys]
+path = "../gstreamer-sys"
+version = "0.1.0"
+
+[features]
+v1_0_10 = []
+v1_10 = ["v1_8"]
+v1_12 = ["v1_10"]
+v1_2 = ["v1_0_10"]
+v1_2_3 = ["v1_2"]
+v1_4 = ["v1_2_3"]
+v1_6 = ["v1_4"]
+v1_8 = ["v1_6"]
+
+[lib]
+name = "gstreamer_base_sys"
+
+[package]
+build = "build.rs"
+links = "gstbase-1.0"
+name = "gstreamer-base-sys"
+version = "0.1.0"
+authors = ["Sebastian Dröge <sebastian@centricular.com>"]
+description = "FFI bindings to libgstbase-1.0"
+homepage = "https://gstreamer.freedesktop.org"
+keywords = ["ffi", "gstreamer", "gnome", "multimedia"]
+repository = "https://github.com/sdroege/gstreamer-sys"
+license = "MIT"
+
+include = [
+ "src/*.rs",
+ "Cargo.toml",
+ "build.rs",
+ "LICENSE",
+]
diff --git a/gstreamer-base-sys/LICENSE b/gstreamer-base-sys/LICENSE
new file mode 100644
index 000000000..3d76f6e2f
--- /dev/null
+++ b/gstreamer-base-sys/LICENSE
@@ -0,0 +1,23 @@
+The MIT License (MIT)
+
+Copyright (c) 2017 Sebastian Dröge <sebastian@centricular.com>.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
diff --git a/gstreamer-base-sys/build.rs b/gstreamer-base-sys/build.rs
new file mode 100644
index 000000000..a6cd10fe4
--- /dev/null
+++ b/gstreamer-base-sys/build.rs
@@ -0,0 +1,70 @@
+extern crate pkg_config;
+
+use pkg_config::{Config, Error};
+use std::env;
+use std::io::prelude::*;
+use std::io;
+use std::process;
+
+fn main() {
+ if let Err(s) = find() {
+ let _ = writeln!(io::stderr(), "{}", s);
+ process::exit(1);
+ }
+}
+
+fn find() -> Result<(), Error> {
+ let package_name = "gstreamer-base-1.0";
+ let shared_libs = ["gstbase-1.0"];
+ let version = if cfg!(feature = "v1_12") {
+ "1.12"
+ } else if cfg!(feature = "v1_10") {
+ "1.10"
+ } else if cfg!(feature = "v1_6") {
+ "1.6"
+ } else if cfg!(feature = "v1_4") {
+ "1.4"
+ } else if cfg!(feature = "v1_2") {
+ "1.2"
+ } else {
+ "1.0"
+ };
+
+ if let Ok(lib_dir) = env::var("GTK_LIB_DIR") {
+ for lib_ in shared_libs.iter() {
+ println!("cargo:rustc-link-lib=dylib={}", lib_);
+ }
+ println!("cargo:rustc-link-search=native={}", lib_dir);
+ return Ok(())
+ }
+
+ let target = env::var("TARGET").unwrap();
+ let hardcode_shared_libs = target.contains("windows");
+
+ let mut config = Config::new();
+ config.atleast_version(version);
+ if hardcode_shared_libs {
+ config.cargo_metadata(false);
+ }
+ match config.probe(package_name) {
+ Ok(library) => {
+ if hardcode_shared_libs {
+ for lib_ in shared_libs.iter() {
+ println!("cargo:rustc-link-lib=dylib={}", lib_);
+ }
+ for path in library.link_paths.iter() {
+ println!("cargo:rustc-link-search=native={}", path.to_str().unwrap());
+ }
+ }
+ Ok(())
+ }
+ Err(Error::EnvNoPkgConfig(_)) | Err(Error::Command { .. }) => {
+ for lib_ in shared_libs.iter() {
+ println!("cargo:rustc-link-lib=dylib={}", lib_);
+ }
+ Ok(())
+ }
+ Err(err) => Err(err),
+ }
+}
+
diff --git a/gstreamer-base-sys/src/lib.rs b/gstreamer-base-sys/src/lib.rs
new file mode 100644
index 000000000..2dd2307df
--- /dev/null
+++ b/gstreamer-base-sys/src/lib.rs
@@ -0,0 +1,826 @@
+// This file was generated by gir (10e1d4f) from gir-files (???)
+// DO NOT EDIT
+
+#![allow(non_camel_case_types, non_upper_case_globals)]
+
+extern crate libc;
+#[macro_use] extern crate bitflags;
+extern crate glib_sys as glib;
+extern crate gobject_sys as gobject;
+extern crate gstreamer_sys as gst;
+
+#[allow(unused_imports)]
+use libc::{c_int, c_char, c_uchar, c_float, c_uint, c_double,
+ c_short, c_ushort, c_long, c_ulong,
+ c_void, size_t, ssize_t, time_t, FILE};
+
+#[allow(unused_imports)]
+use glib::{gboolean, gconstpointer, gpointer, GType, Volatile};
+
+// Constants
+pub const GST_BASE_PARSE_FLAG_DRAINING: c_int = 2;
+pub const GST_BASE_PARSE_FLAG_LOST_SYNC: c_int = 1;
+pub const GST_BASE_TRANSFORM_SINK_NAME: *const c_char = b"sink\0" as *const u8 as *const c_char;
+pub const GST_BASE_TRANSFORM_SRC_NAME: *const c_char = b"src\0" as *const u8 as *const c_char;
+
+// Flags
+bitflags! {
+ #[repr(C)]
+ pub flags GstBaseParseFrameFlags: c_uint {
+ const GST_BASE_PARSE_FRAME_FLAG_NONE = 0,
+ const GST_BASE_PARSE_FRAME_FLAG_NEW_FRAME = 1,
+ const GST_BASE_PARSE_FRAME_FLAG_NO_FRAME = 2,
+ const GST_BASE_PARSE_FRAME_FLAG_CLIP = 4,
+ const GST_BASE_PARSE_FRAME_FLAG_DROP = 8,
+ const GST_BASE_PARSE_FRAME_FLAG_QUEUE = 16,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstBaseSrcFlags: c_uint {
+ const GST_BASE_SRC_FLAG_STARTING = 16384,
+ const GST_BASE_SRC_FLAG_STARTED = 32768,
+ const GST_BASE_SRC_FLAG_LAST = 1048576,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstCollectPadsStateFlags: c_uint {
+ const GST_COLLECT_PADS_STATE_EOS = 1,
+ const GST_COLLECT_PADS_STATE_FLUSHING = 2,
+ const GST_COLLECT_PADS_STATE_NEW_SEGMENT = 4,
+ const GST_COLLECT_PADS_STATE_WAITING = 8,
+ const GST_COLLECT_PADS_STATE_LOCKED = 16,
+ }
+}
+
+// Callbacks
+pub type GstCollectDataDestroyNotify = Option<unsafe extern "C" fn(data: *mut GstCollectData)>;
+pub type GstCollectPadsBufferFunction = Option<unsafe extern "C" fn(pads: *mut GstCollectPads, data: *mut GstCollectData, buffer: *mut gst::GstBuffer, user_data: gpointer) -> gst::GstFlowReturn>;
+pub type GstCollectPadsClipFunction = Option<unsafe extern "C" fn(pads: *mut GstCollectPads, data: *mut GstCollectData, inbuffer: *mut gst::GstBuffer, outbuffer: *mut *mut gst::GstBuffer, user_data: gpointer) -> gst::GstFlowReturn>;
+pub type GstCollectPadsCompareFunction = Option<unsafe extern "C" fn(pads: *mut GstCollectPads, data1: *mut GstCollectData, timestamp1: gst::GstClockTime, data2: *mut GstCollectData, timestamp2: gst::GstClockTime, user_data: gpointer) -> c_int>;
+pub type GstCollectPadsEventFunction = Option<unsafe extern "C" fn(pads: *mut GstCollectPads, pad: *mut GstCollectData, event: *mut gst::GstEvent, user_data: gpointer) -> gboolean>;
+pub type GstCollectPadsFlushFunction = Option<unsafe extern "C" fn(pads: *mut GstCollectPads, user_data: gpointer)>;
+pub type GstCollectPadsFunction = Option<unsafe extern "C" fn(pads: *mut GstCollectPads, user_data: gpointer) -> gst::GstFlowReturn>;
+pub type GstCollectPadsQueryFunction = Option<unsafe extern "C" fn(pads: *mut GstCollectPads, pad: *mut GstCollectData, query: *mut gst::GstQuery, user_data: gpointer) -> gboolean>;
+pub type GstDataQueueCheckFullFunction = Option<unsafe extern "C" fn(queue: *mut GstDataQueue, visible: c_uint, bytes: c_uint, time: u64, checkdata: gpointer) -> gboolean>;
+pub type GstDataQueueEmptyCallback = Option<unsafe extern "C" fn(queue: *mut GstDataQueue, checkdata: gpointer)>;
+pub type GstDataQueueFullCallback = Option<unsafe extern "C" fn(queue: *mut GstDataQueue, checkdata: gpointer)>;
+pub type GstTypeFindHelperGetRangeFunction = Option<unsafe extern "C" fn(obj: *mut gst::GstObject, parent: *mut gst::GstObject, offset: u64, length: c_uint, buffer: *mut *mut gst::GstBuffer) -> gst::GstFlowReturn>;
+
+// Records
+#[repr(C)]
+pub struct GstAdapterClass(c_void);
+
+#[repr(C)]
+pub struct GstBaseParseClass {
+ pub parent_class: gst::GstElementClass,
+ pub start: Option<unsafe extern "C" fn(parse: *mut GstBaseParse) -> gboolean>,
+ pub stop: Option<unsafe extern "C" fn(parse: *mut GstBaseParse) -> gboolean>,
+ pub set_sink_caps: Option<unsafe extern "C" fn(parse: *mut GstBaseParse, caps: *mut gst::GstCaps) -> gboolean>,
+ pub handle_frame: Option<unsafe extern "C" fn(parse: *mut GstBaseParse, frame: *mut GstBaseParseFrame, skipsize: *mut c_int) -> gst::GstFlowReturn>,
+ pub pre_push_frame: Option<unsafe extern "C" fn(parse: *mut GstBaseParse, frame: *mut GstBaseParseFrame) -> gst::GstFlowReturn>,
+ pub convert: Option<unsafe extern "C" fn(parse: *mut GstBaseParse, src_format: gst::GstFormat, src_value: i64, dest_format: gst::GstFormat, dest_value: *mut i64) -> gboolean>,
+ pub sink_event: Option<unsafe extern "C" fn(parse: *mut GstBaseParse, event: *mut gst::GstEvent) -> gboolean>,
+ pub src_event: Option<unsafe extern "C" fn(parse: *mut GstBaseParse, event: *mut gst::GstEvent) -> gboolean>,
+ pub get_sink_caps: Option<unsafe extern "C" fn(parse: *mut GstBaseParse, filter: *mut gst::GstCaps) -> *mut gst::GstCaps>,
+ pub detect: Option<unsafe extern "C" fn(parse: *mut GstBaseParse, buffer: *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ pub sink_query: Option<unsafe extern "C" fn(parse: *mut GstBaseParse, query: *mut gst::GstQuery) -> gboolean>,
+ pub src_query: Option<unsafe extern "C" fn(parse: *mut GstBaseParse, query: *mut gst::GstQuery) -> gboolean>,
+ _gst_reserved: [gpointer; 18],
+}
+
+#[repr(C)]
+pub struct GstBaseParseFrame {
+ pub buffer: *mut gst::GstBuffer,
+ pub out_buffer: *mut gst::GstBuffer,
+ pub flags: c_uint,
+ pub offset: u64,
+ pub overhead: c_int,
+ size: c_int,
+ _gst_reserved_i: [c_uint; 2],
+ _gst_reserved_p: [gpointer; 2],
+ _private_flags: c_uint,
+}
+
+#[repr(C)]
+pub struct GstBaseParsePrivate(c_void);
+
+#[repr(C)]
+pub struct GstBaseSinkClass {
+ pub parent_class: gst::GstElementClass,
+ pub get_caps: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, filter: *mut gst::GstCaps) -> *mut gst::GstCaps>,
+ pub set_caps: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, caps: *mut gst::GstCaps) -> gboolean>,
+ pub fixate: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, caps: *mut gst::GstCaps) -> *mut gst::GstCaps>,
+ pub activate_pull: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, active: gboolean) -> gboolean>,
+ pub get_times: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, buffer: *mut gst::GstBuffer, start: *mut gst::GstClockTime, end: *mut gst::GstClockTime)>,
+ pub propose_allocation: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, query: *mut gst::GstQuery) -> gboolean>,
+ pub start: Option<unsafe extern "C" fn(sink: *mut GstBaseSink) -> gboolean>,
+ pub stop: Option<unsafe extern "C" fn(sink: *mut GstBaseSink) -> gboolean>,
+ pub unlock: Option<unsafe extern "C" fn(sink: *mut GstBaseSink) -> gboolean>,
+ pub unlock_stop: Option<unsafe extern "C" fn(sink: *mut GstBaseSink) -> gboolean>,
+ pub query: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, query: *mut gst::GstQuery) -> gboolean>,
+ pub event: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, event: *mut gst::GstEvent) -> gboolean>,
+ pub wait_event: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, event: *mut gst::GstEvent) -> gst::GstFlowReturn>,
+ pub prepare: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, buffer: *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ pub prepare_list: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, buffer_list: *mut gst::GstBufferList) -> gst::GstFlowReturn>,
+ pub preroll: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, buffer: *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ pub render: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, buffer: *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ pub render_list: Option<unsafe extern "C" fn(sink: *mut GstBaseSink, buffer_list: *mut gst::GstBufferList) -> gst::GstFlowReturn>,
+ _gst_reserved: [gpointer; 20],
+}
+
+#[repr(C)]
+pub struct GstBaseSinkPrivate(c_void);
+
+#[repr(C)]
+pub struct GstBaseSrcClass {
+ pub parent_class: gst::GstElementClass,
+ pub get_caps: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, filter: *mut gst::GstCaps) -> *mut gst::GstCaps>,
+ pub negotiate: Option<unsafe extern "C" fn(src: *mut GstBaseSrc) -> gboolean>,
+ pub fixate: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, caps: *mut gst::GstCaps) -> *mut gst::GstCaps>,
+ pub set_caps: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, caps: *mut gst::GstCaps) -> gboolean>,
+ pub decide_allocation: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, query: *mut gst::GstQuery) -> gboolean>,
+ pub start: Option<unsafe extern "C" fn(src: *mut GstBaseSrc) -> gboolean>,
+ pub stop: Option<unsafe extern "C" fn(src: *mut GstBaseSrc) -> gboolean>,
+ pub get_times: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, buffer: *mut gst::GstBuffer, start: *mut gst::GstClockTime, end: *mut gst::GstClockTime)>,
+ pub get_size: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, size: *mut u64) -> gboolean>,
+ pub is_seekable: Option<unsafe extern "C" fn(src: *mut GstBaseSrc) -> gboolean>,
+ pub prepare_seek_segment: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, seek: *mut gst::GstEvent, segment: *mut gst::GstSegment) -> gboolean>,
+ pub do_seek: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, segment: *mut gst::GstSegment) -> gboolean>,
+ pub unlock: Option<unsafe extern "C" fn(src: *mut GstBaseSrc) -> gboolean>,
+ pub unlock_stop: Option<unsafe extern "C" fn(src: *mut GstBaseSrc) -> gboolean>,
+ pub query: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, query: *mut gst::GstQuery) -> gboolean>,
+ pub event: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, event: *mut gst::GstEvent) -> gboolean>,
+ pub create: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, offset: u64, size: c_uint, buf: *mut *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ pub alloc: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, offset: u64, size: c_uint, buf: *mut *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ pub fill: Option<unsafe extern "C" fn(src: *mut GstBaseSrc, offset: u64, size: c_uint, buf: *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ _gst_reserved: [gpointer; 20],
+}
+
+#[repr(C)]
+pub struct GstBaseSrcPrivate(c_void);
+
+#[repr(C)]
+pub struct GstBaseTransformClass {
+ pub parent_class: gst::GstElementClass,
+ pub passthrough_on_same_caps: gboolean,
+ pub transform_ip_on_passthrough: gboolean,
+ pub transform_caps: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, direction: gst::GstPadDirection, caps: *mut gst::GstCaps, filter: *mut gst::GstCaps) -> *mut gst::GstCaps>,
+ pub fixate_caps: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, direction: gst::GstPadDirection, caps: *mut gst::GstCaps, othercaps: *mut gst::GstCaps) -> *mut gst::GstCaps>,
+ pub accept_caps: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, direction: gst::GstPadDirection, caps: *mut gst::GstCaps) -> gboolean>,
+ pub set_caps: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, incaps: *mut gst::GstCaps, outcaps: *mut gst::GstCaps) -> gboolean>,
+ pub query: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, direction: gst::GstPadDirection, query: *mut gst::GstQuery) -> gboolean>,
+ pub decide_allocation: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, query: *mut gst::GstQuery) -> gboolean>,
+ pub filter_meta: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, query: *mut gst::GstQuery, api: GType, params: *const gst::GstStructure) -> gboolean>,
+ pub propose_allocation: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, decide_query: *mut gst::GstQuery, query: *mut gst::GstQuery) -> gboolean>,
+ pub transform_size: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, direction: gst::GstPadDirection, caps: *mut gst::GstCaps, size: size_t, othercaps: *mut gst::GstCaps, othersize: *mut size_t) -> gboolean>,
+ pub get_unit_size: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, caps: *mut gst::GstCaps, size: *mut size_t) -> gboolean>,
+ pub start: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform) -> gboolean>,
+ pub stop: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform) -> gboolean>,
+ pub sink_event: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, event: *mut gst::GstEvent) -> gboolean>,
+ pub src_event: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, event: *mut gst::GstEvent) -> gboolean>,
+ pub prepare_output_buffer: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, input: *mut gst::GstBuffer, outbuf: *mut *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ pub copy_metadata: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, input: *mut gst::GstBuffer, outbuf: *mut gst::GstBuffer) -> gboolean>,
+ pub transform_meta: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, outbuf: *mut gst::GstBuffer, meta: *mut gst::GstMeta, inbuf: *mut gst::GstBuffer) -> gboolean>,
+ pub before_transform: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, buffer: *mut gst::GstBuffer)>,
+ pub transform: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, inbuf: *mut gst::GstBuffer, outbuf: *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ pub transform_ip: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, buf: *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ pub submit_input_buffer: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, is_discont: gboolean, input: *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ pub generate_output: Option<unsafe extern "C" fn(trans: *mut GstBaseTransform, outbuf: *mut *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ _gst_reserved: [gpointer; 18],
+}
+
+#[repr(C)]
+pub struct GstBaseTransformPrivate(c_void);
+
+#[repr(C)]
+pub struct GstBitReader {
+ pub data: *mut u8,
+ pub size: c_uint,
+ pub byte: c_uint,
+ pub bit: c_uint,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstByteReader {
+ pub data: *mut u8,
+ pub size: c_uint,
+ pub byte: c_uint,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstByteWriter {
+ pub parent: GstByteReader,
+ pub alloc_size: c_uint,
+ pub fixed: gboolean,
+ pub owned: gboolean,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstCollectData {
+ pub collect: *mut GstCollectPads,
+ pub pad: *mut gst::GstPad,
+ pub buffer: *mut gst::GstBuffer,
+ pub pos: c_uint,
+ pub segment: gst::GstSegment,
+ state: GstCollectPadsStateFlags,
+ priv_: *mut GstCollectDataPrivate,
+ _truncated_record_marker: c_void,
+ //union,
+}
+
+#[repr(C)]
+pub struct GstCollectDataPrivate(c_void);
+
+#[repr(C)]
+pub struct GstCollectPadsClass {
+ pub parent_class: gst::GstObjectClass,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstCollectPadsPrivate(c_void);
+
+#[repr(C)]
+pub struct GstDataQueueClass {
+ pub parent_class: gobject::GObjectClass,
+ pub empty: Option<unsafe extern "C" fn(queue: *mut GstDataQueue)>,
+ pub full: Option<unsafe extern "C" fn(queue: *mut GstDataQueue)>,
+ pub _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstDataQueueItem {
+ pub object: *mut gst::GstMiniObject,
+ pub size: c_uint,
+ pub duration: u64,
+ pub visible: gboolean,
+ pub destroy: glib::GDestroyNotify,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstDataQueuePrivate(c_void);
+
+#[repr(C)]
+pub struct GstDataQueueSize {
+ pub visible: c_uint,
+ pub bytes: c_uint,
+ pub time: u64,
+}
+
+#[repr(C)]
+pub struct GstFlowCombiner(c_void);
+
+#[repr(C)]
+pub struct GstPushSrcClass {
+ pub parent_class: GstBaseSrcClass,
+ pub create: Option<unsafe extern "C" fn(src: *mut GstPushSrc, buf: *mut *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ pub alloc: Option<unsafe extern "C" fn(src: *mut GstPushSrc, buf: *mut *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ pub fill: Option<unsafe extern "C" fn(src: *mut GstPushSrc, buf: *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstQueueArray(c_void);
+
+// Classes
+#[repr(C)]
+pub struct GstAdapter(c_void);
+
+#[repr(C)]
+pub struct GstBaseParse {
+ pub element: gst::GstElement,
+ pub sinkpad: *mut gst::GstPad,
+ pub srcpad: *mut gst::GstPad,
+ pub flags: c_uint,
+ pub segment: gst::GstSegment,
+ _gst_reserved: [gpointer; 20],
+ priv_: *mut GstBaseParsePrivate,
+}
+
+#[repr(C)]
+pub struct GstBaseSink {
+ pub element: gst::GstElement,
+ pub sinkpad: *mut gst::GstPad,
+ pub pad_mode: gst::GstPadMode,
+ pub offset: u64,
+ pub can_activate_pull: gboolean,
+ pub can_activate_push: gboolean,
+ _truncated_record_marker: c_void,
+ //union,
+ //preroll_cond: GCond,
+ //eos: gboolean,
+ //need_preroll: gboolean,
+ //have_preroll: gboolean,
+ //playing_async: gboolean,
+ //have_newsegment: gboolean,
+ //segment: GstSegment,
+ //clock_id: GstClockID,
+ //sync: gboolean,
+ //flushing: gboolean,
+ //running: gboolean,
+ //max_lateness: gint64,
+ //priv: GstBaseSinkPrivate*,
+ //_gst_reserved: gpointer,
+}
+
+#[repr(C)]
+pub struct GstBaseSrc {
+ pub element: gst::GstElement,
+ pub srcpad: *mut gst::GstPad,
+ _truncated_record_marker: c_void,
+ //union,
+ //live_cond: GCond,
+ //is_live: gboolean,
+ //live_running: gboolean,
+ //blocksize: guint,
+ //can_activate_push: gboolean,
+ //random_access: gboolean,
+ //clock_id: GstClockID,
+ //segment: GstSegment,
+ //need_newsegment: gboolean,
+ //num_buffers: gint,
+ //num_buffers_left: gint,
+ //typefind: gboolean,
+ //running: gboolean,
+ //pending_seek: GstEvent*,
+ //priv: GstBaseSrcPrivate*,
+ //_gst_reserved: gpointer,
+}
+
+#[repr(C)]
+pub struct GstBaseTransform {
+ pub element: gst::GstElement,
+ pub sinkpad: *mut gst::GstPad,
+ pub srcpad: *mut gst::GstPad,
+ pub have_segment: gboolean,
+ pub segment: gst::GstSegment,
+ pub queued_buf: *mut gst::GstBuffer,
+ priv_: *mut GstBaseTransformPrivate,
+ _gst_reserved: [gpointer; 19],
+}
+
+#[repr(C)]
+pub struct GstCollectPads {
+ pub object: gst::GstObject,
+ pub data: *mut glib::GSList,
+ stream_lock: glib::GRecMutex,
+ priv_: *mut GstCollectPadsPrivate,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstDataQueue {
+ pub object: gobject::GObject,
+ priv_: *mut GstDataQueuePrivate,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstPushSrc {
+ pub parent: GstBaseSrc,
+ _gst_reserved: [gpointer; 4],
+}
+
+extern "C" {
+
+ //=========================================================================
+ // GstBaseParseFrame
+ //=========================================================================
+ pub fn gst_base_parse_frame_get_type() -> GType;
+ pub fn gst_base_parse_frame_new(buffer: *mut gst::GstBuffer, flags: GstBaseParseFrameFlags, overhead: c_int) -> *mut GstBaseParseFrame;
+ pub fn gst_base_parse_frame_free(frame: *mut GstBaseParseFrame);
+ pub fn gst_base_parse_frame_init(frame: *mut GstBaseParseFrame);
+
+ //=========================================================================
+ // GstBitReader
+ //=========================================================================
+ pub fn gst_bit_reader_free(reader: *mut GstBitReader);
+ pub fn gst_bit_reader_get_bits_uint16(reader: *mut GstBitReader, val: *mut u16, nbits: c_uint) -> gboolean;
+ pub fn gst_bit_reader_get_bits_uint32(reader: *mut GstBitReader, val: *mut u32, nbits: c_uint) -> gboolean;
+ pub fn gst_bit_reader_get_bits_uint64(reader: *mut GstBitReader, val: *mut u64, nbits: c_uint) -> gboolean;
+ pub fn gst_bit_reader_get_bits_uint8(reader: *mut GstBitReader, val: *mut u8, nbits: c_uint) -> gboolean;
+ pub fn gst_bit_reader_get_pos(reader: *const GstBitReader) -> c_uint;
+ pub fn gst_bit_reader_get_remaining(reader: *const GstBitReader) -> c_uint;
+ pub fn gst_bit_reader_get_size(reader: *const GstBitReader) -> c_uint;
+ pub fn gst_bit_reader_init(reader: *mut GstBitReader, data: *mut u8, size: c_uint);
+ pub fn gst_bit_reader_peek_bits_uint16(reader: *const GstBitReader, val: *mut u16, nbits: c_uint) -> gboolean;
+ pub fn gst_bit_reader_peek_bits_uint32(reader: *const GstBitReader, val: *mut u32, nbits: c_uint) -> gboolean;
+ pub fn gst_bit_reader_peek_bits_uint64(reader: *const GstBitReader, val: *mut u64, nbits: c_uint) -> gboolean;
+ pub fn gst_bit_reader_peek_bits_uint8(reader: *const GstBitReader, val: *mut u8, nbits: c_uint) -> gboolean;
+ pub fn gst_bit_reader_set_pos(reader: *mut GstBitReader, pos: c_uint) -> gboolean;
+ pub fn gst_bit_reader_skip(reader: *mut GstBitReader, nbits: c_uint) -> gboolean;
+ pub fn gst_bit_reader_skip_to_byte(reader: *mut GstBitReader) -> gboolean;
+ pub fn gst_bit_reader_new(data: *mut u8, size: c_uint) -> *mut GstBitReader;
+
+ //=========================================================================
+ // GstByteReader
+ //=========================================================================
+ pub fn gst_byte_reader_dup_data(reader: *mut GstByteReader, size: c_uint, val: *mut *mut u8) -> gboolean;
+ pub fn gst_byte_reader_dup_string_utf16(reader: *mut GstByteReader, str: *mut *mut u16) -> gboolean;
+ pub fn gst_byte_reader_dup_string_utf32(reader: *mut GstByteReader, str: *mut *mut u32) -> gboolean;
+ pub fn gst_byte_reader_dup_string_utf8(reader: *mut GstByteReader, str: *mut *mut c_char) -> gboolean;
+ pub fn gst_byte_reader_free(reader: *mut GstByteReader);
+ pub fn gst_byte_reader_get_data(reader: *mut GstByteReader, size: c_uint, val: *mut *mut u8) -> gboolean;
+ pub fn gst_byte_reader_get_float32_be(reader: *mut GstByteReader, val: *mut c_float) -> gboolean;
+ pub fn gst_byte_reader_get_float32_le(reader: *mut GstByteReader, val: *mut c_float) -> gboolean;
+ pub fn gst_byte_reader_get_float64_be(reader: *mut GstByteReader, val: *mut c_double) -> gboolean;
+ pub fn gst_byte_reader_get_float64_le(reader: *mut GstByteReader, val: *mut c_double) -> gboolean;
+ pub fn gst_byte_reader_get_int16_be(reader: *mut GstByteReader, val: *mut i16) -> gboolean;
+ pub fn gst_byte_reader_get_int16_le(reader: *mut GstByteReader, val: *mut i16) -> gboolean;
+ pub fn gst_byte_reader_get_int24_be(reader: *mut GstByteReader, val: *mut i32) -> gboolean;
+ pub fn gst_byte_reader_get_int24_le(reader: *mut GstByteReader, val: *mut i32) -> gboolean;
+ pub fn gst_byte_reader_get_int32_be(reader: *mut GstByteReader, val: *mut i32) -> gboolean;
+ pub fn gst_byte_reader_get_int32_le(reader: *mut GstByteReader, val: *mut i32) -> gboolean;
+ pub fn gst_byte_reader_get_int64_be(reader: *mut GstByteReader, val: *mut i64) -> gboolean;
+ pub fn gst_byte_reader_get_int64_le(reader: *mut GstByteReader, val: *mut i64) -> gboolean;
+ pub fn gst_byte_reader_get_int8(reader: *mut GstByteReader, val: *mut i8) -> gboolean;
+ pub fn gst_byte_reader_get_pos(reader: *const GstByteReader) -> c_uint;
+ pub fn gst_byte_reader_get_remaining(reader: *const GstByteReader) -> c_uint;
+ pub fn gst_byte_reader_get_size(reader: *const GstByteReader) -> c_uint;
+ pub fn gst_byte_reader_get_string_utf8(reader: *mut GstByteReader, str: *mut *mut c_char) -> gboolean;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_byte_reader_get_sub_reader(reader: *mut GstByteReader, sub_reader: *mut GstByteReader, size: c_uint) -> gboolean;
+ pub fn gst_byte_reader_get_uint16_be(reader: *mut GstByteReader, val: *mut u16) -> gboolean;
+ pub fn gst_byte_reader_get_uint16_le(reader: *mut GstByteReader, val: *mut u16) -> gboolean;
+ pub fn gst_byte_reader_get_uint24_be(reader: *mut GstByteReader, val: *mut u32) -> gboolean;
+ pub fn gst_byte_reader_get_uint24_le(reader: *mut GstByteReader, val: *mut u32) -> gboolean;
+ pub fn gst_byte_reader_get_uint32_be(reader: *mut GstByteReader, val: *mut u32) -> gboolean;
+ pub fn gst_byte_reader_get_uint32_le(reader: *mut GstByteReader, val: *mut u32) -> gboolean;
+ pub fn gst_byte_reader_get_uint64_be(reader: *mut GstByteReader, val: *mut u64) -> gboolean;
+ pub fn gst_byte_reader_get_uint64_le(reader: *mut GstByteReader, val: *mut u64) -> gboolean;
+ pub fn gst_byte_reader_get_uint8(reader: *mut GstByteReader, val: *mut u8) -> gboolean;
+ pub fn gst_byte_reader_init(reader: *mut GstByteReader, data: *mut u8, size: c_uint);
+ pub fn gst_byte_reader_masked_scan_uint32(reader: *const GstByteReader, mask: u32, pattern: u32, offset: c_uint, size: c_uint) -> c_uint;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_byte_reader_masked_scan_uint32_peek(reader: *const GstByteReader, mask: u32, pattern: u32, offset: c_uint, size: c_uint, value: *mut u32) -> c_uint;
+ pub fn gst_byte_reader_peek_data(reader: *const GstByteReader, size: c_uint, val: *mut *mut u8) -> gboolean;
+ pub fn gst_byte_reader_peek_float32_be(reader: *const GstByteReader, val: *mut c_float) -> gboolean;
+ pub fn gst_byte_reader_peek_float32_le(reader: *const GstByteReader, val: *mut c_float) -> gboolean;
+ pub fn gst_byte_reader_peek_float64_be(reader: *const GstByteReader, val: *mut c_double) -> gboolean;
+ pub fn gst_byte_reader_peek_float64_le(reader: *const GstByteReader, val: *mut c_double) -> gboolean;
+ pub fn gst_byte_reader_peek_int16_be(reader: *const GstByteReader, val: *mut i16) -> gboolean;
+ pub fn gst_byte_reader_peek_int16_le(reader: *const GstByteReader, val: *mut i16) -> gboolean;
+ pub fn gst_byte_reader_peek_int24_be(reader: *const GstByteReader, val: *mut i32) -> gboolean;
+ pub fn gst_byte_reader_peek_int24_le(reader: *const GstByteReader, val: *mut i32) -> gboolean;
+ pub fn gst_byte_reader_peek_int32_be(reader: *const GstByteReader, val: *mut i32) -> gboolean;
+ pub fn gst_byte_reader_peek_int32_le(reader: *const GstByteReader, val: *mut i32) -> gboolean;
+ pub fn gst_byte_reader_peek_int64_be(reader: *const GstByteReader, val: *mut i64) -> gboolean;
+ pub fn gst_byte_reader_peek_int64_le(reader: *const GstByteReader, val: *mut i64) -> gboolean;
+ pub fn gst_byte_reader_peek_int8(reader: *const GstByteReader, val: *mut i8) -> gboolean;
+ pub fn gst_byte_reader_peek_string_utf8(reader: *const GstByteReader, str: *mut *mut c_char) -> gboolean;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_byte_reader_peek_sub_reader(reader: *mut GstByteReader, sub_reader: *mut GstByteReader, size: c_uint) -> gboolean;
+ pub fn gst_byte_reader_peek_uint16_be(reader: *const GstByteReader, val: *mut u16) -> gboolean;
+ pub fn gst_byte_reader_peek_uint16_le(reader: *const GstByteReader, val: *mut u16) -> gboolean;
+ pub fn gst_byte_reader_peek_uint24_be(reader: *const GstByteReader, val: *mut u32) -> gboolean;
+ pub fn gst_byte_reader_peek_uint24_le(reader: *const GstByteReader, val: *mut u32) -> gboolean;
+ pub fn gst_byte_reader_peek_uint32_be(reader: *const GstByteReader, val: *mut u32) -> gboolean;
+ pub fn gst_byte_reader_peek_uint32_le(reader: *const GstByteReader, val: *mut u32) -> gboolean;
+ pub fn gst_byte_reader_peek_uint64_be(reader: *const GstByteReader, val: *mut u64) -> gboolean;
+ pub fn gst_byte_reader_peek_uint64_le(reader: *const GstByteReader, val: *mut u64) -> gboolean;
+ pub fn gst_byte_reader_peek_uint8(reader: *const GstByteReader, val: *mut u8) -> gboolean;
+ pub fn gst_byte_reader_set_pos(reader: *mut GstByteReader, pos: c_uint) -> gboolean;
+ pub fn gst_byte_reader_skip(reader: *mut GstByteReader, nbytes: c_uint) -> gboolean;
+ pub fn gst_byte_reader_skip_string_utf16(reader: *mut GstByteReader) -> gboolean;
+ pub fn gst_byte_reader_skip_string_utf32(reader: *mut GstByteReader) -> gboolean;
+ pub fn gst_byte_reader_skip_string_utf8(reader: *mut GstByteReader) -> gboolean;
+ pub fn gst_byte_reader_new(data: *mut u8, size: c_uint) -> *mut GstByteReader;
+
+ //=========================================================================
+ // GstByteWriter
+ //=========================================================================
+ pub fn gst_byte_writer_ensure_free_space(writer: *mut GstByteWriter, size: c_uint) -> gboolean;
+ pub fn gst_byte_writer_fill(writer: *mut GstByteWriter, value: u8, size: c_uint) -> gboolean;
+ pub fn gst_byte_writer_free(writer: *mut GstByteWriter);
+ pub fn gst_byte_writer_free_and_get_buffer(writer: *mut GstByteWriter) -> *mut gst::GstBuffer;
+ pub fn gst_byte_writer_free_and_get_data(writer: *mut GstByteWriter) -> *mut u8;
+ pub fn gst_byte_writer_get_remaining(writer: *const GstByteWriter) -> c_uint;
+ pub fn gst_byte_writer_init(writer: *mut GstByteWriter);
+ pub fn gst_byte_writer_init_with_data(writer: *mut GstByteWriter, data: *mut u8, size: c_uint, initialized: gboolean);
+ pub fn gst_byte_writer_init_with_size(writer: *mut GstByteWriter, size: c_uint, fixed: gboolean);
+ pub fn gst_byte_writer_put_buffer(writer: *mut GstByteWriter, buffer: *mut gst::GstBuffer, offset: size_t, size: ssize_t) -> gboolean;
+ pub fn gst_byte_writer_put_data(writer: *mut GstByteWriter, data: *mut u8, size: c_uint) -> gboolean;
+ pub fn gst_byte_writer_put_float32_be(writer: *mut GstByteWriter, val: c_float) -> gboolean;
+ pub fn gst_byte_writer_put_float32_le(writer: *mut GstByteWriter, val: c_float) -> gboolean;
+ pub fn gst_byte_writer_put_float64_be(writer: *mut GstByteWriter, val: c_double) -> gboolean;
+ pub fn gst_byte_writer_put_float64_le(writer: *mut GstByteWriter, val: c_double) -> gboolean;
+ pub fn gst_byte_writer_put_int16_be(writer: *mut GstByteWriter, val: i16) -> gboolean;
+ pub fn gst_byte_writer_put_int16_le(writer: *mut GstByteWriter, val: i16) -> gboolean;
+ pub fn gst_byte_writer_put_int24_be(writer: *mut GstByteWriter, val: i32) -> gboolean;
+ pub fn gst_byte_writer_put_int24_le(writer: *mut GstByteWriter, val: i32) -> gboolean;
+ pub fn gst_byte_writer_put_int32_be(writer: *mut GstByteWriter, val: i32) -> gboolean;
+ pub fn gst_byte_writer_put_int32_le(writer: *mut GstByteWriter, val: i32) -> gboolean;
+ pub fn gst_byte_writer_put_int64_be(writer: *mut GstByteWriter, val: i64) -> gboolean;
+ pub fn gst_byte_writer_put_int64_le(writer: *mut GstByteWriter, val: i64) -> gboolean;
+ pub fn gst_byte_writer_put_int8(writer: *mut GstByteWriter, val: i8) -> gboolean;
+ pub fn gst_byte_writer_put_string_utf16(writer: *mut GstByteWriter, data: *mut u16) -> gboolean;
+ pub fn gst_byte_writer_put_string_utf32(writer: *mut GstByteWriter, data: *mut u32) -> gboolean;
+ pub fn gst_byte_writer_put_string_utf8(writer: *mut GstByteWriter, data: *mut c_char) -> gboolean;
+ pub fn gst_byte_writer_put_uint16_be(writer: *mut GstByteWriter, val: u16) -> gboolean;
+ pub fn gst_byte_writer_put_uint16_le(writer: *mut GstByteWriter, val: u16) -> gboolean;
+ pub fn gst_byte_writer_put_uint24_be(writer: *mut GstByteWriter, val: u32) -> gboolean;
+ pub fn gst_byte_writer_put_uint24_le(writer: *mut GstByteWriter, val: u32) -> gboolean;
+ pub fn gst_byte_writer_put_uint32_be(writer: *mut GstByteWriter, val: u32) -> gboolean;
+ pub fn gst_byte_writer_put_uint32_le(writer: *mut GstByteWriter, val: u32) -> gboolean;
+ pub fn gst_byte_writer_put_uint64_be(writer: *mut GstByteWriter, val: u64) -> gboolean;
+ pub fn gst_byte_writer_put_uint64_le(writer: *mut GstByteWriter, val: u64) -> gboolean;
+ pub fn gst_byte_writer_put_uint8(writer: *mut GstByteWriter, val: u8) -> gboolean;
+ pub fn gst_byte_writer_reset(writer: *mut GstByteWriter);
+ pub fn gst_byte_writer_reset_and_get_buffer(writer: *mut GstByteWriter) -> *mut gst::GstBuffer;
+ pub fn gst_byte_writer_reset_and_get_data(writer: *mut GstByteWriter) -> *mut u8;
+ pub fn gst_byte_writer_new() -> *mut GstByteWriter;
+ pub fn gst_byte_writer_new_with_data(data: *mut u8, size: c_uint, initialized: gboolean) -> *mut GstByteWriter;
+ pub fn gst_byte_writer_new_with_size(size: c_uint, fixed: gboolean) -> *mut GstByteWriter;
+
+ //=========================================================================
+ // GstFlowCombiner
+ //=========================================================================
+ pub fn gst_flow_combiner_get_type() -> GType;
+ #[cfg(feature = "v1_4")]
+ pub fn gst_flow_combiner_new() -> *mut GstFlowCombiner;
+ #[cfg(feature = "v1_4")]
+ pub fn gst_flow_combiner_add_pad(combiner: *mut GstFlowCombiner, pad: *mut gst::GstPad);
+ #[cfg(feature = "v1_6")]
+ pub fn gst_flow_combiner_clear(combiner: *mut GstFlowCombiner);
+ #[cfg(feature = "v1_4")]
+ pub fn gst_flow_combiner_free(combiner: *mut GstFlowCombiner);
+ #[cfg(feature = "v1_4")]
+ pub fn gst_flow_combiner_remove_pad(combiner: *mut GstFlowCombiner, pad: *mut gst::GstPad);
+ #[cfg(feature = "v1_6")]
+ pub fn gst_flow_combiner_reset(combiner: *mut GstFlowCombiner);
+ #[cfg(feature = "v1_4")]
+ pub fn gst_flow_combiner_update_flow(combiner: *mut GstFlowCombiner, fret: gst::GstFlowReturn) -> gst::GstFlowReturn;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_flow_combiner_update_pad_flow(combiner: *mut GstFlowCombiner, pad: *mut gst::GstPad, fret: gst::GstFlowReturn) -> gst::GstFlowReturn;
+
+ //=========================================================================
+ // GstQueueArray
+ //=========================================================================
+ #[cfg(feature = "v1_2")]
+ pub fn gst_queue_array_drop_element(array: *mut GstQueueArray, idx: c_uint) -> gpointer;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_queue_array_drop_struct(array: *mut GstQueueArray, idx: c_uint, p_struct: gpointer) -> gboolean;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_queue_array_find(array: *mut GstQueueArray, func: glib::GCompareFunc, data: gpointer) -> c_uint;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_queue_array_free(array: *mut GstQueueArray);
+ #[cfg(feature = "v1_2")]
+ pub fn gst_queue_array_get_length(array: *mut GstQueueArray) -> c_uint;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_queue_array_is_empty(array: *mut GstQueueArray) -> gboolean;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_queue_array_peek_head(array: *mut GstQueueArray) -> gpointer;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_queue_array_peek_head_struct(array: *mut GstQueueArray) -> gpointer;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_queue_array_pop_head(array: *mut GstQueueArray) -> gpointer;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_queue_array_pop_head_struct(array: *mut GstQueueArray) -> gpointer;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_queue_array_push_tail(array: *mut GstQueueArray, data: gpointer);
+ pub fn gst_queue_array_push_tail_struct(array: *mut GstQueueArray, p_struct: gpointer);
+ #[cfg(feature = "v1_2")]
+ pub fn gst_queue_array_new(initial_size: c_uint) -> *mut GstQueueArray;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_queue_array_new_for_struct(struct_size: size_t, initial_size: c_uint) -> *mut GstQueueArray;
+
+ //=========================================================================
+ // GstAdapter
+ //=========================================================================
+ pub fn gst_adapter_get_type() -> GType;
+ pub fn gst_adapter_new() -> *mut GstAdapter;
+ pub fn gst_adapter_available(adapter: *mut GstAdapter) -> size_t;
+ pub fn gst_adapter_available_fast(adapter: *mut GstAdapter) -> size_t;
+ pub fn gst_adapter_clear(adapter: *mut GstAdapter);
+ pub fn gst_adapter_copy(adapter: *mut GstAdapter, dest: gpointer, offset: size_t, size: size_t);
+ #[cfg(feature = "v1_4")]
+ pub fn gst_adapter_copy_bytes(adapter: *mut GstAdapter, offset: size_t, size: size_t) -> *mut glib::GBytes;
+ pub fn gst_adapter_distance_from_discont(adapter: *mut GstAdapter) -> u64;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_adapter_dts_at_discont(adapter: *mut GstAdapter) -> gst::GstClockTime;
+ pub fn gst_adapter_flush(adapter: *mut GstAdapter, flush: size_t);
+ #[cfg(feature = "v1_6")]
+ pub fn gst_adapter_get_buffer(adapter: *mut GstAdapter, nbytes: size_t) -> *mut gst::GstBuffer;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_adapter_get_buffer_fast(adapter: *mut GstAdapter, nbytes: size_t) -> *mut gst::GstBuffer;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_adapter_get_buffer_list(adapter: *mut GstAdapter, nbytes: size_t) -> *mut gst::GstBufferList;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_adapter_get_list(adapter: *mut GstAdapter, nbytes: size_t) -> *mut glib::GList;
+ pub fn gst_adapter_map(adapter: *mut GstAdapter, size: size_t) -> gconstpointer;
+ pub fn gst_adapter_masked_scan_uint32(adapter: *mut GstAdapter, mask: u32, pattern: u32, offset: size_t, size: size_t) -> ssize_t;
+ pub fn gst_adapter_masked_scan_uint32_peek(adapter: *mut GstAdapter, mask: u32, pattern: u32, offset: size_t, size: size_t, value: *mut u32) -> ssize_t;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_adapter_offset_at_discont(adapter: *mut GstAdapter) -> u64;
+ pub fn gst_adapter_prev_dts(adapter: *mut GstAdapter, distance: *mut u64) -> gst::GstClockTime;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_adapter_prev_dts_at_offset(adapter: *mut GstAdapter, offset: size_t, distance: *mut u64) -> gst::GstClockTime;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_adapter_prev_offset(adapter: *mut GstAdapter, distance: *mut u64) -> u64;
+ pub fn gst_adapter_prev_pts(adapter: *mut GstAdapter, distance: *mut u64) -> gst::GstClockTime;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_adapter_prev_pts_at_offset(adapter: *mut GstAdapter, offset: size_t, distance: *mut u64) -> gst::GstClockTime;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_adapter_pts_at_discont(adapter: *mut GstAdapter) -> gst::GstClockTime;
+ pub fn gst_adapter_push(adapter: *mut GstAdapter, buf: *mut gst::GstBuffer);
+ pub fn gst_adapter_take(adapter: *mut GstAdapter, nbytes: size_t) -> gpointer;
+ pub fn gst_adapter_take_buffer(adapter: *mut GstAdapter, nbytes: size_t) -> *mut gst::GstBuffer;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_adapter_take_buffer_fast(adapter: *mut GstAdapter, nbytes: size_t) -> *mut gst::GstBuffer;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_adapter_take_buffer_list(adapter: *mut GstAdapter, nbytes: size_t) -> *mut gst::GstBufferList;
+ pub fn gst_adapter_take_list(adapter: *mut GstAdapter, nbytes: size_t) -> *mut glib::GList;
+ pub fn gst_adapter_unmap(adapter: *mut GstAdapter);
+
+ //=========================================================================
+ // GstBaseParse
+ //=========================================================================
+ pub fn gst_base_parse_get_type() -> GType;
+ pub fn gst_base_parse_add_index_entry(parse: *mut GstBaseParse, offset: u64, ts: gst::GstClockTime, key: gboolean, force: gboolean) -> gboolean;
+ pub fn gst_base_parse_convert_default(parse: *mut GstBaseParse, src_format: gst::GstFormat, src_value: i64, dest_format: gst::GstFormat, dest_value: *mut i64) -> gboolean;
+ #[cfg(feature = "v1_12")]
+ pub fn gst_base_parse_drain(parse: *mut GstBaseParse);
+ pub fn gst_base_parse_finish_frame(parse: *mut GstBaseParse, frame: *mut GstBaseParseFrame, size: c_int) -> gst::GstFlowReturn;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_base_parse_merge_tags(parse: *mut GstBaseParse, tags: *mut gst::GstTagList, mode: gst::GstTagMergeMode);
+ pub fn gst_base_parse_push_frame(parse: *mut GstBaseParse, frame: *mut GstBaseParseFrame) -> gst::GstFlowReturn;
+ pub fn gst_base_parse_set_average_bitrate(parse: *mut GstBaseParse, bitrate: c_uint);
+ pub fn gst_base_parse_set_duration(parse: *mut GstBaseParse, fmt: gst::GstFormat, duration: i64, interval: c_int);
+ pub fn gst_base_parse_set_frame_rate(parse: *mut GstBaseParse, fps_num: c_uint, fps_den: c_uint, lead_in: c_uint, lead_out: c_uint);
+ pub fn gst_base_parse_set_has_timing_info(parse: *mut GstBaseParse, has_timing: gboolean);
+ pub fn gst_base_parse_set_infer_ts(parse: *mut GstBaseParse, infer_ts: gboolean);
+ pub fn gst_base_parse_set_latency(parse: *mut GstBaseParse, min_latency: gst::GstClockTime, max_latency: gst::GstClockTime);
+ pub fn gst_base_parse_set_min_frame_size(parse: *mut GstBaseParse, min_size: c_uint);
+ pub fn gst_base_parse_set_passthrough(parse: *mut GstBaseParse, passthrough: gboolean);
+ pub fn gst_base_parse_set_pts_interpolation(parse: *mut GstBaseParse, pts_interpolate: gboolean);
+ pub fn gst_base_parse_set_syncable(parse: *mut GstBaseParse, syncable: gboolean);
+ #[cfg(feature = "v1_2")]
+ pub fn gst_base_parse_set_ts_at_offset(parse: *mut GstBaseParse, offset: size_t);
+
+ //=========================================================================
+ // GstBaseSink
+ //=========================================================================
+ pub fn gst_base_sink_get_type() -> GType;
+ pub fn gst_base_sink_do_preroll(sink: *mut GstBaseSink, obj: *mut gst::GstMiniObject) -> gst::GstFlowReturn;
+ pub fn gst_base_sink_get_blocksize(sink: *mut GstBaseSink) -> c_uint;
+ #[cfg(feature = "v1_12")]
+ pub fn gst_base_sink_get_drop_out_of_segment(sink: *mut GstBaseSink) -> gboolean;
+ pub fn gst_base_sink_get_last_sample(sink: *mut GstBaseSink) -> *mut gst::GstSample;
+ pub fn gst_base_sink_get_latency(sink: *mut GstBaseSink) -> gst::GstClockTime;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_base_sink_get_max_bitrate(sink: *mut GstBaseSink) -> u64;
+ pub fn gst_base_sink_get_max_lateness(sink: *mut GstBaseSink) -> i64;
+ pub fn gst_base_sink_get_render_delay(sink: *mut GstBaseSink) -> gst::GstClockTime;
+ pub fn gst_base_sink_get_sync(sink: *mut GstBaseSink) -> gboolean;
+ pub fn gst_base_sink_get_throttle_time(sink: *mut GstBaseSink) -> u64;
+ pub fn gst_base_sink_get_ts_offset(sink: *mut GstBaseSink) -> gst::GstClockTimeDiff;
+ pub fn gst_base_sink_is_async_enabled(sink: *mut GstBaseSink) -> gboolean;
+ pub fn gst_base_sink_is_last_sample_enabled(sink: *mut GstBaseSink) -> gboolean;
+ pub fn gst_base_sink_is_qos_enabled(sink: *mut GstBaseSink) -> gboolean;
+ pub fn gst_base_sink_query_latency(sink: *mut GstBaseSink, live: *mut gboolean, upstream_live: *mut gboolean, min_latency: *mut gst::GstClockTime, max_latency: *mut gst::GstClockTime) -> gboolean;
+ pub fn gst_base_sink_set_async_enabled(sink: *mut GstBaseSink, enabled: gboolean);
+ pub fn gst_base_sink_set_blocksize(sink: *mut GstBaseSink, blocksize: c_uint);
+ #[cfg(feature = "v1_12")]
+ pub fn gst_base_sink_set_drop_out_of_segment(sink: *mut GstBaseSink, drop_out_of_segment: gboolean);
+ pub fn gst_base_sink_set_last_sample_enabled(sink: *mut GstBaseSink, enabled: gboolean);
+ #[cfg(feature = "v1_2")]
+ pub fn gst_base_sink_set_max_bitrate(sink: *mut GstBaseSink, max_bitrate: u64);
+ pub fn gst_base_sink_set_max_lateness(sink: *mut GstBaseSink, max_lateness: i64);
+ pub fn gst_base_sink_set_qos_enabled(sink: *mut GstBaseSink, enabled: gboolean);
+ pub fn gst_base_sink_set_render_delay(sink: *mut GstBaseSink, delay: gst::GstClockTime);
+ pub fn gst_base_sink_set_sync(sink: *mut GstBaseSink, sync: gboolean);
+ pub fn gst_base_sink_set_throttle_time(sink: *mut GstBaseSink, throttle: u64);
+ pub fn gst_base_sink_set_ts_offset(sink: *mut GstBaseSink, offset: gst::GstClockTimeDiff);
+ pub fn gst_base_sink_wait(sink: *mut GstBaseSink, time: gst::GstClockTime, jitter: *mut gst::GstClockTimeDiff) -> gst::GstFlowReturn;
+ pub fn gst_base_sink_wait_clock(sink: *mut GstBaseSink, time: gst::GstClockTime, jitter: *mut gst::GstClockTimeDiff) -> gst::GstClockReturn;
+ pub fn gst_base_sink_wait_preroll(sink: *mut GstBaseSink) -> gst::GstFlowReturn;
+
+ //=========================================================================
+ // GstBaseSrc
+ //=========================================================================
+ pub fn gst_base_src_get_type() -> GType;
+ pub fn gst_base_src_get_allocator(src: *mut GstBaseSrc, allocator: *mut *mut gst::GstAllocator, params: *mut gst::GstAllocationParams);
+ pub fn gst_base_src_get_blocksize(src: *mut GstBaseSrc) -> c_uint;
+ pub fn gst_base_src_get_buffer_pool(src: *mut GstBaseSrc) -> *mut gst::GstBufferPool;
+ pub fn gst_base_src_get_do_timestamp(src: *mut GstBaseSrc) -> gboolean;
+ pub fn gst_base_src_is_async(src: *mut GstBaseSrc) -> gboolean;
+ pub fn gst_base_src_is_live(src: *mut GstBaseSrc) -> gboolean;
+ pub fn gst_base_src_new_seamless_segment(src: *mut GstBaseSrc, start: i64, stop: i64, time: i64) -> gboolean;
+ pub fn gst_base_src_query_latency(src: *mut GstBaseSrc, live: *mut gboolean, min_latency: *mut gst::GstClockTime, max_latency: *mut gst::GstClockTime) -> gboolean;
+ pub fn gst_base_src_set_async(src: *mut GstBaseSrc, async: gboolean);
+ #[cfg(feature = "v1_4")]
+ pub fn gst_base_src_set_automatic_eos(src: *mut GstBaseSrc, automatic_eos: gboolean);
+ pub fn gst_base_src_set_blocksize(src: *mut GstBaseSrc, blocksize: c_uint);
+ pub fn gst_base_src_set_caps(src: *mut GstBaseSrc, caps: *mut gst::GstCaps) -> gboolean;
+ pub fn gst_base_src_set_do_timestamp(src: *mut GstBaseSrc, timestamp: gboolean);
+ pub fn gst_base_src_set_dynamic_size(src: *mut GstBaseSrc, dynamic: gboolean);
+ pub fn gst_base_src_set_format(src: *mut GstBaseSrc, format: gst::GstFormat);
+ pub fn gst_base_src_set_live(src: *mut GstBaseSrc, live: gboolean);
+ pub fn gst_base_src_start_complete(basesrc: *mut GstBaseSrc, ret: gst::GstFlowReturn);
+ pub fn gst_base_src_start_wait(basesrc: *mut GstBaseSrc) -> gst::GstFlowReturn;
+ pub fn gst_base_src_wait_playing(src: *mut GstBaseSrc) -> gst::GstFlowReturn;
+
+ //=========================================================================
+ // GstBaseTransform
+ //=========================================================================
+ pub fn gst_base_transform_get_type() -> GType;
+ pub fn gst_base_transform_get_allocator(trans: *mut GstBaseTransform, allocator: *mut *mut gst::GstAllocator, params: *mut gst::GstAllocationParams);
+ pub fn gst_base_transform_get_buffer_pool(trans: *mut GstBaseTransform) -> *mut gst::GstBufferPool;
+ pub fn gst_base_transform_is_in_place(trans: *mut GstBaseTransform) -> gboolean;
+ pub fn gst_base_transform_is_passthrough(trans: *mut GstBaseTransform) -> gboolean;
+ pub fn gst_base_transform_is_qos_enabled(trans: *mut GstBaseTransform) -> gboolean;
+ pub fn gst_base_transform_reconfigure_sink(trans: *mut GstBaseTransform);
+ pub fn gst_base_transform_reconfigure_src(trans: *mut GstBaseTransform);
+ pub fn gst_base_transform_set_gap_aware(trans: *mut GstBaseTransform, gap_aware: gboolean);
+ pub fn gst_base_transform_set_in_place(trans: *mut GstBaseTransform, in_place: gboolean);
+ pub fn gst_base_transform_set_passthrough(trans: *mut GstBaseTransform, passthrough: gboolean);
+ #[cfg(feature = "v1_0_1")]
+ pub fn gst_base_transform_set_prefer_passthrough(trans: *mut GstBaseTransform, prefer_passthrough: gboolean);
+ pub fn gst_base_transform_set_qos_enabled(trans: *mut GstBaseTransform, enabled: gboolean);
+ pub fn gst_base_transform_update_qos(trans: *mut GstBaseTransform, proportion: c_double, diff: gst::GstClockTimeDiff, timestamp: gst::GstClockTime);
+ #[cfg(feature = "v1_6")]
+ pub fn gst_base_transform_update_src_caps(trans: *mut GstBaseTransform, updated_caps: *mut gst::GstCaps) -> gboolean;
+
+ //=========================================================================
+ // GstCollectPads
+ //=========================================================================
+ pub fn gst_collect_pads_get_type() -> GType;
+ pub fn gst_collect_pads_new() -> *mut GstCollectPads;
+ pub fn gst_collect_pads_add_pad(pads: *mut GstCollectPads, pad: *mut gst::GstPad, size: c_uint, destroy_notify: GstCollectDataDestroyNotify, lock: gboolean) -> *mut GstCollectData;
+ pub fn gst_collect_pads_available(pads: *mut GstCollectPads) -> c_uint;
+ pub fn gst_collect_pads_clip_running_time(pads: *mut GstCollectPads, cdata: *mut GstCollectData, buf: *mut gst::GstBuffer, outbuf: *mut *mut gst::GstBuffer, user_data: gpointer) -> gst::GstFlowReturn;
+ pub fn gst_collect_pads_event_default(pads: *mut GstCollectPads, data: *mut GstCollectData, event: *mut gst::GstEvent, discard: gboolean) -> gboolean;
+ pub fn gst_collect_pads_flush(pads: *mut GstCollectPads, data: *mut GstCollectData, size: c_uint) -> c_uint;
+ pub fn gst_collect_pads_peek(pads: *mut GstCollectPads, data: *mut GstCollectData) -> *mut gst::GstBuffer;
+ pub fn gst_collect_pads_pop(pads: *mut GstCollectPads, data: *mut GstCollectData) -> *mut gst::GstBuffer;
+ pub fn gst_collect_pads_query_default(pads: *mut GstCollectPads, data: *mut GstCollectData, query: *mut gst::GstQuery, discard: gboolean) -> gboolean;
+ pub fn gst_collect_pads_read_buffer(pads: *mut GstCollectPads, data: *mut GstCollectData, size: c_uint) -> *mut gst::GstBuffer;
+ pub fn gst_collect_pads_remove_pad(pads: *mut GstCollectPads, pad: *mut gst::GstPad) -> gboolean;
+ pub fn gst_collect_pads_set_buffer_function(pads: *mut GstCollectPads, func: GstCollectPadsBufferFunction, user_data: gpointer);
+ pub fn gst_collect_pads_set_clip_function(pads: *mut GstCollectPads, clipfunc: GstCollectPadsClipFunction, user_data: gpointer);
+ pub fn gst_collect_pads_set_compare_function(pads: *mut GstCollectPads, func: GstCollectPadsCompareFunction, user_data: gpointer);
+ pub fn gst_collect_pads_set_event_function(pads: *mut GstCollectPads, func: GstCollectPadsEventFunction, user_data: gpointer);
+ #[cfg(feature = "v1_4")]
+ pub fn gst_collect_pads_set_flush_function(pads: *mut GstCollectPads, func: GstCollectPadsFlushFunction, user_data: gpointer);
+ pub fn gst_collect_pads_set_flushing(pads: *mut GstCollectPads, flushing: gboolean);
+ pub fn gst_collect_pads_set_function(pads: *mut GstCollectPads, func: GstCollectPadsFunction, user_data: gpointer);
+ pub fn gst_collect_pads_set_query_function(pads: *mut GstCollectPads, func: GstCollectPadsQueryFunction, user_data: gpointer);
+ pub fn gst_collect_pads_set_waiting(pads: *mut GstCollectPads, data: *mut GstCollectData, waiting: gboolean);
+ #[cfg(feature = "v1_4")]
+ pub fn gst_collect_pads_src_event_default(pads: *mut GstCollectPads, pad: *mut gst::GstPad, event: *mut gst::GstEvent) -> gboolean;
+ pub fn gst_collect_pads_start(pads: *mut GstCollectPads);
+ pub fn gst_collect_pads_stop(pads: *mut GstCollectPads);
+ pub fn gst_collect_pads_take_buffer(pads: *mut GstCollectPads, data: *mut GstCollectData, size: c_uint) -> *mut gst::GstBuffer;
+
+ //=========================================================================
+ // GstDataQueue
+ //=========================================================================
+ pub fn gst_data_queue_get_type() -> GType;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_data_queue_new(checkfull: GstDataQueueCheckFullFunction, fullcallback: GstDataQueueFullCallback, emptycallback: GstDataQueueEmptyCallback, checkdata: gpointer) -> *mut GstDataQueue;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_data_queue_drop_head(queue: *mut GstDataQueue, type_: GType) -> gboolean;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_data_queue_flush(queue: *mut GstDataQueue);
+ #[cfg(feature = "v1_2")]
+ pub fn gst_data_queue_get_level(queue: *mut GstDataQueue, level: *mut GstDataQueueSize);
+ #[cfg(feature = "v1_2")]
+ pub fn gst_data_queue_is_empty(queue: *mut GstDataQueue) -> gboolean;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_data_queue_is_full(queue: *mut GstDataQueue) -> gboolean;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_data_queue_limits_changed(queue: *mut GstDataQueue);
+ #[cfg(feature = "v1_2")]
+ pub fn gst_data_queue_peek(queue: *mut GstDataQueue, item: *mut *mut GstDataQueueItem) -> gboolean;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_data_queue_pop(queue: *mut GstDataQueue, item: *mut *mut GstDataQueueItem) -> gboolean;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_data_queue_push(queue: *mut GstDataQueue, item: *mut GstDataQueueItem) -> gboolean;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_data_queue_push_force(queue: *mut GstDataQueue, item: *mut GstDataQueueItem) -> gboolean;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_data_queue_set_flushing(queue: *mut GstDataQueue, flushing: gboolean);
+
+ //=========================================================================
+ // GstPushSrc
+ //=========================================================================
+ pub fn gst_push_src_get_type() -> GType;
+
+ //=========================================================================
+ // Other functions
+ //=========================================================================
+ pub fn gst_type_find_helper(src: *mut gst::GstPad, size: u64) -> *mut gst::GstCaps;
+ pub fn gst_type_find_helper_for_buffer(obj: *mut gst::GstObject, buf: *mut gst::GstBuffer, prob: *mut gst::GstTypeFindProbability) -> *mut gst::GstCaps;
+ pub fn gst_type_find_helper_for_data(obj: *mut gst::GstObject, data: *const u8, size: size_t, prob: *mut gst::GstTypeFindProbability) -> *mut gst::GstCaps;
+ pub fn gst_type_find_helper_for_extension(obj: *mut gst::GstObject, extension: *const c_char) -> *mut gst::GstCaps;
+ pub fn gst_type_find_helper_get_range(obj: *mut gst::GstObject, parent: *mut gst::GstObject, func: GstTypeFindHelperGetRangeFunction, size: u64, extension: *const c_char, prob: *mut gst::GstTypeFindProbability) -> *mut gst::GstCaps;
+
+}
diff --git a/gstreamer-pbutils-sys/Cargo.toml b/gstreamer-pbutils-sys/Cargo.toml
new file mode 100644
index 000000000..7c724ffa7
--- /dev/null
+++ b/gstreamer-pbutils-sys/Cargo.toml
@@ -0,0 +1,56 @@
+[build-dependencies]
+pkg-config = "0.3.7"
+
+[dependencies]
+bitflags = "0.8"
+glib-sys = "0.3"
+gobject-sys = "0.3"
+libc = "0.2"
+
+[dependencies.gstreamer-sys]
+path = "../gstreamer-sys"
+version = "0.1.0"
+
+[dependencies.gstreamer-audio-sys]
+path = "../gstreamer-audio-sys"
+version = "0.1.0"
+
+[dependencies.gstreamer-video-sys]
+path = "../gstreamer-video-sys"
+version = "0.1.0"
+
+[dependencies.gstreamer-tag-sys]
+path = "../gstreamer-tag-sys"
+version = "0.1.0"
+
+[features]
+v1_0_10 = []
+v1_10 = ["v1_8"]
+v1_12 = ["v1_10"]
+v1_2 = ["v1_0_10"]
+v1_2_3 = ["v1_2"]
+v1_4 = ["v1_2_3"]
+v1_6 = ["v1_4"]
+v1_8 = ["v1_6"]
+
+[lib]
+name = "gstreamer_pbutils_sys"
+
+[package]
+build = "build.rs"
+links = "gstpbutils-1.0"
+name = "gstreamer-pbutils-sys"
+version = "0.1.0"
+authors = ["Sebastian Dröge <sebastian@centricular.com>"]
+description = "FFI bindings to libgstpbutils-1.0"
+homepage = "https://gstreamer.freedesktop.org"
+keywords = ["ffi", "gstreamer", "gnome", "multimedia"]
+repository = "https://github.com/sdroege/gstreamer-sys"
+license = "MIT"
+
+include = [
+ "src/*.rs",
+ "Cargo.toml",
+ "build.rs",
+ "LICENSE",
+]
diff --git a/gstreamer-pbutils-sys/LICENSE b/gstreamer-pbutils-sys/LICENSE
new file mode 100644
index 000000000..3d76f6e2f
--- /dev/null
+++ b/gstreamer-pbutils-sys/LICENSE
@@ -0,0 +1,23 @@
+The MIT License (MIT)
+
+Copyright (c) 2017 Sebastian Dröge <sebastian@centricular.com>.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
diff --git a/gstreamer-pbutils-sys/build.rs b/gstreamer-pbutils-sys/build.rs
new file mode 100644
index 000000000..40967ceab
--- /dev/null
+++ b/gstreamer-pbutils-sys/build.rs
@@ -0,0 +1,64 @@
+extern crate pkg_config;
+
+use pkg_config::{Config, Error};
+use std::env;
+use std::io::prelude::*;
+use std::io;
+use std::process;
+
+fn main() {
+ if let Err(s) = find() {
+ let _ = writeln!(io::stderr(), "{}", s);
+ process::exit(1);
+ }
+}
+
+fn find() -> Result<(), Error> {
+ let package_name = "gstreamer-pbutils-1.0";
+ let shared_libs = ["gstpbutils-1.0"];
+ let version = if cfg!(feature = "v1_8") {
+ "1.8"
+ } else if cfg!(feature = "v1_6") {
+ "1.6"
+ } else {
+ "1.0"
+ };
+
+ if let Ok(lib_dir) = env::var("GTK_LIB_DIR") {
+ for lib_ in shared_libs.iter() {
+ println!("cargo:rustc-link-lib=dylib={}", lib_);
+ }
+ println!("cargo:rustc-link-search=native={}", lib_dir);
+ return Ok(())
+ }
+
+ let target = env::var("TARGET").unwrap();
+ let hardcode_shared_libs = target.contains("windows");
+
+ let mut config = Config::new();
+ config.atleast_version(version);
+ if hardcode_shared_libs {
+ config.cargo_metadata(false);
+ }
+ match config.probe(package_name) {
+ Ok(library) => {
+ if hardcode_shared_libs {
+ for lib_ in shared_libs.iter() {
+ println!("cargo:rustc-link-lib=dylib={}", lib_);
+ }
+ for path in library.link_paths.iter() {
+ println!("cargo:rustc-link-search=native={}", path.to_str().unwrap());
+ }
+ }
+ Ok(())
+ }
+ Err(Error::EnvNoPkgConfig(_)) | Err(Error::Command { .. }) => {
+ for lib_ in shared_libs.iter() {
+ println!("cargo:rustc-link-lib=dylib={}", lib_);
+ }
+ Ok(())
+ }
+ Err(err) => Err(err),
+ }
+}
+
diff --git a/gstreamer-pbutils-sys/src/lib.rs b/gstreamer-pbutils-sys/src/lib.rs
new file mode 100644
index 000000000..fd3a58734
--- /dev/null
+++ b/gstreamer-pbutils-sys/src/lib.rs
@@ -0,0 +1,467 @@
+// This file was generated by gir (10e1d4f+) from gir-files (???)
+// DO NOT EDIT
+
+#![allow(non_camel_case_types, non_upper_case_globals)]
+
+extern crate libc;
+#[macro_use] extern crate bitflags;
+extern crate glib_sys as glib;
+extern crate gobject_sys as gobject;
+extern crate gstreamer_sys as gst;
+extern crate gstreamer_tag_sys as gst_tag;
+extern crate gstreamer_audio_sys as gst_audio;
+extern crate gstreamer_video_sys as gst_video;
+
+#[allow(unused_imports)]
+use libc::{c_int, c_char, c_uchar, c_float, c_uint, c_double,
+ c_short, c_ushort, c_long, c_ulong,
+ c_void, size_t, ssize_t, intptr_t, uintptr_t, time_t, FILE};
+
+#[allow(unused_imports)]
+use glib::{gboolean, gconstpointer, gpointer, GType, Volatile};
+
+// Aliases
+pub type GstDiscovererAudioInfoClass = gobject::GObjectClass;
+pub type GstDiscovererContainerInfoClass = gobject::GObjectClass;
+pub type GstDiscovererInfoClass = gobject::GObjectClass;
+pub type GstDiscovererStreamInfoClass = gobject::GObjectClass;
+pub type GstDiscovererSubtitleInfoClass = gobject::GObjectClass;
+pub type GstDiscovererVideoInfoClass = gobject::GObjectClass;
+pub type GstEncodingTargetClass = gobject::GObjectClass;
+
+// Enums
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstAudioVisualizerShader {
+ None = 0,
+ Fade = 1,
+ FadeAndMoveUp = 2,
+ FadeAndMoveDown = 3,
+ FadeAndMoveLeft = 4,
+ FadeAndMoveRight = 5,
+ FadeAndMoveHorizOut = 6,
+ FadeAndMoveHorizIn = 7,
+ FadeAndMoveVertOut = 8,
+ FadeAndMoveVertIn = 9,
+}
+pub const GST_AUDIO_VISUALIZER_SHADER_NONE: GstAudioVisualizerShader = GstAudioVisualizerShader::None;
+pub const GST_AUDIO_VISUALIZER_SHADER_FADE: GstAudioVisualizerShader = GstAudioVisualizerShader::Fade;
+pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_UP: GstAudioVisualizerShader = GstAudioVisualizerShader::FadeAndMoveUp;
+pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_DOWN: GstAudioVisualizerShader = GstAudioVisualizerShader::FadeAndMoveDown;
+pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_LEFT: GstAudioVisualizerShader = GstAudioVisualizerShader::FadeAndMoveLeft;
+pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_RIGHT: GstAudioVisualizerShader = GstAudioVisualizerShader::FadeAndMoveRight;
+pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_HORIZ_OUT: GstAudioVisualizerShader = GstAudioVisualizerShader::FadeAndMoveHorizOut;
+pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_HORIZ_IN: GstAudioVisualizerShader = GstAudioVisualizerShader::FadeAndMoveHorizIn;
+pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_VERT_OUT: GstAudioVisualizerShader = GstAudioVisualizerShader::FadeAndMoveVertOut;
+pub const GST_AUDIO_VISUALIZER_SHADER_FADE_AND_MOVE_VERT_IN: GstAudioVisualizerShader = GstAudioVisualizerShader::FadeAndMoveVertIn;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstDiscovererResult {
+ Ok = 0,
+ UriInvalid = 1,
+ Error = 2,
+ Timeout = 3,
+ Busy = 4,
+ MissingPlugins = 5,
+}
+pub const GST_DISCOVERER_OK: GstDiscovererResult = GstDiscovererResult::Ok;
+pub const GST_DISCOVERER_URI_INVALID: GstDiscovererResult = GstDiscovererResult::UriInvalid;
+pub const GST_DISCOVERER_ERROR: GstDiscovererResult = GstDiscovererResult::Error;
+pub const GST_DISCOVERER_TIMEOUT: GstDiscovererResult = GstDiscovererResult::Timeout;
+pub const GST_DISCOVERER_BUSY: GstDiscovererResult = GstDiscovererResult::Busy;
+pub const GST_DISCOVERER_MISSING_PLUGINS: GstDiscovererResult = GstDiscovererResult::MissingPlugins;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstInstallPluginsReturn {
+ Success = 0,
+ NotFound = 1,
+ Error = 2,
+ PartialSuccess = 3,
+ UserAbort = 4,
+ Crashed = 100,
+ Invalid = 101,
+ StartedOk = 200,
+ InternalFailure = 201,
+ HelperMissing = 202,
+ InstallInProgress = 203,
+}
+pub const GST_INSTALL_PLUGINS_SUCCESS: GstInstallPluginsReturn = GstInstallPluginsReturn::Success;
+pub const GST_INSTALL_PLUGINS_NOT_FOUND: GstInstallPluginsReturn = GstInstallPluginsReturn::NotFound;
+pub const GST_INSTALL_PLUGINS_ERROR: GstInstallPluginsReturn = GstInstallPluginsReturn::Error;
+pub const GST_INSTALL_PLUGINS_PARTIAL_SUCCESS: GstInstallPluginsReturn = GstInstallPluginsReturn::PartialSuccess;
+pub const GST_INSTALL_PLUGINS_USER_ABORT: GstInstallPluginsReturn = GstInstallPluginsReturn::UserAbort;
+pub const GST_INSTALL_PLUGINS_CRASHED: GstInstallPluginsReturn = GstInstallPluginsReturn::Crashed;
+pub const GST_INSTALL_PLUGINS_INVALID: GstInstallPluginsReturn = GstInstallPluginsReturn::Invalid;
+pub const GST_INSTALL_PLUGINS_STARTED_OK: GstInstallPluginsReturn = GstInstallPluginsReturn::StartedOk;
+pub const GST_INSTALL_PLUGINS_INTERNAL_FAILURE: GstInstallPluginsReturn = GstInstallPluginsReturn::InternalFailure;
+pub const GST_INSTALL_PLUGINS_HELPER_MISSING: GstInstallPluginsReturn = GstInstallPluginsReturn::HelperMissing;
+pub const GST_INSTALL_PLUGINS_INSTALL_IN_PROGRESS: GstInstallPluginsReturn = GstInstallPluginsReturn::InstallInProgress;
+
+// Constants
+pub const GST_ENCODING_CATEGORY_CAPTURE: *const c_char = b"capture\0" as *const u8 as *const c_char;
+pub const GST_ENCODING_CATEGORY_DEVICE: *const c_char = b"device\0" as *const u8 as *const c_char;
+pub const GST_ENCODING_CATEGORY_FILE_EXTENSION: *const c_char = b"file-extension\0" as *const u8 as *const c_char;
+pub const GST_ENCODING_CATEGORY_ONLINE_SERVICE: *const c_char = b"online-service\0" as *const u8 as *const c_char;
+pub const GST_ENCODING_CATEGORY_STORAGE_EDITING: *const c_char = b"storage-editing\0" as *const u8 as *const c_char;
+pub const GST_PLUGINS_BASE_VERSION_MAJOR: c_int = 1;
+pub const GST_PLUGINS_BASE_VERSION_MICRO: c_int = 90;
+pub const GST_PLUGINS_BASE_VERSION_MINOR: c_int = 11;
+pub const GST_PLUGINS_BASE_VERSION_NANO: c_int = 0;
+
+// Flags
+bitflags! {
+ #[repr(C)]
+ pub flags GstDiscovererSerializeFlags: c_uint {
+ const GST_DISCOVERER_SERIALIZE_BASIC = 0,
+ const GST_DISCOVERER_SERIALIZE_CAPS = 1,
+ const GST_DISCOVERER_SERIALIZE_TAGS = 2,
+ const GST_DISCOVERER_SERIALIZE_MISC = 4,
+ const GST_DISCOVERER_SERIALIZE_ALL = 7,
+ }
+}
+
+// Callbacks
+pub type GstAudioVisualizerShaderFunc = Option<unsafe extern "C" fn(*mut GstAudioVisualizer, *const gst_video::GstVideoFrame, *mut gst_video::GstVideoFrame)>;
+pub type GstInstallPluginsResultFunc = Option<unsafe extern "C" fn(GstInstallPluginsReturn, gpointer)>;
+
+// Records
+#[repr(C)]
+pub struct GstAudioVisualizerClass {
+ pub parent_class: gst::GstElementClass,
+    pub setup: Option<unsafe extern "C" fn(*mut GstAudioVisualizer) -> gboolean>,
+    pub render: Option<unsafe extern "C" fn(*mut GstAudioVisualizer, *mut gst::GstBuffer, *mut gst_video::GstVideoFrame) -> gboolean>,
+    pub decide_allocation: Option<unsafe extern "C" fn(*mut GstAudioVisualizer, *mut gst::GstQuery) -> gboolean>,
+}
+
+#[repr(C)]
+pub struct GstAudioVisualizerPrivate(c_void);
+
+#[repr(C)]
+pub struct GstDiscovererClass {
+ pub parentclass: gobject::GObjectClass,
+    pub finished: Option<unsafe extern "C" fn(*mut GstDiscoverer)>,
+    pub starting: Option<unsafe extern "C" fn(*mut GstDiscoverer)>,
+    pub discovered: Option<unsafe extern "C" fn(*mut GstDiscoverer, *mut GstDiscovererInfo, *const glib::GError)>,
+    pub source_setup: Option<unsafe extern "C" fn(*mut GstDiscoverer, *mut gst::GstElement)>,
+ pub _reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstDiscovererPrivate(c_void);
+
+#[repr(C)]
+pub struct GstEncodingAudioProfileClass(c_void);
+
+#[repr(C)]
+pub struct GstEncodingContainerProfileClass(c_void);
+
+#[repr(C)]
+pub struct GstEncodingProfileClass(c_void);
+
+#[repr(C)]
+pub struct GstEncodingVideoProfileClass(c_void);
+
+#[repr(C)]
+pub struct GstInstallPluginsContext(c_void);
+
+// Classes
+#[repr(C)]
+pub struct GstAudioVisualizer {
+ pub parent: gst::GstElement,
+ pub req_spf: c_uint,
+ pub vinfo: gst_video::GstVideoInfo,
+ pub ainfo: gst_audio::GstAudioInfo,
+ priv_: *mut GstAudioVisualizerPrivate,
+}
+
+#[repr(C)]
+pub struct GstDiscoverer {
+ pub parent: gobject::GObject,
+ priv_: *mut GstDiscovererPrivate,
+ _reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstDiscovererAudioInfo(c_void);
+
+#[repr(C)]
+pub struct GstDiscovererContainerInfo(c_void);
+
+#[repr(C)]
+pub struct GstDiscovererInfo(c_void);
+
+#[repr(C)]
+pub struct GstDiscovererStreamInfo(c_void);
+
+#[repr(C)]
+pub struct GstDiscovererSubtitleInfo(c_void);
+
+#[repr(C)]
+pub struct GstDiscovererVideoInfo(c_void);
+
+#[repr(C)]
+pub struct GstEncodingAudioProfile(c_void);
+
+#[repr(C)]
+pub struct GstEncodingContainerProfile(c_void);
+
+#[repr(C)]
+pub struct GstEncodingProfile(c_void);
+
+#[repr(C)]
+pub struct GstEncodingTarget(c_void);
+
+#[repr(C)]
+pub struct GstEncodingVideoProfile(c_void);
+
+extern "C" {
+
+ //=========================================================================
+ // GstInstallPluginsReturn
+ //=========================================================================
+ pub fn gst_install_plugins_return_get_name(ret: GstInstallPluginsReturn) -> *const c_char;
+
+ //=========================================================================
+ // GstInstallPluginsContext
+ //=========================================================================
+ pub fn gst_install_plugins_context_get_type() -> GType;
+ pub fn gst_install_plugins_context_new() -> *mut GstInstallPluginsContext;
+ pub fn gst_install_plugins_context_free(ctx: *mut GstInstallPluginsContext);
+ #[cfg(feature = "v1_6")]
+ pub fn gst_install_plugins_context_set_confirm_search(ctx: *mut GstInstallPluginsContext, confirm_search: gboolean);
+ #[cfg(feature = "v1_6")]
+ pub fn gst_install_plugins_context_set_desktop_id(ctx: *mut GstInstallPluginsContext, desktop_id: *const c_char);
+ #[cfg(feature = "v1_6")]
+ pub fn gst_install_plugins_context_set_startup_notification_id(ctx: *mut GstInstallPluginsContext, startup_id: *const c_char);
+ pub fn gst_install_plugins_context_set_xid(ctx: *mut GstInstallPluginsContext, xid: c_uint);
+
+ //=========================================================================
+ // GstAudioVisualizer
+ //=========================================================================
+ pub fn gst_audio_visualizer_get_type() -> GType;
+
+ //=========================================================================
+ // GstDiscoverer
+ //=========================================================================
+ pub fn gst_discoverer_get_type() -> GType;
+ pub fn gst_discoverer_new(timeout: gst::GstClockTime, error: *mut *mut glib::GError) -> *mut GstDiscoverer;
+ pub fn gst_discoverer_discover_uri(discoverer: *mut GstDiscoverer, uri: *const c_char, error: *mut *mut glib::GError) -> *mut GstDiscovererInfo;
+ pub fn gst_discoverer_discover_uri_async(discoverer: *mut GstDiscoverer, uri: *const c_char) -> gboolean;
+ pub fn gst_discoverer_start(discoverer: *mut GstDiscoverer);
+ pub fn gst_discoverer_stop(discoverer: *mut GstDiscoverer);
+
+ //=========================================================================
+ // GstDiscovererAudioInfo
+ //=========================================================================
+ pub fn gst_discoverer_audio_info_get_type() -> GType;
+ pub fn gst_discoverer_audio_info_get_bitrate(info: *const GstDiscovererAudioInfo) -> c_uint;
+ pub fn gst_discoverer_audio_info_get_channels(info: *const GstDiscovererAudioInfo) -> c_uint;
+ pub fn gst_discoverer_audio_info_get_depth(info: *const GstDiscovererAudioInfo) -> c_uint;
+ pub fn gst_discoverer_audio_info_get_language(info: *const GstDiscovererAudioInfo) -> *const c_char;
+ pub fn gst_discoverer_audio_info_get_max_bitrate(info: *const GstDiscovererAudioInfo) -> c_uint;
+ pub fn gst_discoverer_audio_info_get_sample_rate(info: *const GstDiscovererAudioInfo) -> c_uint;
+
+ //=========================================================================
+ // GstDiscovererContainerInfo
+ //=========================================================================
+ pub fn gst_discoverer_container_info_get_type() -> GType;
+ pub fn gst_discoverer_container_info_get_streams(info: *mut GstDiscovererContainerInfo) -> *mut glib::GList;
+
+ //=========================================================================
+ // GstDiscovererInfo
+ //=========================================================================
+ pub fn gst_discoverer_info_get_type() -> GType;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_discoverer_info_from_variant(variant: *mut glib::GVariant) -> *mut GstDiscovererInfo;
+ pub fn gst_discoverer_info_copy(ptr: *mut GstDiscovererInfo) -> *mut GstDiscovererInfo;
+ pub fn gst_discoverer_info_get_audio_streams(info: *mut GstDiscovererInfo) -> *mut glib::GList;
+ pub fn gst_discoverer_info_get_container_streams(info: *mut GstDiscovererInfo) -> *mut glib::GList;
+ pub fn gst_discoverer_info_get_duration(info: *const GstDiscovererInfo) -> gst::GstClockTime;
+ pub fn gst_discoverer_info_get_misc(info: *const GstDiscovererInfo) -> *const gst::GstStructure;
+ #[cfg(feature = "v1_4")]
+ pub fn gst_discoverer_info_get_missing_elements_installer_details(info: *const GstDiscovererInfo) -> *mut *mut c_char;
+ pub fn gst_discoverer_info_get_result(info: *const GstDiscovererInfo) -> GstDiscovererResult;
+ pub fn gst_discoverer_info_get_seekable(info: *const GstDiscovererInfo) -> gboolean;
+ pub fn gst_discoverer_info_get_stream_info(info: *mut GstDiscovererInfo) -> *mut GstDiscovererStreamInfo;
+ pub fn gst_discoverer_info_get_stream_list(info: *mut GstDiscovererInfo) -> *mut glib::GList;
+ pub fn gst_discoverer_info_get_streams(info: *mut GstDiscovererInfo, streamtype: GType) -> *mut glib::GList;
+ pub fn gst_discoverer_info_get_subtitle_streams(info: *mut GstDiscovererInfo) -> *mut glib::GList;
+ pub fn gst_discoverer_info_get_tags(info: *const GstDiscovererInfo) -> *const gst::GstTagList;
+ pub fn gst_discoverer_info_get_toc(info: *const GstDiscovererInfo) -> *const gst::GstToc;
+ pub fn gst_discoverer_info_get_uri(info: *const GstDiscovererInfo) -> *const c_char;
+ pub fn gst_discoverer_info_get_video_streams(info: *mut GstDiscovererInfo) -> *mut glib::GList;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_discoverer_info_to_variant(info: *mut GstDiscovererInfo, flags: GstDiscovererSerializeFlags) -> *mut glib::GVariant;
+
+ //=========================================================================
+ // GstDiscovererStreamInfo
+ //=========================================================================
+ pub fn gst_discoverer_stream_info_get_type() -> GType;
+ pub fn gst_discoverer_stream_info_list_free(infos: *mut glib::GList);
+ pub fn gst_discoverer_stream_info_get_caps(info: *mut GstDiscovererStreamInfo) -> *mut gst::GstCaps;
+ pub fn gst_discoverer_stream_info_get_misc(info: *mut GstDiscovererStreamInfo) -> *const gst::GstStructure;
+ pub fn gst_discoverer_stream_info_get_next(info: *mut GstDiscovererStreamInfo) -> *mut GstDiscovererStreamInfo;
+ pub fn gst_discoverer_stream_info_get_previous(info: *mut GstDiscovererStreamInfo) -> *mut GstDiscovererStreamInfo;
+ pub fn gst_discoverer_stream_info_get_stream_id(info: *mut GstDiscovererStreamInfo) -> *const c_char;
+ pub fn gst_discoverer_stream_info_get_stream_type_nick(info: *mut GstDiscovererStreamInfo) -> *const c_char;
+ pub fn gst_discoverer_stream_info_get_tags(info: *mut GstDiscovererStreamInfo) -> *const gst::GstTagList;
+ pub fn gst_discoverer_stream_info_get_toc(info: *mut GstDiscovererStreamInfo) -> *const gst::GstToc;
+
+ //=========================================================================
+ // GstDiscovererSubtitleInfo
+ //=========================================================================
+ pub fn gst_discoverer_subtitle_info_get_type() -> GType;
+ pub fn gst_discoverer_subtitle_info_get_language(info: *const GstDiscovererSubtitleInfo) -> *const c_char;
+
+ //=========================================================================
+ // GstDiscovererVideoInfo
+ //=========================================================================
+ pub fn gst_discoverer_video_info_get_type() -> GType;
+ pub fn gst_discoverer_video_info_get_bitrate(info: *const GstDiscovererVideoInfo) -> c_uint;
+ pub fn gst_discoverer_video_info_get_depth(info: *const GstDiscovererVideoInfo) -> c_uint;
+ pub fn gst_discoverer_video_info_get_framerate_denom(info: *const GstDiscovererVideoInfo) -> c_uint;
+ pub fn gst_discoverer_video_info_get_framerate_num(info: *const GstDiscovererVideoInfo) -> c_uint;
+ pub fn gst_discoverer_video_info_get_height(info: *const GstDiscovererVideoInfo) -> c_uint;
+ pub fn gst_discoverer_video_info_get_max_bitrate(info: *const GstDiscovererVideoInfo) -> c_uint;
+ pub fn gst_discoverer_video_info_get_par_denom(info: *const GstDiscovererVideoInfo) -> c_uint;
+ pub fn gst_discoverer_video_info_get_par_num(info: *const GstDiscovererVideoInfo) -> c_uint;
+ pub fn gst_discoverer_video_info_get_width(info: *const GstDiscovererVideoInfo) -> c_uint;
+ pub fn gst_discoverer_video_info_is_image(info: *const GstDiscovererVideoInfo) -> gboolean;
+ pub fn gst_discoverer_video_info_is_interlaced(info: *const GstDiscovererVideoInfo) -> gboolean;
+
+ //=========================================================================
+ // GstEncodingAudioProfile
+ //=========================================================================
+ pub fn gst_encoding_audio_profile_get_type() -> GType;
+ pub fn gst_encoding_audio_profile_new(format: *mut gst::GstCaps, preset: *const c_char, restriction: *mut gst::GstCaps, presence: c_uint) -> *mut GstEncodingAudioProfile;
+
+ //=========================================================================
+ // GstEncodingContainerProfile
+ //=========================================================================
+ pub fn gst_encoding_container_profile_get_type() -> GType;
+ pub fn gst_encoding_container_profile_new(name: *const c_char, description: *const c_char, format: *mut gst::GstCaps, preset: *const c_char) -> *mut GstEncodingContainerProfile;
+ pub fn gst_encoding_container_profile_add_profile(container: *mut GstEncodingContainerProfile, profile: *mut GstEncodingProfile) -> gboolean;
+ pub fn gst_encoding_container_profile_contains_profile(container: *mut GstEncodingContainerProfile, profile: *mut GstEncodingProfile) -> gboolean;
+ pub fn gst_encoding_container_profile_get_profiles(profile: *mut GstEncodingContainerProfile) -> *const glib::GList;
+
+ //=========================================================================
+ // GstEncodingProfile
+ //=========================================================================
+ pub fn gst_encoding_profile_get_type() -> GType;
+ pub fn gst_encoding_profile_find(targetname: *const c_char, profilename: *const c_char, category: *const c_char) -> *mut GstEncodingProfile;
+ pub fn gst_encoding_profile_from_discoverer(info: *mut GstDiscovererInfo) -> *mut GstEncodingProfile;
+ pub fn gst_encoding_profile_copy(self_: *mut GstEncodingProfile) -> *mut GstEncodingProfile;
+ pub fn gst_encoding_profile_get_allow_dynamic_output(profile: *mut GstEncodingProfile) -> gboolean;
+ pub fn gst_encoding_profile_get_description(profile: *mut GstEncodingProfile) -> *const c_char;
+ pub fn gst_encoding_profile_get_file_extension(profile: *mut GstEncodingProfile) -> *const c_char;
+ pub fn gst_encoding_profile_get_format(profile: *mut GstEncodingProfile) -> *mut gst::GstCaps;
+ pub fn gst_encoding_profile_get_input_caps(profile: *mut GstEncodingProfile) -> *mut gst::GstCaps;
+ pub fn gst_encoding_profile_get_name(profile: *mut GstEncodingProfile) -> *const c_char;
+ pub fn gst_encoding_profile_get_presence(profile: *mut GstEncodingProfile) -> c_uint;
+ pub fn gst_encoding_profile_get_preset(profile: *mut GstEncodingProfile) -> *const c_char;
+ pub fn gst_encoding_profile_get_preset_name(profile: *mut GstEncodingProfile) -> *const c_char;
+ pub fn gst_encoding_profile_get_restriction(profile: *mut GstEncodingProfile) -> *mut gst::GstCaps;
+ pub fn gst_encoding_profile_get_type_nick(profile: *mut GstEncodingProfile) -> *const c_char;
+ pub fn gst_encoding_profile_is_enabled(profile: *mut GstEncodingProfile) -> gboolean;
+ pub fn gst_encoding_profile_is_equal(a: *mut GstEncodingProfile, b: *mut GstEncodingProfile) -> gboolean;
+ pub fn gst_encoding_profile_set_allow_dynamic_output(profile: *mut GstEncodingProfile, allow_dynamic_output: gboolean);
+ pub fn gst_encoding_profile_set_description(profile: *mut GstEncodingProfile, description: *const c_char);
+ pub fn gst_encoding_profile_set_enabled(profile: *mut GstEncodingProfile, enabled: gboolean);
+ pub fn gst_encoding_profile_set_format(profile: *mut GstEncodingProfile, format: *mut gst::GstCaps);
+ pub fn gst_encoding_profile_set_name(profile: *mut GstEncodingProfile, name: *const c_char);
+ pub fn gst_encoding_profile_set_presence(profile: *mut GstEncodingProfile, presence: c_uint);
+ pub fn gst_encoding_profile_set_preset(profile: *mut GstEncodingProfile, preset: *const c_char);
+ pub fn gst_encoding_profile_set_preset_name(profile: *mut GstEncodingProfile, preset_name: *const c_char);
+ pub fn gst_encoding_profile_set_restriction(profile: *mut GstEncodingProfile, restriction: *mut gst::GstCaps);
+
+ //=========================================================================
+ // GstEncodingTarget
+ //=========================================================================
+ pub fn gst_encoding_target_get_type() -> GType;
+ pub fn gst_encoding_target_new(name: *const c_char, category: *const c_char, description: *const c_char, profiles: *const glib::GList) -> *mut GstEncodingTarget;
+ pub fn gst_encoding_target_load(name: *const c_char, category: *const c_char, error: *mut *mut glib::GError) -> *mut GstEncodingTarget;
+ pub fn gst_encoding_target_load_from_file(filepath: *const c_char, error: *mut *mut glib::GError) -> *mut GstEncodingTarget;
+ pub fn gst_encoding_target_add_profile(target: *mut GstEncodingTarget, profile: *mut GstEncodingProfile) -> gboolean;
+ pub fn gst_encoding_target_get_category(target: *mut GstEncodingTarget) -> *const c_char;
+ pub fn gst_encoding_target_get_description(target: *mut GstEncodingTarget) -> *const c_char;
+ pub fn gst_encoding_target_get_name(target: *mut GstEncodingTarget) -> *const c_char;
+ pub fn gst_encoding_target_get_profile(target: *mut GstEncodingTarget, name: *const c_char) -> *mut GstEncodingProfile;
+ pub fn gst_encoding_target_get_profiles(target: *mut GstEncodingTarget) -> *const glib::GList;
+ pub fn gst_encoding_target_save(target: *mut GstEncodingTarget, error: *mut *mut glib::GError) -> gboolean;
+ pub fn gst_encoding_target_save_to_file(target: *mut GstEncodingTarget, filepath: *const c_char, error: *mut *mut glib::GError) -> gboolean;
+
+ //=========================================================================
+ // GstEncodingVideoProfile
+ //=========================================================================
+ pub fn gst_encoding_video_profile_get_type() -> GType;
+ pub fn gst_encoding_video_profile_new(format: *mut gst::GstCaps, preset: *const c_char, restriction: *mut gst::GstCaps, presence: c_uint) -> *mut GstEncodingVideoProfile;
+ pub fn gst_encoding_video_profile_get_pass(prof: *mut GstEncodingVideoProfile) -> c_uint;
+ pub fn gst_encoding_video_profile_get_variableframerate(prof: *mut GstEncodingVideoProfile) -> gboolean;
+ pub fn gst_encoding_video_profile_set_pass(prof: *mut GstEncodingVideoProfile, pass: c_uint);
+ pub fn gst_encoding_video_profile_set_variableframerate(prof: *mut GstEncodingVideoProfile, variableframerate: gboolean);
+
+ //=========================================================================
+ // Other functions
+ //=========================================================================
+ pub fn gst_codec_utils_aac_caps_set_level_and_profile(caps: *mut gst::GstCaps, audio_config: *const u8, len: c_uint) -> gboolean;
+ pub fn gst_codec_utils_aac_get_channels(audio_config: *const u8, len: c_uint) -> c_uint;
+ pub fn gst_codec_utils_aac_get_index_from_sample_rate(rate: c_uint) -> c_int;
+ pub fn gst_codec_utils_aac_get_level(audio_config: *const u8, len: c_uint) -> *const c_char;
+ pub fn gst_codec_utils_aac_get_profile(audio_config: *const u8, len: c_uint) -> *const c_char;
+ pub fn gst_codec_utils_aac_get_sample_rate(audio_config: *const u8, len: c_uint) -> c_uint;
+ pub fn gst_codec_utils_aac_get_sample_rate_from_index(sr_idx: c_uint) -> c_uint;
+ pub fn gst_codec_utils_h264_caps_set_level_and_profile(caps: *mut gst::GstCaps, sps: *const u8, len: c_uint) -> gboolean;
+ pub fn gst_codec_utils_h264_get_level(sps: *const u8, len: c_uint) -> *const c_char;
+ pub fn gst_codec_utils_h264_get_level_idc(level: *const c_char) -> u8;
+ pub fn gst_codec_utils_h264_get_profile(sps: *const u8, len: c_uint) -> *const c_char;
+ pub fn gst_codec_utils_h265_caps_set_level_tier_and_profile(caps: *mut gst::GstCaps, profile_tier_level: *const u8, len: c_uint) -> gboolean;
+ pub fn gst_codec_utils_h265_get_level(profile_tier_level: *const u8, len: c_uint) -> *const c_char;
+ pub fn gst_codec_utils_h265_get_level_idc(level: *const c_char) -> u8;
+ pub fn gst_codec_utils_h265_get_profile(profile_tier_level: *const u8, len: c_uint) -> *const c_char;
+ pub fn gst_codec_utils_h265_get_tier(profile_tier_level: *const u8, len: c_uint) -> *const c_char;
+ pub fn gst_codec_utils_mpeg4video_caps_set_level_and_profile(caps: *mut gst::GstCaps, vis_obj_seq: *const u8, len: c_uint) -> gboolean;
+ pub fn gst_codec_utils_mpeg4video_get_level(vis_obj_seq: *const u8, len: c_uint) -> *const c_char;
+ pub fn gst_codec_utils_mpeg4video_get_profile(vis_obj_seq: *const u8, len: c_uint) -> *const c_char;
+ #[cfg(feature = "v1_8")]
+ pub fn gst_codec_utils_opus_create_caps(rate: u32, channels: u8, channel_mapping_family: u8, stream_count: u8, coupled_count: u8, channel_mapping: *const u8) -> *mut gst::GstCaps;
+ #[cfg(feature = "v1_8")]
+ pub fn gst_codec_utils_opus_create_caps_from_header(header: *mut gst::GstBuffer, comments: *mut gst::GstBuffer) -> *mut gst::GstCaps;
+ #[cfg(feature = "v1_8")]
+ pub fn gst_codec_utils_opus_create_header(rate: u32, channels: u8, channel_mapping_family: u8, stream_count: u8, coupled_count: u8, channel_mapping: *const u8, pre_skip: u16, output_gain: i16) -> *mut gst::GstBuffer;
+ // `channel_mapping` is an OUT array (`guint8 channel_mapping[256]` in the C
+ // prototype), so it must be bound as a pointer; a by-value `u8` would
+ // mismatch the C ABI and corrupt the call frame.
+ #[cfg(feature = "v1_8")]
+ pub fn gst_codec_utils_opus_parse_caps(caps: *mut gst::GstCaps, rate: *mut u32, channels: *mut u8, channel_mapping_family: *mut u8, stream_count: *mut u8, coupled_count: *mut u8, channel_mapping: *mut u8) -> gboolean;
+ #[cfg(feature = "v1_8")]
+ pub fn gst_codec_utils_opus_parse_header(header: *mut gst::GstBuffer, rate: *mut u32, channels: *mut u8, channel_mapping_family: *mut u8, stream_count: *mut u8, coupled_count: *mut u8, channel_mapping: *mut u8, pre_skip: *mut u16, output_gain: *mut i16) -> gboolean;
+ pub fn gst_encoding_list_all_targets(categoryname: *const c_char) -> *mut glib::GList;
+ pub fn gst_encoding_list_available_categories() -> *mut glib::GList;
+ pub fn gst_install_plugins_async(details: *mut *mut c_char, ctx: *mut GstInstallPluginsContext, func: GstInstallPluginsResultFunc, user_data: gpointer) -> GstInstallPluginsReturn;
+ pub fn gst_install_plugins_installation_in_progress() -> gboolean;
+ pub fn gst_install_plugins_supported() -> gboolean;
+ pub fn gst_install_plugins_sync(details: *mut *mut c_char, ctx: *mut GstInstallPluginsContext) -> GstInstallPluginsReturn;
+ pub fn gst_is_missing_plugin_message(msg: *mut gst::GstMessage) -> gboolean;
+ pub fn gst_missing_decoder_installer_detail_new(decode_caps: *const gst::GstCaps) -> *mut c_char;
+ pub fn gst_missing_decoder_message_new(element: *mut gst::GstElement, decode_caps: *const gst::GstCaps) -> *mut gst::GstMessage;
+ pub fn gst_missing_element_installer_detail_new(factory_name: *const c_char) -> *mut c_char;
+ pub fn gst_missing_element_message_new(element: *mut gst::GstElement, factory_name: *const c_char) -> *mut gst::GstMessage;
+ pub fn gst_missing_encoder_installer_detail_new(encode_caps: *const gst::GstCaps) -> *mut c_char;
+ pub fn gst_missing_encoder_message_new(element: *mut gst::GstElement, encode_caps: *const gst::GstCaps) -> *mut gst::GstMessage;
+ pub fn gst_missing_plugin_message_get_description(msg: *mut gst::GstMessage) -> *mut c_char;
+ pub fn gst_missing_plugin_message_get_installer_detail(msg: *mut gst::GstMessage) -> *mut c_char;
+ pub fn gst_missing_uri_sink_installer_detail_new(protocol: *const c_char) -> *mut c_char;
+ pub fn gst_missing_uri_sink_message_new(element: *mut gst::GstElement, protocol: *const c_char) -> *mut gst::GstMessage;
+ pub fn gst_missing_uri_source_installer_detail_new(protocol: *const c_char) -> *mut c_char;
+ pub fn gst_missing_uri_source_message_new(element: *mut gst::GstElement, protocol: *const c_char) -> *mut gst::GstMessage;
+ pub fn gst_pb_utils_add_codec_description_to_tag_list(taglist: *mut gst::GstTagList, codec_tag: *const c_char, caps: *const gst::GstCaps) -> gboolean;
+ pub fn gst_pb_utils_get_codec_description(caps: *const gst::GstCaps) -> *mut c_char;
+ pub fn gst_pb_utils_get_decoder_description(caps: *const gst::GstCaps) -> *mut c_char;
+ pub fn gst_pb_utils_get_element_description(factory_name: *const c_char) -> *mut c_char;
+ pub fn gst_pb_utils_get_encoder_description(caps: *const gst::GstCaps) -> *mut c_char;
+ pub fn gst_pb_utils_get_sink_description(protocol: *const c_char) -> *mut c_char;
+ pub fn gst_pb_utils_get_source_description(protocol: *const c_char) -> *mut c_char;
+ pub fn gst_pb_utils_init();
+ pub fn gst_plugins_base_version(major: *mut c_uint, minor: *mut c_uint, micro: *mut c_uint, nano: *mut c_uint);
+ pub fn gst_plugins_base_version_string() -> *mut c_char;
+
+}
diff --git a/gstreamer-sys/src/lib.rs b/gstreamer-sys/src/lib.rs
index c4a99bbc7..9dfabd12c 100644
--- a/gstreamer-sys/src/lib.rs
+++ b/gstreamer-sys/src/lib.rs
@@ -1,4 +1,4 @@
-// This file was generated by gir (8e8472c) from gir-files (???)
+// This file was generated by gir (10e1d4f) from gir-files (???)
// DO NOT EDIT
#![allow(non_camel_case_types, non_upper_case_globals)]
diff --git a/gstreamer-tag-sys/Cargo.toml b/gstreamer-tag-sys/Cargo.toml
new file mode 100644
index 000000000..94a5ab184
--- /dev/null
+++ b/gstreamer-tag-sys/Cargo.toml
@@ -0,0 +1,48 @@
+[build-dependencies]
+pkg-config = "0.3.7"
+
+[dependencies]
+bitflags = "0.8"
+glib-sys = "0.3"
+gobject-sys = "0.3"
+libc = "0.2"
+
+[dependencies.gstreamer-sys]
+path = "../gstreamer-sys"
+version = "0.1.0"
+
+[dependencies.gstreamer-base-sys]
+path = "../gstreamer-base-sys"
+version = "0.1.0"
+
+[features]
+v1_0_10 = []
+v1_10 = ["v1_8"]
+v1_12 = ["v1_10"]
+v1_2 = ["v1_0_10"]
+v1_2_3 = ["v1_2"]
+v1_4 = ["v1_2_3"]
+v1_6 = ["v1_4"]
+v1_8 = ["v1_6"]
+
+[lib]
+name = "gstreamer_tag_sys"
+
+[package]
+build = "build.rs"
+links = "gsttag-1.0"
+name = "gstreamer-tag-sys"
+version = "0.1.0"
+authors = ["Sebastian Dröge <sebastian@centricular.com>"]
+description = "FFI bindings to libgsttag-1.0"
+homepage = "https://gstreamer.freedesktop.org"
+keywords = ["ffi", "gstreamer", "gnome", "multimedia"]
+repository = "https://github.com/sdroege/gstreamer-sys"
+license = "MIT"
+
+include = [
+ "src/*.rs",
+ "Cargo.toml",
+ "build.rs",
+ "LICENSE",
+]
diff --git a/gstreamer-tag-sys/LICENSE b/gstreamer-tag-sys/LICENSE
new file mode 100644
index 000000000..3d76f6e2f
--- /dev/null
+++ b/gstreamer-tag-sys/LICENSE
@@ -0,0 +1,23 @@
+The MIT License (MIT)
+
+Copyright (c) 2017 Sebastian Dröge <sebastian@centricular.com>.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
diff --git a/gstreamer-tag-sys/build.rs b/gstreamer-tag-sys/build.rs
new file mode 100644
index 000000000..d0a51f5c1
--- /dev/null
+++ b/gstreamer-tag-sys/build.rs
@@ -0,0 +1,62 @@
+extern crate pkg_config;
+
+use pkg_config::{Config, Error};
+use std::env;
+use std::io::prelude::*;
+use std::io;
+use std::process;
+
+// Build-script entry point: locate the system gsttag-1.0 library via
+// `find()` and emit the cargo link directives. On failure the error is
+// written to stderr and the build is aborted with a non-zero exit code.
+fn main() {
+ if let Err(s) = find() {
+ let _ = writeln!(io::stderr(), "{}", s);
+ process::exit(1);
+ }
+}
+
+// Locates gstreamer-tag-1.0 and prints the cargo metadata needed to link it.
+//
+// Resolution order:
+//   1. If GTK_LIB_DIR is set, trust it: emit the known shared-library names
+//      plus that search path and skip pkg-config entirely.
+//   2. Otherwise probe pkg-config, requiring at least the version implied by
+//      the enabled feature flags. On Windows targets pkg-config's own cargo
+//      metadata is suppressed and the shared-library names are emitted by
+//      hand, using the link paths pkg-config reported.
+//   3. If pkg-config is missing or cannot run, fall back to emitting the
+//      bare library names as a best effort instead of failing the build.
+fn find() -> Result<(), Error> {
+ let package_name = "gstreamer-tag-1.0";
+ let shared_libs = ["gsttag-1.0"];
+ // Map the highest enabled feature to the minimum pkg-config version.
+ // The v1_12 branch was previously missing, so a v1_12 build would accept
+ // a 1.10 installation and only fail later at link/run time.
+ let version = if cfg!(feature = "v1_12") {
+ "1.12"
+ } else if cfg!(feature = "v1_10") {
+ "1.10"
+ } else {
+ "1.0"
+ };
+
+ if let Ok(lib_dir) = env::var("GTK_LIB_DIR") {
+ for lib_ in shared_libs.iter() {
+ println!("cargo:rustc-link-lib=dylib={}", lib_);
+ }
+ println!("cargo:rustc-link-search=native={}", lib_dir);
+ return Ok(())
+ }
+
+ let target = env::var("TARGET").unwrap();
+ let hardcode_shared_libs = target.contains("windows");
+
+ let mut config = Config::new();
+ config.atleast_version(version);
+ if hardcode_shared_libs {
+ // Suppress pkg-config's automatic cargo directives; they are printed
+ // manually below from the probed link paths.
+ config.cargo_metadata(false);
+ }
+ match config.probe(package_name) {
+ Ok(library) => {
+ if hardcode_shared_libs {
+ for lib_ in shared_libs.iter() {
+ println!("cargo:rustc-link-lib=dylib={}", lib_);
+ }
+ for path in library.link_paths.iter() {
+ println!("cargo:rustc-link-search=native={}", path.to_str().unwrap());
+ }
+ }
+ Ok(())
+ }
+ // pkg-config unavailable or not runnable: emit the bare library names
+ // rather than failing the build outright.
+ Err(Error::EnvNoPkgConfig(_)) | Err(Error::Command { .. }) => {
+ for lib_ in shared_libs.iter() {
+ println!("cargo:rustc-link-lib=dylib={}", lib_);
+ }
+ Ok(())
+ }
+ Err(err) => Err(err),
+ }
+}
+
diff --git a/gstreamer-tag-sys/src/lib.rs b/gstreamer-tag-sys/src/lib.rs
new file mode 100644
index 000000000..1e55b686a
--- /dev/null
+++ b/gstreamer-tag-sys/src/lib.rs
@@ -0,0 +1,253 @@
+// This file was generated by gir (10e1d4f) from gir-files (???)
+// DO NOT EDIT
+
+#![allow(non_camel_case_types, non_upper_case_globals)]
+
+extern crate libc;
+#[macro_use] extern crate bitflags;
+extern crate glib_sys as glib;
+extern crate gobject_sys as gobject;
+extern crate gstreamer_sys as gst;
+extern crate gstreamer_base_sys as gst_base;
+
+#[allow(unused_imports)]
+use libc::{c_int, c_char, c_uchar, c_float, c_uint, c_double,
+ c_short, c_ushort, c_long, c_ulong,
+ c_void, size_t, ssize_t, time_t, FILE};
+
+#[allow(unused_imports)]
+use glib::{gboolean, gconstpointer, gpointer, GType, Volatile};
+
+// Enums
+// Result codes returned by the GstTagDemuxClass parse_tag vfunc: the tag is
+// broken, more data is required, or parsing succeeded.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstTagDemuxResult {
+ BrokenTag = 0,
+ Again = 1,
+ Ok = 2,
+}
+// C-style aliases mirroring the GST_TAG_DEMUX_RESULT_* names of the C API.
+pub const GST_TAG_DEMUX_RESULT_BROKEN_TAG: GstTagDemuxResult = GstTagDemuxResult::BrokenTag;
+pub const GST_TAG_DEMUX_RESULT_AGAIN: GstTagDemuxResult = GstTagDemuxResult::Again;
+pub const GST_TAG_DEMUX_RESULT_OK: GstTagDemuxResult = GstTagDemuxResult::Ok;
+
+// Image classification for image tags (mirrors the GST_TAG_IMAGE_TYPE_*
+// enumeration of the C API). `None` (-1) means the image carries no type;
+// the remaining values describe what the picture depicts (cover art,
+// artist, recording location, ...). Used e.g. by
+// gst_tag_image_data_to_image_sample() below.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstTagImageType {
+ None = -1,
+ Undefined = 0,
+ FrontCover = 1,
+ BackCover = 2,
+ LeafletPage = 3,
+ Medium = 4,
+ LeadArtist = 5,
+ Artist = 6,
+ Conductor = 7,
+ BandOrchestra = 8,
+ Composer = 9,
+ Lyricist = 10,
+ RecordingLocation = 11,
+ DuringRecording = 12,
+ DuringPerformance = 13,
+ VideoCapture = 14,
+ Fish = 15,
+ Illustration = 16,
+ BandArtistLogo = 17,
+ PublisherStudioLogo = 18,
+}
+// C-style aliases mirroring the GST_TAG_IMAGE_TYPE_* names of the C API.
+pub const GST_TAG_IMAGE_TYPE_NONE: GstTagImageType = GstTagImageType::None;
+pub const GST_TAG_IMAGE_TYPE_UNDEFINED: GstTagImageType = GstTagImageType::Undefined;
+pub const GST_TAG_IMAGE_TYPE_FRONT_COVER: GstTagImageType = GstTagImageType::FrontCover;
+pub const GST_TAG_IMAGE_TYPE_BACK_COVER: GstTagImageType = GstTagImageType::BackCover;
+pub const GST_TAG_IMAGE_TYPE_LEAFLET_PAGE: GstTagImageType = GstTagImageType::LeafletPage;
+pub const GST_TAG_IMAGE_TYPE_MEDIUM: GstTagImageType = GstTagImageType::Medium;
+pub const GST_TAG_IMAGE_TYPE_LEAD_ARTIST: GstTagImageType = GstTagImageType::LeadArtist;
+pub const GST_TAG_IMAGE_TYPE_ARTIST: GstTagImageType = GstTagImageType::Artist;
+pub const GST_TAG_IMAGE_TYPE_CONDUCTOR: GstTagImageType = GstTagImageType::Conductor;
+pub const GST_TAG_IMAGE_TYPE_BAND_ORCHESTRA: GstTagImageType = GstTagImageType::BandOrchestra;
+pub const GST_TAG_IMAGE_TYPE_COMPOSER: GstTagImageType = GstTagImageType::Composer;
+pub const GST_TAG_IMAGE_TYPE_LYRICIST: GstTagImageType = GstTagImageType::Lyricist;
+pub const GST_TAG_IMAGE_TYPE_RECORDING_LOCATION: GstTagImageType = GstTagImageType::RecordingLocation;
+pub const GST_TAG_IMAGE_TYPE_DURING_RECORDING: GstTagImageType = GstTagImageType::DuringRecording;
+pub const GST_TAG_IMAGE_TYPE_DURING_PERFORMANCE: GstTagImageType = GstTagImageType::DuringPerformance;
+pub const GST_TAG_IMAGE_TYPE_VIDEO_CAPTURE: GstTagImageType = GstTagImageType::VideoCapture;
+pub const GST_TAG_IMAGE_TYPE_FISH: GstTagImageType = GstTagImageType::Fish;
+pub const GST_TAG_IMAGE_TYPE_ILLUSTRATION: GstTagImageType = GstTagImageType::Illustration;
+pub const GST_TAG_IMAGE_TYPE_BAND_ARTIST_LOGO: GstTagImageType = GstTagImageType::BandArtistLogo;
+pub const GST_TAG_IMAGE_TYPE_PUBLISHER_STUDIO_LOGO: GstTagImageType = GstTagImageType::PublisherStudioLogo;
+
+// Constants
+// Tag-name string constants (GST_TAG_*). Each is a NUL-terminated byte
+// string cast to *const c_char so it can be handed straight to C functions
+// expecting a tag name.
+pub const GST_TAG_CAPTURING_CONTRAST: *const c_char = b"capturing-contrast\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_DIGITAL_ZOOM_RATIO: *const c_char = b"capturing-digital-zoom-ratio\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_EXPOSURE_COMPENSATION: *const c_char = b"capturing-exposure-compensation\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_EXPOSURE_MODE: *const c_char = b"capturing-exposure-mode\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_EXPOSURE_PROGRAM: *const c_char = b"capturing-exposure-program\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_FLASH_FIRED: *const c_char = b"capturing-flash-fired\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_FLASH_MODE: *const c_char = b"capturing-flash-mode\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_FOCAL_LENGTH: *const c_char = b"capturing-focal-length\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_FOCAL_LENGTH_35_MM: *const c_char = b"capturing-focal-length-35mm\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_FOCAL_RATIO: *const c_char = b"capturing-focal-ratio\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_GAIN_ADJUSTMENT: *const c_char = b"capturing-gain-adjustment\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_ISO_SPEED: *const c_char = b"capturing-iso-speed\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_METERING_MODE: *const c_char = b"capturing-metering-mode\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_SATURATION: *const c_char = b"capturing-saturation\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_SCENE_CAPTURE_TYPE: *const c_char = b"capturing-scene-capture-type\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_SHARPNESS: *const c_char = b"capturing-sharpness\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_SHUTTER_SPEED: *const c_char = b"capturing-shutter-speed\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_SOURCE: *const c_char = b"capturing-source\0" as *const u8 as *const c_char;
+pub const GST_TAG_CAPTURING_WHITE_BALANCE: *const c_char = b"capturing-white-balance\0" as *const u8 as *const c_char;
+pub const GST_TAG_CDDA_CDDB_DISCID: *const c_char = b"discid\0" as *const u8 as *const c_char;
+pub const GST_TAG_CDDA_CDDB_DISCID_FULL: *const c_char = b"discid-full\0" as *const u8 as *const c_char;
+pub const GST_TAG_CDDA_MUSICBRAINZ_DISCID: *const c_char = b"musicbrainz-discid\0" as *const u8 as *const c_char;
+pub const GST_TAG_CDDA_MUSICBRAINZ_DISCID_FULL: *const c_char = b"musicbrainz-discid-full\0" as *const u8 as *const c_char;
+pub const GST_TAG_CMML_CLIP: *const c_char = b"cmml-clip\0" as *const u8 as *const c_char;
+pub const GST_TAG_CMML_HEAD: *const c_char = b"cmml-head\0" as *const u8 as *const c_char;
+pub const GST_TAG_CMML_STREAM: *const c_char = b"cmml-stream\0" as *const u8 as *const c_char;
+pub const GST_TAG_ID3V2_HEADER_SIZE: c_int = 10;
+pub const GST_TAG_IMAGE_HORIZONTAL_PPI: *const c_char = b"image-horizontal-ppi\0" as *const u8 as *const c_char;
+pub const GST_TAG_IMAGE_VERTICAL_PPI: *const c_char = b"image-vertical-ppi\0" as *const u8 as *const c_char;
+pub const GST_TAG_MUSICAL_KEY: *const c_char = b"musical-key\0" as *const u8 as *const c_char;
+pub const GST_TAG_MUSICBRAINZ_ALBUMARTISTID: *const c_char = b"musicbrainz-albumartistid\0" as *const u8 as *const c_char;
+pub const GST_TAG_MUSICBRAINZ_ALBUMID: *const c_char = b"musicbrainz-albumid\0" as *const u8 as *const c_char;
+pub const GST_TAG_MUSICBRAINZ_ARTISTID: *const c_char = b"musicbrainz-artistid\0" as *const u8 as *const c_char;
+pub const GST_TAG_MUSICBRAINZ_TRACKID: *const c_char = b"musicbrainz-trackid\0" as *const u8 as *const c_char;
+pub const GST_TAG_MUSICBRAINZ_TRMID: *const c_char = b"musicbrainz-trmid\0" as *const u8 as *const c_char;
+
+// Flags
+// License flag bits (GST_TAG_LICENSE_*) describing what a license permits,
+// requires, or prohibits; returned by gst_tag_get_license_flags() below.
+// Values mirror the C enumeration; uses the bitflags 0.8 `pub flags` syntax.
+bitflags! {
+ #[repr(C)]
+ pub flags GstTagLicenseFlags: c_uint {
+ const GST_TAG_LICENSE_PERMITS_REPRODUCTION = 1,
+ const GST_TAG_LICENSE_PERMITS_DISTRIBUTION = 2,
+ const GST_TAG_LICENSE_PERMITS_DERIVATIVE_WORKS = 4,
+ const GST_TAG_LICENSE_PERMITS_SHARING = 8,
+ const GST_TAG_LICENSE_REQUIRES_NOTICE = 256,
+ const GST_TAG_LICENSE_REQUIRES_ATTRIBUTION = 512,
+ const GST_TAG_LICENSE_REQUIRES_SHARE_ALIKE = 1024,
+ const GST_TAG_LICENSE_REQUIRES_SOURCE_CODE = 2048,
+ const GST_TAG_LICENSE_REQUIRES_COPYLEFT = 4096,
+ const GST_TAG_LICENSE_REQUIRES_LESSER_COPYLEFT = 8192,
+ const GST_TAG_LICENSE_PROHIBITS_COMMERCIAL_USE = 65536,
+ const GST_TAG_LICENSE_PROHIBITS_HIGH_INCOME_NATION_USE = 131072,
+ const GST_TAG_LICENSE_CREATIVE_COMMONS_LICENSE = 16777216,
+ const GST_TAG_LICENSE_FREE_SOFTWARE_FOUNDATION_LICENSE = 33554432,
+ }
+}
+
+// Records
+// Class structure for GstTagDemux. The function-pointer generics were
+// stripped in this copy (`Option gboolean>` etc.); the signatures below are
+// reconstructed from the GstTagDemuxClass vfunc documentation
+// (identify_tag / parse_tag / merge_tags).
+#[repr(C)]
+pub struct GstTagDemuxClass {
+ pub parent_class: gst::GstElementClass,
+ pub min_start_size: c_uint,
+ pub min_end_size: c_uint,
+ // gboolean (*identify_tag)(GstTagDemux *, GstBuffer *, gboolean start_tag, guint *tag_size)
+ pub identify_tag: Option<unsafe extern "C" fn(*mut GstTagDemux, *mut gst::GstBuffer, gboolean, *mut c_uint) -> gboolean>,
+ // GstTagDemuxResult (*parse_tag)(GstTagDemux *, GstBuffer *, gboolean start_tag, guint tag_size, GstTagList **tags)
+ pub parse_tag: Option<unsafe extern "C" fn(*mut GstTagDemux, *mut gst::GstBuffer, gboolean, c_uint, *mut *mut gst::GstTagList) -> GstTagDemuxResult>,
+ // GstTagList *(*merge_tags)(GstTagDemux *, const GstTagList *start_tags, const GstTagList *end_tags)
+ pub merge_tags: Option<unsafe extern "C" fn(*mut GstTagDemux, *const gst::GstTagList, *const gst::GstTagList) -> *mut gst::GstTagList>,
+ reserved: [gpointer; 4],
+}
+
+// Opaque private data of GstTagDemux; only ever handled behind a pointer.
+#[repr(C)]
+pub struct GstTagDemuxPrivate(c_void);
+
+// Class structure for GstTagMux. The function-pointer generics were stripped
+// in this copy (`Option *mut gst::GstBuffer>`); the signatures below are
+// reconstructed from the GstTagMuxClass vfunc documentation
+// (render_start_tag / render_end_tag: GstBuffer *(*)(GstTagMux *, const GstTagList *)).
+#[repr(C)]
+pub struct GstTagMuxClass {
+ pub parent_class: gst::GstElementClass,
+ pub render_start_tag: Option<unsafe extern "C" fn(*mut GstTagMux, *const gst::GstTagList) -> *mut gst::GstBuffer>,
+ pub render_end_tag: Option<unsafe extern "C" fn(*mut GstTagMux, *const gst::GstTagList) -> *mut gst::GstBuffer>,
+ _gst_reserved: [gpointer; 4],
+}
+
+// Opaque private data of GstTagMux; only ever handled behind a pointer.
+#[repr(C)]
+pub struct GstTagMuxPrivate(c_void);
+
+// Interface vtable for GstTagXmpWriter; carries no vfuncs beyond the
+// GTypeInterface header.
+#[repr(C)]
+pub struct GstTagXmpWriterInterface {
+ pub parent: gobject::GTypeInterface,
+}
+
+// Classes
+// Instance struct for GstTagDemux: a GstElement plus private data pointer
+// and reserved padding.
+#[repr(C)]
+pub struct GstTagDemux {
+ pub element: gst::GstElement,
+ priv_: *mut GstTagDemuxPrivate,
+ reserved: [gpointer; 4],
+}
+
+// Instance struct for GstTagMux: a GstElement plus private data pointer
+// and reserved padding.
+#[repr(C)]
+pub struct GstTagMux {
+ pub element: gst::GstElement,
+ priv_: *mut GstTagMuxPrivate,
+ _gst_reserved: [gpointer; 4],
+}
+
+// Interfaces
+// Opaque instance type for the GstTagXmpWriter interface.
+#[repr(C)]
+pub struct GstTagXmpWriter(c_void);
+
+// Raw FFI declarations, resolved at link time against libgsttag-1.0
+// (the link directives are emitted by build.rs).
+extern "C" {
+
+ //=========================================================================
+ // GstTagDemux
+ //=========================================================================
+ pub fn gst_tag_demux_get_type() -> GType;
+
+ //=========================================================================
+ // GstTagMux
+ //=========================================================================
+ pub fn gst_tag_mux_get_type() -> GType;
+
+ //=========================================================================
+ // GstTagXmpWriter
+ //=========================================================================
+ pub fn gst_tag_xmp_writer_get_type() -> GType;
+ pub fn gst_tag_xmp_writer_add_all_schemas(config: *mut GstTagXmpWriter);
+ pub fn gst_tag_xmp_writer_add_schema(config: *mut GstTagXmpWriter, schema: *const c_char);
+ pub fn gst_tag_xmp_writer_has_schema(config: *mut GstTagXmpWriter, schema: *const c_char) -> gboolean;
+ pub fn gst_tag_xmp_writer_remove_all_schemas(config: *mut GstTagXmpWriter);
+ pub fn gst_tag_xmp_writer_remove_schema(config: *mut GstTagXmpWriter, schema: *const c_char);
+ pub fn gst_tag_xmp_writer_tag_list_to_xmp_buffer(config: *mut GstTagXmpWriter, taglist: *const gst::GstTagList, read_only: gboolean) -> *mut gst::GstBuffer;
+
+ //=========================================================================
+ // Other functions
+ //=========================================================================
+ pub fn gst_tag_check_language_code(lang_code: *const c_char) -> gboolean;
+ pub fn gst_tag_freeform_string_to_utf8(data: *const c_char, size: c_int, env_vars: *mut *const c_char) -> *mut c_char;
+ pub fn gst_tag_from_id3_tag(id3_tag: *const c_char) -> *const c_char;
+ pub fn gst_tag_from_id3_user_tag(type_: *const c_char, id3_user_tag: *const c_char) -> *const c_char;
+ pub fn gst_tag_from_vorbis_tag(vorbis_tag: *const c_char) -> *const c_char;
+ pub fn gst_tag_get_id3v2_tag_size(buffer: *mut gst::GstBuffer) -> c_uint;
+ pub fn gst_tag_get_language_code_iso_639_1(lang_code: *const c_char) -> *const c_char;
+ pub fn gst_tag_get_language_code_iso_639_2B(lang_code: *const c_char) -> *const c_char;
+ pub fn gst_tag_get_language_code_iso_639_2T(lang_code: *const c_char) -> *const c_char;
+ pub fn gst_tag_get_language_codes() -> *mut *mut c_char;
+ pub fn gst_tag_get_language_name(language_code: *const c_char) -> *const c_char;
+ pub fn gst_tag_get_license_description(license_ref: *const c_char) -> *const c_char;
+ pub fn gst_tag_get_license_flags(license_ref: *const c_char) -> GstTagLicenseFlags;
+ pub fn gst_tag_get_license_jurisdiction(license_ref: *const c_char) -> *const c_char;
+ pub fn gst_tag_get_license_nick(license_ref: *const c_char) -> *const c_char;
+ pub fn gst_tag_get_license_title(license_ref: *const c_char) -> *const c_char;
+ pub fn gst_tag_get_license_version(license_ref: *const c_char) -> *const c_char;
+ pub fn gst_tag_get_licenses() -> *mut *mut c_char;
+ pub fn gst_tag_id3_genre_count() -> c_uint;
+ pub fn gst_tag_id3_genre_get(id: c_uint) -> *const c_char;
+ pub fn gst_tag_image_data_to_image_sample(image_data: *const u8, image_data_len: c_uint, image_type: GstTagImageType) -> *mut gst::GstSample;
+ pub fn gst_tag_list_add_id3_image(tag_list: *mut gst::GstTagList, image_data: *const u8, image_data_len: c_uint, id3_picture_type: c_uint) -> gboolean;
+ pub fn gst_tag_list_from_exif_buffer(buffer: *mut gst::GstBuffer, byte_order: c_int, base_offset: u32) -> *mut gst::GstTagList;
+ pub fn gst_tag_list_from_exif_buffer_with_tiff_header(buffer: *mut gst::GstBuffer) -> *mut gst::GstTagList;
+ pub fn gst_tag_list_from_id3v2_tag(buffer: *mut gst::GstBuffer) -> *mut gst::GstTagList;
+ pub fn gst_tag_list_from_vorbiscomment(data: *const u8, size: size_t, id_data: *const u8, id_data_length: c_uint, vendor_string: *mut *mut c_char) -> *mut gst::GstTagList;
+ pub fn gst_tag_list_from_vorbiscomment_buffer(buffer: *mut gst::GstBuffer, id_data: *const u8, id_data_length: c_uint, vendor_string: *mut *mut c_char) -> *mut gst::GstTagList;
+ pub fn gst_tag_list_from_xmp_buffer(buffer: *mut gst::GstBuffer) -> *mut gst::GstTagList;
+ pub fn gst_tag_list_new_from_id3v1(data: *const u8) -> *mut gst::GstTagList;
+ pub fn gst_tag_list_to_exif_buffer(taglist: *const gst::GstTagList, byte_order: c_int, base_offset: u32) -> *mut gst::GstBuffer;
+ pub fn gst_tag_list_to_exif_buffer_with_tiff_header(taglist: *const gst::GstTagList) -> *mut gst::GstBuffer;
+ pub fn gst_tag_list_to_vorbiscomment_buffer(list: *const gst::GstTagList, id_data: *const u8, id_data_length: c_uint, vendor_string: *const c_char) -> *mut gst::GstBuffer;
+ pub fn gst_tag_list_to_xmp_buffer(list: *const gst::GstTagList, read_only: gboolean, schemas: *mut *const c_char) -> *mut gst::GstBuffer;
+ pub fn gst_tag_parse_extended_comment(ext_comment: *const c_char, key: *mut *mut c_char, lang: *mut *mut c_char, value: *mut *mut c_char, fail_if_no_key: gboolean) -> gboolean;
+ pub fn gst_tag_register_musicbrainz_tags();
+ pub fn gst_tag_to_id3_tag(gst_tag: *const c_char) -> *const c_char;
+ pub fn gst_tag_to_vorbis_comments(list: *const gst::GstTagList, tag: *const c_char) -> *mut glib::GList;
+ pub fn gst_tag_to_vorbis_tag(gst_tag: *const c_char) -> *const c_char;
+ pub fn gst_tag_xmp_list_schemas() -> *mut *const c_char;
+ pub fn gst_vorbis_tag_add(list: *mut gst::GstTagList, tag: *const c_char, value: *const c_char);
+
+}
diff --git a/gstreamer-video-sys/Cargo.toml b/gstreamer-video-sys/Cargo.toml
new file mode 100644
index 000000000..6eefe8c5f
--- /dev/null
+++ b/gstreamer-video-sys/Cargo.toml
@@ -0,0 +1,48 @@
+[build-dependencies]
+pkg-config = "0.3.7"
+
+[dependencies]
+bitflags = "0.8"
+glib-sys = "0.3"
+gobject-sys = "0.3"
+libc = "0.2"
+
+[dependencies.gstreamer-sys]
+path = "../gstreamer-sys"
+version = "0.1.0"
+
+[dependencies.gstreamer-base-sys]
+path = "../gstreamer-base-sys"
+version = "0.1.0"
+
+[features]
+v1_0_10 = []
+v1_10 = ["v1_8"]
+v1_12 = ["v1_10"]
+v1_2 = ["v1_0_10"]
+v1_2_3 = ["v1_2"]
+v1_4 = ["v1_2_3"]
+v1_6 = ["v1_4"]
+v1_8 = ["v1_6"]
+
+[lib]
+name = "gstreamer_video_sys"
+
+[package]
+build = "build.rs"
+links = "gstvideo-1.0"
+name = "gstreamer-video-sys"
+version = "0.1.0"
+authors = ["Sebastian Dröge <sebastian@centricular.com>"]
+description = "FFI bindings to libgstvideo-1.0"
+homepage = "https://gstreamer.freedesktop.org"
+keywords = ["ffi", "gstreamer", "gnome", "multimedia"]
+repository = "https://github.com/sdroege/gstreamer-sys"
+license = "MIT"
+
+include = [
+ "src/*.rs",
+ "Cargo.toml",
+ "build.rs",
+ "LICENSE",
+]
diff --git a/gstreamer-video-sys/LICENSE b/gstreamer-video-sys/LICENSE
new file mode 100644
index 000000000..3d76f6e2f
--- /dev/null
+++ b/gstreamer-video-sys/LICENSE
@@ -0,0 +1,23 @@
+The MIT License (MIT)
+
+Copyright (c) 2017 Sebastian Dröge <sebastian@centricular.com>.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+
diff --git a/gstreamer-video-sys/build.rs b/gstreamer-video-sys/build.rs
new file mode 100644
index 000000000..7e0280a6b
--- /dev/null
+++ b/gstreamer-video-sys/build.rs
@@ -0,0 +1,74 @@
+extern crate pkg_config;
+
+use pkg_config::{Config, Error};
+use std::env;
+use std::io::prelude::*;
+use std::io;
+use std::process;
+
+fn main() {
+    if let Err(err) = find() {
+        let _ = writeln!(io::stderr(), "{}", err);
+        process::exit(1);
+    }
+}
+
+fn find() -> Result<(), Error> {
+    let package_name = "gstreamer-video-1.0";
+    let shared_libs = ["gstvideo-1.0"];
+    let version = if cfg!(feature = "v1_12") {
+        "1.12"
+    } else if cfg!(feature = "v1_10") {
+        "1.10"
+    } else if cfg!(feature = "v1_8") {
+        "1.8"
+    } else if cfg!(feature = "v1_6") {
+        "1.6"
+    } else if cfg!(feature = "v1_4") {
+        "1.4"
+    } else if cfg!(feature = "v1_2_3") {
+        "1.2.3"
+    } else if cfg!(feature = "v1_2") {
+        "1.2"
+    } else {
+        "1.0"
+    };
+
+    if let Ok(lib_dir) = env::var("GTK_LIB_DIR") {
+        for lib_ in shared_libs.iter() {
+            println!("cargo:rustc-link-lib=dylib={}", lib_);
+        }
+        println!("cargo:rustc-link-search=native={}", lib_dir);
+        return Ok(())
+    }
+
+    let target = env::var("TARGET").unwrap();
+    let hardcode_shared_libs = target.contains("windows");
+
+    let mut config = Config::new();
+    config.atleast_version(version);
+    if hardcode_shared_libs {
+        config.cargo_metadata(false);
+    }
+    match config.probe(package_name) {
+        Ok(library) => {
+            if hardcode_shared_libs {
+                for lib_ in shared_libs.iter() {
+                    println!("cargo:rustc-link-lib=dylib={}", lib_);
+                }
+                for path in library.link_paths.iter() {
+                    println!("cargo:rustc-link-search=native={}", path.to_str().unwrap());
+                }
+            }
+            Ok(())
+        }
+        Err(Error::EnvNoPkgConfig(_)) | Err(Error::Command { .. }) => {
+            for lib_ in shared_libs.iter() {
+                println!("cargo:rustc-link-lib=dylib={}", lib_);
+            }
+            Ok(())
+        }
+        Err(err) => Err(err),
+    }
+}
+
diff --git a/gstreamer-video-sys/src/lib.rs b/gstreamer-video-sys/src/lib.rs
new file mode 100644
index 000000000..6a2834727
--- /dev/null
+++ b/gstreamer-video-sys/src/lib.rs
@@ -0,0 +1,1834 @@
+// This file was generated by gir (10e1d4f+) from gir-files (???)
+// DO NOT EDIT
+
+#![allow(non_camel_case_types, non_upper_case_globals)]
+
+extern crate libc;
+#[macro_use] extern crate bitflags;
+extern crate glib_sys as glib;
+extern crate gobject_sys as gobject;
+extern crate gstreamer_sys as gst;
+extern crate gstreamer_base_sys as gst_base;
+
+#[allow(unused_imports)]
+use libc::{c_int, c_char, c_uchar, c_float, c_uint, c_double,
+ c_short, c_ushort, c_long, c_ulong,
+ c_void, size_t, ssize_t, intptr_t, uintptr_t, time_t, FILE};
+
+#[allow(unused_imports)]
+use glib::{gboolean, gconstpointer, gpointer, GType, Volatile};
+
+// Enums
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstColorBalanceType {
+ Hardware = 0,
+ Software = 1,
+}
+pub const GST_COLOR_BALANCE_HARDWARE: GstColorBalanceType = GstColorBalanceType::Hardware;
+pub const GST_COLOR_BALANCE_SOFTWARE: GstColorBalanceType = GstColorBalanceType::Software;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstNavigationCommand {
+ Invalid = 0,
+ Menu1 = 1,
+ Menu2 = 2,
+ Menu3 = 3,
+ Menu4 = 4,
+ Menu5 = 5,
+ Menu6 = 6,
+ Menu7 = 7,
+ Left = 20,
+ Right = 21,
+ Up = 22,
+ Down = 23,
+ Activate = 24,
+ PrevAngle = 30,
+ NextAngle = 31,
+}
+pub const GST_NAVIGATION_COMMAND_INVALID: GstNavigationCommand = GstNavigationCommand::Invalid;
+pub const GST_NAVIGATION_COMMAND_MENU1: GstNavigationCommand = GstNavigationCommand::Menu1;
+pub const GST_NAVIGATION_COMMAND_MENU2: GstNavigationCommand = GstNavigationCommand::Menu2;
+pub const GST_NAVIGATION_COMMAND_MENU3: GstNavigationCommand = GstNavigationCommand::Menu3;
+pub const GST_NAVIGATION_COMMAND_MENU4: GstNavigationCommand = GstNavigationCommand::Menu4;
+pub const GST_NAVIGATION_COMMAND_MENU5: GstNavigationCommand = GstNavigationCommand::Menu5;
+pub const GST_NAVIGATION_COMMAND_MENU6: GstNavigationCommand = GstNavigationCommand::Menu6;
+pub const GST_NAVIGATION_COMMAND_MENU7: GstNavigationCommand = GstNavigationCommand::Menu7;
+pub const GST_NAVIGATION_COMMAND_LEFT: GstNavigationCommand = GstNavigationCommand::Left;
+pub const GST_NAVIGATION_COMMAND_RIGHT: GstNavigationCommand = GstNavigationCommand::Right;
+pub const GST_NAVIGATION_COMMAND_UP: GstNavigationCommand = GstNavigationCommand::Up;
+pub const GST_NAVIGATION_COMMAND_DOWN: GstNavigationCommand = GstNavigationCommand::Down;
+pub const GST_NAVIGATION_COMMAND_ACTIVATE: GstNavigationCommand = GstNavigationCommand::Activate;
+pub const GST_NAVIGATION_COMMAND_PREV_ANGLE: GstNavigationCommand = GstNavigationCommand::PrevAngle;
+pub const GST_NAVIGATION_COMMAND_NEXT_ANGLE: GstNavigationCommand = GstNavigationCommand::NextAngle;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstNavigationEventType {
+ Invalid = 0,
+ KeyPress = 1,
+ KeyRelease = 2,
+ MouseButtonPress = 3,
+ MouseButtonRelease = 4,
+ MouseMove = 5,
+ Command = 6,
+}
+pub const GST_NAVIGATION_EVENT_INVALID: GstNavigationEventType = GstNavigationEventType::Invalid;
+pub const GST_NAVIGATION_EVENT_KEY_PRESS: GstNavigationEventType = GstNavigationEventType::KeyPress;
+pub const GST_NAVIGATION_EVENT_KEY_RELEASE: GstNavigationEventType = GstNavigationEventType::KeyRelease;
+pub const GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS: GstNavigationEventType = GstNavigationEventType::MouseButtonPress;
+pub const GST_NAVIGATION_EVENT_MOUSE_BUTTON_RELEASE: GstNavigationEventType = GstNavigationEventType::MouseButtonRelease;
+pub const GST_NAVIGATION_EVENT_MOUSE_MOVE: GstNavigationEventType = GstNavigationEventType::MouseMove;
+pub const GST_NAVIGATION_EVENT_COMMAND: GstNavigationEventType = GstNavigationEventType::Command;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstNavigationMessageType {
+ Invalid = 0,
+ MouseOver = 1,
+ CommandsChanged = 2,
+ AnglesChanged = 3,
+ Event = 4,
+}
+pub const GST_NAVIGATION_MESSAGE_INVALID: GstNavigationMessageType = GstNavigationMessageType::Invalid;
+pub const GST_NAVIGATION_MESSAGE_MOUSE_OVER: GstNavigationMessageType = GstNavigationMessageType::MouseOver;
+pub const GST_NAVIGATION_MESSAGE_COMMANDS_CHANGED: GstNavigationMessageType = GstNavigationMessageType::CommandsChanged;
+pub const GST_NAVIGATION_MESSAGE_ANGLES_CHANGED: GstNavigationMessageType = GstNavigationMessageType::AnglesChanged;
+pub const GST_NAVIGATION_MESSAGE_EVENT: GstNavigationMessageType = GstNavigationMessageType::Event;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstNavigationQueryType {
+ Invalid = 0,
+ Commands = 1,
+ Angles = 2,
+}
+pub const GST_NAVIGATION_QUERY_INVALID: GstNavigationQueryType = GstNavigationQueryType::Invalid;
+pub const GST_NAVIGATION_QUERY_COMMANDS: GstNavigationQueryType = GstNavigationQueryType::Commands;
+pub const GST_NAVIGATION_QUERY_ANGLES: GstNavigationQueryType = GstNavigationQueryType::Angles;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoAlphaMode {
+ Copy = 0,
+ Set = 1,
+ Mult = 2,
+}
+pub const GST_VIDEO_ALPHA_MODE_COPY: GstVideoAlphaMode = GstVideoAlphaMode::Copy;
+pub const GST_VIDEO_ALPHA_MODE_SET: GstVideoAlphaMode = GstVideoAlphaMode::Set;
+pub const GST_VIDEO_ALPHA_MODE_MULT: GstVideoAlphaMode = GstVideoAlphaMode::Mult;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoChromaMethod {
+ Nearest = 0,
+ Linear = 1,
+}
+pub const GST_VIDEO_CHROMA_METHOD_NEAREST: GstVideoChromaMethod = GstVideoChromaMethod::Nearest;
+pub const GST_VIDEO_CHROMA_METHOD_LINEAR: GstVideoChromaMethod = GstVideoChromaMethod::Linear;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoChromaMode {
+ Full = 0,
+ UpsampleOnly = 1,
+ DownsampleOnly = 2,
+ None = 3,
+}
+pub const GST_VIDEO_CHROMA_MODE_FULL: GstVideoChromaMode = GstVideoChromaMode::Full;
+pub const GST_VIDEO_CHROMA_MODE_UPSAMPLE_ONLY: GstVideoChromaMode = GstVideoChromaMode::UpsampleOnly;
+pub const GST_VIDEO_CHROMA_MODE_DOWNSAMPLE_ONLY: GstVideoChromaMode = GstVideoChromaMode::DownsampleOnly;
+pub const GST_VIDEO_CHROMA_MODE_NONE: GstVideoChromaMode = GstVideoChromaMode::None;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoColorMatrix {
+ Unknown = 0,
+ Rgb = 1,
+ Fcc = 2,
+ Bt709 = 3,
+ Bt601 = 4,
+ Smpte240m = 5,
+ Bt2020 = 6,
+}
+pub const GST_VIDEO_COLOR_MATRIX_UNKNOWN: GstVideoColorMatrix = GstVideoColorMatrix::Unknown;
+pub const GST_VIDEO_COLOR_MATRIX_RGB: GstVideoColorMatrix = GstVideoColorMatrix::Rgb;
+pub const GST_VIDEO_COLOR_MATRIX_FCC: GstVideoColorMatrix = GstVideoColorMatrix::Fcc;
+pub const GST_VIDEO_COLOR_MATRIX_BT709: GstVideoColorMatrix = GstVideoColorMatrix::Bt709;
+pub const GST_VIDEO_COLOR_MATRIX_BT601: GstVideoColorMatrix = GstVideoColorMatrix::Bt601;
+pub const GST_VIDEO_COLOR_MATRIX_SMPTE240M: GstVideoColorMatrix = GstVideoColorMatrix::Smpte240m;
+pub const GST_VIDEO_COLOR_MATRIX_BT2020: GstVideoColorMatrix = GstVideoColorMatrix::Bt2020;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoColorPrimaries {
+ Unknown = 0,
+ Bt709 = 1,
+ Bt470m = 2,
+ Bt470bg = 3,
+ Smpte170m = 4,
+ Smpte240m = 5,
+ Film = 6,
+ Bt2020 = 7,
+ Adobergb = 8,
+}
+pub const GST_VIDEO_COLOR_PRIMARIES_UNKNOWN: GstVideoColorPrimaries = GstVideoColorPrimaries::Unknown;
+pub const GST_VIDEO_COLOR_PRIMARIES_BT709: GstVideoColorPrimaries = GstVideoColorPrimaries::Bt709;
+pub const GST_VIDEO_COLOR_PRIMARIES_BT470M: GstVideoColorPrimaries = GstVideoColorPrimaries::Bt470m;
+pub const GST_VIDEO_COLOR_PRIMARIES_BT470BG: GstVideoColorPrimaries = GstVideoColorPrimaries::Bt470bg;
+pub const GST_VIDEO_COLOR_PRIMARIES_SMPTE170M: GstVideoColorPrimaries = GstVideoColorPrimaries::Smpte170m;
+pub const GST_VIDEO_COLOR_PRIMARIES_SMPTE240M: GstVideoColorPrimaries = GstVideoColorPrimaries::Smpte240m;
+pub const GST_VIDEO_COLOR_PRIMARIES_FILM: GstVideoColorPrimaries = GstVideoColorPrimaries::Film;
+pub const GST_VIDEO_COLOR_PRIMARIES_BT2020: GstVideoColorPrimaries = GstVideoColorPrimaries::Bt2020;
+pub const GST_VIDEO_COLOR_PRIMARIES_ADOBERGB: GstVideoColorPrimaries = GstVideoColorPrimaries::Adobergb;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoColorRange {
+ Unknown = 0,
+ _0255 = 1,
+ _16235 = 2,
+}
+pub const GST_VIDEO_COLOR_RANGE_UNKNOWN: GstVideoColorRange = GstVideoColorRange::Unknown;
+pub const GST_VIDEO_COLOR_RANGE_0_255: GstVideoColorRange = GstVideoColorRange::_0255;
+pub const GST_VIDEO_COLOR_RANGE_16_235: GstVideoColorRange = GstVideoColorRange::_16235;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoDitherMethod {
+ None = 0,
+ Verterr = 1,
+ FloydSteinberg = 2,
+ SierraLite = 3,
+ Bayer = 4,
+}
+pub const GST_VIDEO_DITHER_NONE: GstVideoDitherMethod = GstVideoDitherMethod::None;
+pub const GST_VIDEO_DITHER_VERTERR: GstVideoDitherMethod = GstVideoDitherMethod::Verterr;
+pub const GST_VIDEO_DITHER_FLOYD_STEINBERG: GstVideoDitherMethod = GstVideoDitherMethod::FloydSteinberg;
+pub const GST_VIDEO_DITHER_SIERRA_LITE: GstVideoDitherMethod = GstVideoDitherMethod::SierraLite;
+pub const GST_VIDEO_DITHER_BAYER: GstVideoDitherMethod = GstVideoDitherMethod::Bayer;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoFieldOrder {
+ Unknown = 0,
+ TopFieldFirst = 1,
+ BottomFieldFirst = 2,
+}
+pub const GST_VIDEO_FIELD_ORDER_UNKNOWN: GstVideoFieldOrder = GstVideoFieldOrder::Unknown;
+pub const GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST: GstVideoFieldOrder = GstVideoFieldOrder::TopFieldFirst;
+pub const GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST: GstVideoFieldOrder = GstVideoFieldOrder::BottomFieldFirst;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoFormat {
+ Unknown = 0,
+ Encoded = 1,
+ I420 = 2,
+ Yv12 = 3,
+ Yuy2 = 4,
+ Uyvy = 5,
+ Ayuv = 6,
+ Rgbx = 7,
+ Bgrx = 8,
+ Xrgb = 9,
+ Xbgr = 10,
+ Rgba = 11,
+ Bgra = 12,
+ Argb = 13,
+ Abgr = 14,
+ Rgb = 15,
+ Bgr = 16,
+ Y41b = 17,
+ Y42b = 18,
+ Yvyu = 19,
+ Y444 = 20,
+ V210 = 21,
+ V216 = 22,
+ Nv12 = 23,
+ Nv21 = 24,
+ Gray8 = 25,
+ Gray16Be = 26,
+ Gray16Le = 27,
+ V308 = 28,
+ Rgb16 = 29,
+ Bgr16 = 30,
+ Rgb15 = 31,
+ Bgr15 = 32,
+ Uyvp = 33,
+ A420 = 34,
+ Rgb8p = 35,
+ Yuv9 = 36,
+ Yvu9 = 37,
+ Iyu1 = 38,
+ Argb64 = 39,
+ Ayuv64 = 40,
+ R210 = 41,
+ I42010be = 42,
+ I42010le = 43,
+ I42210be = 44,
+ I42210le = 45,
+ Y44410be = 46,
+ Y44410le = 47,
+ Gbr = 48,
+ Gbr10be = 49,
+ Gbr10le = 50,
+ Nv16 = 51,
+ Nv24 = 52,
+ Nv1264z32 = 53,
+ A42010be = 54,
+ A42010le = 55,
+ A42210be = 56,
+ A42210le = 57,
+ A44410be = 58,
+ A44410le = 59,
+ Nv61 = 60,
+ P01010be = 61,
+ P01010le = 62,
+ Iyu2 = 63,
+ Vyuy = 64,
+ Gbra = 65,
+ Gbra10be = 66,
+ Gbra10le = 67,
+ Gbr12be = 68,
+ Gbr12le = 69,
+ Gbra12be = 70,
+ Gbra12le = 71,
+ I42012be = 72,
+ I42012le = 73,
+ I42212be = 74,
+ I42212le = 75,
+ Y44412be = 76,
+ Y44412le = 77,
+}
+pub const GST_VIDEO_FORMAT_UNKNOWN: GstVideoFormat = GstVideoFormat::Unknown;
+pub const GST_VIDEO_FORMAT_ENCODED: GstVideoFormat = GstVideoFormat::Encoded;
+pub const GST_VIDEO_FORMAT_I420: GstVideoFormat = GstVideoFormat::I420;
+pub const GST_VIDEO_FORMAT_YV12: GstVideoFormat = GstVideoFormat::Yv12;
+pub const GST_VIDEO_FORMAT_YUY2: GstVideoFormat = GstVideoFormat::Yuy2;
+pub const GST_VIDEO_FORMAT_UYVY: GstVideoFormat = GstVideoFormat::Uyvy;
+pub const GST_VIDEO_FORMAT_AYUV: GstVideoFormat = GstVideoFormat::Ayuv;
+pub const GST_VIDEO_FORMAT_RGBx: GstVideoFormat = GstVideoFormat::Rgbx;
+pub const GST_VIDEO_FORMAT_BGRx: GstVideoFormat = GstVideoFormat::Bgrx;
+pub const GST_VIDEO_FORMAT_xRGB: GstVideoFormat = GstVideoFormat::Xrgb;
+pub const GST_VIDEO_FORMAT_xBGR: GstVideoFormat = GstVideoFormat::Xbgr;
+pub const GST_VIDEO_FORMAT_RGBA: GstVideoFormat = GstVideoFormat::Rgba;
+pub const GST_VIDEO_FORMAT_BGRA: GstVideoFormat = GstVideoFormat::Bgra;
+pub const GST_VIDEO_FORMAT_ARGB: GstVideoFormat = GstVideoFormat::Argb;
+pub const GST_VIDEO_FORMAT_ABGR: GstVideoFormat = GstVideoFormat::Abgr;
+pub const GST_VIDEO_FORMAT_RGB: GstVideoFormat = GstVideoFormat::Rgb;
+pub const GST_VIDEO_FORMAT_BGR: GstVideoFormat = GstVideoFormat::Bgr;
+pub const GST_VIDEO_FORMAT_Y41B: GstVideoFormat = GstVideoFormat::Y41b;
+pub const GST_VIDEO_FORMAT_Y42B: GstVideoFormat = GstVideoFormat::Y42b;
+pub const GST_VIDEO_FORMAT_YVYU: GstVideoFormat = GstVideoFormat::Yvyu;
+pub const GST_VIDEO_FORMAT_Y444: GstVideoFormat = GstVideoFormat::Y444;
+pub const GST_VIDEO_FORMAT_v210: GstVideoFormat = GstVideoFormat::V210;
+pub const GST_VIDEO_FORMAT_v216: GstVideoFormat = GstVideoFormat::V216;
+pub const GST_VIDEO_FORMAT_NV12: GstVideoFormat = GstVideoFormat::Nv12;
+pub const GST_VIDEO_FORMAT_NV21: GstVideoFormat = GstVideoFormat::Nv21;
+pub const GST_VIDEO_FORMAT_GRAY8: GstVideoFormat = GstVideoFormat::Gray8;
+pub const GST_VIDEO_FORMAT_GRAY16_BE: GstVideoFormat = GstVideoFormat::Gray16Be;
+pub const GST_VIDEO_FORMAT_GRAY16_LE: GstVideoFormat = GstVideoFormat::Gray16Le;
+pub const GST_VIDEO_FORMAT_v308: GstVideoFormat = GstVideoFormat::V308;
+pub const GST_VIDEO_FORMAT_RGB16: GstVideoFormat = GstVideoFormat::Rgb16;
+pub const GST_VIDEO_FORMAT_BGR16: GstVideoFormat = GstVideoFormat::Bgr16;
+pub const GST_VIDEO_FORMAT_RGB15: GstVideoFormat = GstVideoFormat::Rgb15;
+pub const GST_VIDEO_FORMAT_BGR15: GstVideoFormat = GstVideoFormat::Bgr15;
+pub const GST_VIDEO_FORMAT_UYVP: GstVideoFormat = GstVideoFormat::Uyvp;
+pub const GST_VIDEO_FORMAT_A420: GstVideoFormat = GstVideoFormat::A420;
+pub const GST_VIDEO_FORMAT_RGB8P: GstVideoFormat = GstVideoFormat::Rgb8p;
+pub const GST_VIDEO_FORMAT_YUV9: GstVideoFormat = GstVideoFormat::Yuv9;
+pub const GST_VIDEO_FORMAT_YVU9: GstVideoFormat = GstVideoFormat::Yvu9;
+pub const GST_VIDEO_FORMAT_IYU1: GstVideoFormat = GstVideoFormat::Iyu1;
+pub const GST_VIDEO_FORMAT_ARGB64: GstVideoFormat = GstVideoFormat::Argb64;
+pub const GST_VIDEO_FORMAT_AYUV64: GstVideoFormat = GstVideoFormat::Ayuv64;
+pub const GST_VIDEO_FORMAT_r210: GstVideoFormat = GstVideoFormat::R210;
+pub const GST_VIDEO_FORMAT_I420_10BE: GstVideoFormat = GstVideoFormat::I42010be;
+pub const GST_VIDEO_FORMAT_I420_10LE: GstVideoFormat = GstVideoFormat::I42010le;
+pub const GST_VIDEO_FORMAT_I422_10BE: GstVideoFormat = GstVideoFormat::I42210be;
+pub const GST_VIDEO_FORMAT_I422_10LE: GstVideoFormat = GstVideoFormat::I42210le;
+pub const GST_VIDEO_FORMAT_Y444_10BE: GstVideoFormat = GstVideoFormat::Y44410be;
+pub const GST_VIDEO_FORMAT_Y444_10LE: GstVideoFormat = GstVideoFormat::Y44410le;
+pub const GST_VIDEO_FORMAT_GBR: GstVideoFormat = GstVideoFormat::Gbr;
+pub const GST_VIDEO_FORMAT_GBR_10BE: GstVideoFormat = GstVideoFormat::Gbr10be;
+pub const GST_VIDEO_FORMAT_GBR_10LE: GstVideoFormat = GstVideoFormat::Gbr10le;
+pub const GST_VIDEO_FORMAT_NV16: GstVideoFormat = GstVideoFormat::Nv16;
+pub const GST_VIDEO_FORMAT_NV24: GstVideoFormat = GstVideoFormat::Nv24;
+pub const GST_VIDEO_FORMAT_NV12_64Z32: GstVideoFormat = GstVideoFormat::Nv1264z32;
+pub const GST_VIDEO_FORMAT_A420_10BE: GstVideoFormat = GstVideoFormat::A42010be;
+pub const GST_VIDEO_FORMAT_A420_10LE: GstVideoFormat = GstVideoFormat::A42010le;
+pub const GST_VIDEO_FORMAT_A422_10BE: GstVideoFormat = GstVideoFormat::A42210be;
+pub const GST_VIDEO_FORMAT_A422_10LE: GstVideoFormat = GstVideoFormat::A42210le;
+pub const GST_VIDEO_FORMAT_A444_10BE: GstVideoFormat = GstVideoFormat::A44410be;
+pub const GST_VIDEO_FORMAT_A444_10LE: GstVideoFormat = GstVideoFormat::A44410le;
+pub const GST_VIDEO_FORMAT_NV61: GstVideoFormat = GstVideoFormat::Nv61;
+pub const GST_VIDEO_FORMAT_P010_10BE: GstVideoFormat = GstVideoFormat::P01010be;
+pub const GST_VIDEO_FORMAT_P010_10LE: GstVideoFormat = GstVideoFormat::P01010le;
+pub const GST_VIDEO_FORMAT_IYU2: GstVideoFormat = GstVideoFormat::Iyu2;
+pub const GST_VIDEO_FORMAT_VYUY: GstVideoFormat = GstVideoFormat::Vyuy;
+pub const GST_VIDEO_FORMAT_GBRA: GstVideoFormat = GstVideoFormat::Gbra;
+pub const GST_VIDEO_FORMAT_GBRA_10BE: GstVideoFormat = GstVideoFormat::Gbra10be;
+pub const GST_VIDEO_FORMAT_GBRA_10LE: GstVideoFormat = GstVideoFormat::Gbra10le;
+pub const GST_VIDEO_FORMAT_GBR_12BE: GstVideoFormat = GstVideoFormat::Gbr12be;
+pub const GST_VIDEO_FORMAT_GBR_12LE: GstVideoFormat = GstVideoFormat::Gbr12le;
+pub const GST_VIDEO_FORMAT_GBRA_12BE: GstVideoFormat = GstVideoFormat::Gbra12be;
+pub const GST_VIDEO_FORMAT_GBRA_12LE: GstVideoFormat = GstVideoFormat::Gbra12le;
+pub const GST_VIDEO_FORMAT_I420_12BE: GstVideoFormat = GstVideoFormat::I42012be;
+pub const GST_VIDEO_FORMAT_I420_12LE: GstVideoFormat = GstVideoFormat::I42012le;
+pub const GST_VIDEO_FORMAT_I422_12BE: GstVideoFormat = GstVideoFormat::I42212be;
+pub const GST_VIDEO_FORMAT_I422_12LE: GstVideoFormat = GstVideoFormat::I42212le;
+pub const GST_VIDEO_FORMAT_Y444_12BE: GstVideoFormat = GstVideoFormat::Y44412be;
+pub const GST_VIDEO_FORMAT_Y444_12LE: GstVideoFormat = GstVideoFormat::Y44412le;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoGLTextureOrientation {
+ NormalYNormal = 0,
+ NormalYFlip = 1,
+ FlipYNormal = 2,
+ FlipYFlip = 3,
+}
+pub const GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_NORMAL: GstVideoGLTextureOrientation = GstVideoGLTextureOrientation::NormalYNormal;
+pub const GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_FLIP: GstVideoGLTextureOrientation = GstVideoGLTextureOrientation::NormalYFlip;
+pub const GST_VIDEO_GL_TEXTURE_ORIENTATION_X_FLIP_Y_NORMAL: GstVideoGLTextureOrientation = GstVideoGLTextureOrientation::FlipYNormal;
+pub const GST_VIDEO_GL_TEXTURE_ORIENTATION_X_FLIP_Y_FLIP: GstVideoGLTextureOrientation = GstVideoGLTextureOrientation::FlipYFlip;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoGLTextureType {
+ Luminance = 0,
+ LuminanceAlpha = 1,
+ Rgb16 = 2,
+ Rgb = 3,
+ Rgba = 4,
+ R = 5,
+ Rg = 6,
+}
+pub const GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE: GstVideoGLTextureType = GstVideoGLTextureType::Luminance;
+pub const GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE_ALPHA: GstVideoGLTextureType = GstVideoGLTextureType::LuminanceAlpha;
+pub const GST_VIDEO_GL_TEXTURE_TYPE_RGB16: GstVideoGLTextureType = GstVideoGLTextureType::Rgb16;
+pub const GST_VIDEO_GL_TEXTURE_TYPE_RGB: GstVideoGLTextureType = GstVideoGLTextureType::Rgb;
+pub const GST_VIDEO_GL_TEXTURE_TYPE_RGBA: GstVideoGLTextureType = GstVideoGLTextureType::Rgba;
+pub const GST_VIDEO_GL_TEXTURE_TYPE_R: GstVideoGLTextureType = GstVideoGLTextureType::R;
+pub const GST_VIDEO_GL_TEXTURE_TYPE_RG: GstVideoGLTextureType = GstVideoGLTextureType::Rg;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoGammaMode {
+ None = 0,
+ Remap = 1,
+}
+pub const GST_VIDEO_GAMMA_MODE_NONE: GstVideoGammaMode = GstVideoGammaMode::None;
+pub const GST_VIDEO_GAMMA_MODE_REMAP: GstVideoGammaMode = GstVideoGammaMode::Remap;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoInterlaceMode {
+ Progressive = 0,
+ Interleaved = 1,
+ Mixed = 2,
+ Fields = 3,
+}
+pub const GST_VIDEO_INTERLACE_MODE_PROGRESSIVE: GstVideoInterlaceMode = GstVideoInterlaceMode::Progressive;
+pub const GST_VIDEO_INTERLACE_MODE_INTERLEAVED: GstVideoInterlaceMode = GstVideoInterlaceMode::Interleaved;
+pub const GST_VIDEO_INTERLACE_MODE_MIXED: GstVideoInterlaceMode = GstVideoInterlaceMode::Mixed;
+pub const GST_VIDEO_INTERLACE_MODE_FIELDS: GstVideoInterlaceMode = GstVideoInterlaceMode::Fields;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoMatrixMode {
+ Full = 0,
+ InputOnly = 1,
+ OutputOnly = 2,
+ None = 3,
+}
+pub const GST_VIDEO_MATRIX_MODE_FULL: GstVideoMatrixMode = GstVideoMatrixMode::Full;
+pub const GST_VIDEO_MATRIX_MODE_INPUT_ONLY: GstVideoMatrixMode = GstVideoMatrixMode::InputOnly;
+pub const GST_VIDEO_MATRIX_MODE_OUTPUT_ONLY: GstVideoMatrixMode = GstVideoMatrixMode::OutputOnly;
+pub const GST_VIDEO_MATRIX_MODE_NONE: GstVideoMatrixMode = GstVideoMatrixMode::None;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoMultiviewFramePacking {
+ None = -1,
+ Mono = 0,
+ Left = 1,
+ Right = 2,
+ SideBySide = 3,
+ SideBySideQuincunx = 4,
+ ColumnInterleaved = 5,
+ RowInterleaved = 6,
+ TopBottom = 7,
+ Checkerboard = 8,
+}
+pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_NONE: GstVideoMultiviewFramePacking = GstVideoMultiviewFramePacking::None;
+pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_MONO: GstVideoMultiviewFramePacking = GstVideoMultiviewFramePacking::Mono;
+pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_LEFT: GstVideoMultiviewFramePacking = GstVideoMultiviewFramePacking::Left;
+pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_RIGHT: GstVideoMultiviewFramePacking = GstVideoMultiviewFramePacking::Right;
+pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_SIDE_BY_SIDE: GstVideoMultiviewFramePacking = GstVideoMultiviewFramePacking::SideBySide;
+pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_SIDE_BY_SIDE_QUINCUNX: GstVideoMultiviewFramePacking = GstVideoMultiviewFramePacking::SideBySideQuincunx;
+pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_COLUMN_INTERLEAVED: GstVideoMultiviewFramePacking = GstVideoMultiviewFramePacking::ColumnInterleaved;
+pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_ROW_INTERLEAVED: GstVideoMultiviewFramePacking = GstVideoMultiviewFramePacking::RowInterleaved;
+pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_TOP_BOTTOM: GstVideoMultiviewFramePacking = GstVideoMultiviewFramePacking::TopBottom;
+pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_CHECKERBOARD: GstVideoMultiviewFramePacking = GstVideoMultiviewFramePacking::Checkerboard;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoMultiviewMode {
+ None = -1,
+ Mono = 0,
+ Left = 1,
+ Right = 2,
+ SideBySide = 3,
+ SideBySideQuincunx = 4,
+ ColumnInterleaved = 5,
+ RowInterleaved = 6,
+ TopBottom = 7,
+ Checkerboard = 8,
+ FrameByFrame = 32,
+ MultiviewFrameByFrame = 33,
+ Separated = 34,
+}
+pub const GST_VIDEO_MULTIVIEW_MODE_NONE: GstVideoMultiviewMode = GstVideoMultiviewMode::None;
+pub const GST_VIDEO_MULTIVIEW_MODE_MONO: GstVideoMultiviewMode = GstVideoMultiviewMode::Mono;
+pub const GST_VIDEO_MULTIVIEW_MODE_LEFT: GstVideoMultiviewMode = GstVideoMultiviewMode::Left;
+pub const GST_VIDEO_MULTIVIEW_MODE_RIGHT: GstVideoMultiviewMode = GstVideoMultiviewMode::Right;
+pub const GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE: GstVideoMultiviewMode = GstVideoMultiviewMode::SideBySide;
+pub const GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE_QUINCUNX: GstVideoMultiviewMode = GstVideoMultiviewMode::SideBySideQuincunx;
+pub const GST_VIDEO_MULTIVIEW_MODE_COLUMN_INTERLEAVED: GstVideoMultiviewMode = GstVideoMultiviewMode::ColumnInterleaved;
+pub const GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED: GstVideoMultiviewMode = GstVideoMultiviewMode::RowInterleaved;
+pub const GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM: GstVideoMultiviewMode = GstVideoMultiviewMode::TopBottom;
+pub const GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD: GstVideoMultiviewMode = GstVideoMultiviewMode::Checkerboard;
+pub const GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME: GstVideoMultiviewMode = GstVideoMultiviewMode::FrameByFrame;
+pub const GST_VIDEO_MULTIVIEW_MODE_MULTIVIEW_FRAME_BY_FRAME: GstVideoMultiviewMode = GstVideoMultiviewMode::MultiviewFrameByFrame;
+pub const GST_VIDEO_MULTIVIEW_MODE_SEPARATED: GstVideoMultiviewMode = GstVideoMultiviewMode::Separated;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoOrientationMethod {
+ Identity = 0,
+ _90r = 1,
+ _180 = 2,
+ _90l = 3,
+ Horiz = 4,
+ Vert = 5,
+ UlLr = 6,
+ UrLl = 7,
+ Auto = 8,
+ Custom = 9,
+}
+pub const GST_VIDEO_ORIENTATION_IDENTITY: GstVideoOrientationMethod = GstVideoOrientationMethod::Identity;
+pub const GST_VIDEO_ORIENTATION_90R: GstVideoOrientationMethod = GstVideoOrientationMethod::_90r;
+pub const GST_VIDEO_ORIENTATION_180: GstVideoOrientationMethod = GstVideoOrientationMethod::_180;
+pub const GST_VIDEO_ORIENTATION_90L: GstVideoOrientationMethod = GstVideoOrientationMethod::_90l;
+pub const GST_VIDEO_ORIENTATION_HORIZ: GstVideoOrientationMethod = GstVideoOrientationMethod::Horiz;
+pub const GST_VIDEO_ORIENTATION_VERT: GstVideoOrientationMethod = GstVideoOrientationMethod::Vert;
+pub const GST_VIDEO_ORIENTATION_UL_LR: GstVideoOrientationMethod = GstVideoOrientationMethod::UlLr;
+pub const GST_VIDEO_ORIENTATION_UR_LL: GstVideoOrientationMethod = GstVideoOrientationMethod::UrLl;
+pub const GST_VIDEO_ORIENTATION_AUTO: GstVideoOrientationMethod = GstVideoOrientationMethod::Auto;
+pub const GST_VIDEO_ORIENTATION_CUSTOM: GstVideoOrientationMethod = GstVideoOrientationMethod::Custom;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoOverlayFormatFlags {
+ None = 0,
+ PremultipliedAlpha = 1,
+ GlobalAlpha = 2,
+}
+pub const GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE: GstVideoOverlayFormatFlags = GstVideoOverlayFormatFlags::None;
+pub const GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA: GstVideoOverlayFormatFlags = GstVideoOverlayFormatFlags::PremultipliedAlpha;
+pub const GST_VIDEO_OVERLAY_FORMAT_FLAG_GLOBAL_ALPHA: GstVideoOverlayFormatFlags = GstVideoOverlayFormatFlags::GlobalAlpha;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoPrimariesMode {
+ None = 0,
+ MergeOnly = 1,
+ Fast = 2,
+}
+pub const GST_VIDEO_PRIMARIES_MODE_NONE: GstVideoPrimariesMode = GstVideoPrimariesMode::None;
+pub const GST_VIDEO_PRIMARIES_MODE_MERGE_ONLY: GstVideoPrimariesMode = GstVideoPrimariesMode::MergeOnly;
+pub const GST_VIDEO_PRIMARIES_MODE_FAST: GstVideoPrimariesMode = GstVideoPrimariesMode::Fast;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoResamplerMethod {
+ Nearest = 0,
+ Linear = 1,
+ Cubic = 2,
+ Sinc = 3,
+ Lanczos = 4,
+}
+pub const GST_VIDEO_RESAMPLER_METHOD_NEAREST: GstVideoResamplerMethod = GstVideoResamplerMethod::Nearest;
+pub const GST_VIDEO_RESAMPLER_METHOD_LINEAR: GstVideoResamplerMethod = GstVideoResamplerMethod::Linear;
+pub const GST_VIDEO_RESAMPLER_METHOD_CUBIC: GstVideoResamplerMethod = GstVideoResamplerMethod::Cubic;
+pub const GST_VIDEO_RESAMPLER_METHOD_SINC: GstVideoResamplerMethod = GstVideoResamplerMethod::Sinc;
+pub const GST_VIDEO_RESAMPLER_METHOD_LANCZOS: GstVideoResamplerMethod = GstVideoResamplerMethod::Lanczos;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoTileMode {
+ Unknown = 0,
+ Zflipz2x2 = 65536,
+}
+pub const GST_VIDEO_TILE_MODE_UNKNOWN: GstVideoTileMode = GstVideoTileMode::Unknown;
+pub const GST_VIDEO_TILE_MODE_ZFLIPZ_2X2: GstVideoTileMode = GstVideoTileMode::Zflipz2x2;
+
+pub type VideoTileType = c_int;
+pub const GST_VIDEO_TILE_TYPE_INDEXED: VideoTileType = 0;
+pub type GstVideoTileType = VideoTileType;
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+#[repr(C)]
+pub enum GstVideoTransferFunction {
+ Unknown = 0,
+ Gamma10 = 1,
+ Gamma18 = 2,
+ Gamma20 = 3,
+ Gamma22 = 4,
+ Bt709 = 5,
+ Smpte240m = 6,
+ Srgb = 7,
+ Gamma28 = 8,
+ Log100 = 9,
+ Log316 = 10,
+ Bt202012 = 11,
+ Adobergb = 12,
+}
+pub const GST_VIDEO_TRANSFER_UNKNOWN: GstVideoTransferFunction = GstVideoTransferFunction::Unknown;
+pub const GST_VIDEO_TRANSFER_GAMMA10: GstVideoTransferFunction = GstVideoTransferFunction::Gamma10;
+pub const GST_VIDEO_TRANSFER_GAMMA18: GstVideoTransferFunction = GstVideoTransferFunction::Gamma18;
+pub const GST_VIDEO_TRANSFER_GAMMA20: GstVideoTransferFunction = GstVideoTransferFunction::Gamma20;
+pub const GST_VIDEO_TRANSFER_GAMMA22: GstVideoTransferFunction = GstVideoTransferFunction::Gamma22;
+pub const GST_VIDEO_TRANSFER_BT709: GstVideoTransferFunction = GstVideoTransferFunction::Bt709;
+pub const GST_VIDEO_TRANSFER_SMPTE240M: GstVideoTransferFunction = GstVideoTransferFunction::Smpte240m;
+pub const GST_VIDEO_TRANSFER_SRGB: GstVideoTransferFunction = GstVideoTransferFunction::Srgb;
+pub const GST_VIDEO_TRANSFER_GAMMA28: GstVideoTransferFunction = GstVideoTransferFunction::Gamma28;
+pub const GST_VIDEO_TRANSFER_LOG100: GstVideoTransferFunction = GstVideoTransferFunction::Log100;
+pub const GST_VIDEO_TRANSFER_LOG316: GstVideoTransferFunction = GstVideoTransferFunction::Log316;
+pub const GST_VIDEO_TRANSFER_BT2020_12: GstVideoTransferFunction = GstVideoTransferFunction::Bt202012;
+pub const GST_VIDEO_TRANSFER_ADOBERGB: GstVideoTransferFunction = GstVideoTransferFunction::Adobergb;
+
+// Constants
+pub const GST_BUFFER_POOL_OPTION_VIDEO_AFFINE_TRANSFORMATION_META: *const c_char = b"GstBufferPoolOptionVideoAffineTransformation\0" as *const u8 as *const c_char;
+pub const GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT: *const c_char = b"GstBufferPoolOptionVideoAlignment\0" as *const u8 as *const c_char;
+pub const GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META: *const c_char = b"GstBufferPoolOptionVideoGLTextureUploadMeta\0" as *const u8 as *const c_char;
+pub const GST_BUFFER_POOL_OPTION_VIDEO_META: *const c_char = b"GstBufferPoolOptionVideoMeta\0" as *const u8 as *const c_char;
+pub const GST_CAPS_FEATURE_META_GST_VIDEO_AFFINE_TRANSFORMATION_META: *const c_char = b"meta:GstVideoAffineTransformation\0" as *const u8 as *const c_char;
+pub const GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META: *const c_char = b"meta:GstVideoGLTextureUploadMeta\0" as *const u8 as *const c_char;
+pub const GST_CAPS_FEATURE_META_GST_VIDEO_META: *const c_char = b"meta:GstVideoMeta\0" as *const u8 as *const c_char;
+pub const GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION: *const c_char = b"meta:GstVideoOverlayComposition\0" as *const u8 as *const c_char;
+pub const GST_META_TAG_VIDEO_COLORSPACE_STR: *const c_char = b"colorspace\0" as *const u8 as *const c_char;
+pub const GST_META_TAG_VIDEO_ORIENTATION_STR: *const c_char = b"orientation\0" as *const u8 as *const c_char;
+pub const GST_META_TAG_VIDEO_SIZE_STR: *const c_char = b"size\0" as *const u8 as *const c_char;
+pub const GST_META_TAG_VIDEO_STR: *const c_char = b"video\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_COLORIMETRY_BT2020: *const c_char = b"bt2020\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_COLORIMETRY_BT601: *const c_char = b"bt601\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_COLORIMETRY_BT709: *const c_char = b"bt709\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_COLORIMETRY_SMPTE240M: *const c_char = b"smpte240m\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_COLORIMETRY_SRGB: *const c_char = b"sRGB\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_COMP_A: c_int = 3;
+pub const GST_VIDEO_COMP_B: c_int = 2;
+pub const GST_VIDEO_COMP_G: c_int = 1;
+pub const GST_VIDEO_COMP_INDEX: c_int = 0;
+pub const GST_VIDEO_COMP_PALETTE: c_int = 1;
+pub const GST_VIDEO_COMP_R: c_int = 0;
+pub const GST_VIDEO_COMP_U: c_int = 1;
+pub const GST_VIDEO_COMP_V: c_int = 2;
+pub const GST_VIDEO_COMP_Y: c_int = 0;
+pub const GST_VIDEO_CONVERTER_OPT_ALPHA_MODE: *const c_char = b"GstVideoConverter.alpha-mode\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE: *const c_char = b"GstVideoConverter.alpha-value\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_BORDER_ARGB: *const c_char = b"GstVideoConverter.border-argb\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_CHROMA_MODE: *const c_char = b"GstVideoConverter.chroma-mode\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_CHROMA_RESAMPLER_METHOD: *const c_char = b"GstVideoConverter.chroma-resampler-method\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_DEST_HEIGHT: *const c_char = b"GstVideoConverter.dest-height\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_DEST_WIDTH: *const c_char = b"GstVideoConverter.dest-width\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_DEST_X: *const c_char = b"GstVideoConverter.dest-x\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_DEST_Y: *const c_char = b"GstVideoConverter.dest-y\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_DITHER_METHOD: *const c_char = b"GstVideoConverter.dither-method\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_DITHER_QUANTIZATION: *const c_char = b"GstVideoConverter.dither-quantization\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_FILL_BORDER: *const c_char = b"GstVideoConverter.fill-border\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_GAMMA_MODE: *const c_char = b"GstVideoConverter.gamma-mode\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_MATRIX_MODE: *const c_char = b"GstVideoConverter.matrix-mode\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_PRIMARIES_MODE: *const c_char = b"GstVideoConverter.primaries-mode\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD: *const c_char = b"GstVideoConverter.resampler-method\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_RESAMPLER_TAPS: *const c_char = b"GstVideoConverter.resampler-taps\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_SRC_HEIGHT: *const c_char = b"GstVideoConverter.src-height\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_SRC_WIDTH: *const c_char = b"GstVideoConverter.src-width\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_SRC_X: *const c_char = b"GstVideoConverter.src-x\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_SRC_Y: *const c_char = b"GstVideoConverter.src-y\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_CONVERTER_OPT_THREADS: *const c_char = b"GstVideoConverter.threads\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_DECODER_MAX_ERRORS: c_int = 10;
+pub const GST_VIDEO_DECODER_SINK_NAME: *const c_char = b"sink\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_DECODER_SRC_NAME: *const c_char = b"src\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_ENCODER_SINK_NAME: *const c_char = b"sink\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_ENCODER_SRC_NAME: *const c_char = b"src\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_FORMATS_ALL: *const c_char = b"{ I420, YV12, YUY2, UYVY, AYUV, RGBx, BGRx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, Y41B, Y42B, YVYU, Y444, v210, v216, NV12, NV21, GRAY8, GRAY16_BE, GRAY16_LE, v308, RGB16, BGR16, RGB15, BGR15, UYVP, A420, RGB8P, YUV9, YVU9, IYU1, ARGB64, AYUV64, r210, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, NV16, NV24, NV12_64Z32, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, NV61, P010_10BE, P010_10LE, IYU2, VYUY, GBRA, GBRA_10BE, GBRA_10LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE }\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_FPS_RANGE: *const c_char = b"(fraction) [ 0, max ]\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_MAX_COMPONENTS: c_int = 4;
+pub const GST_VIDEO_MAX_PLANES: c_int = 4;
+pub const GST_VIDEO_OVERLAY_COMPOSITION_BLEND_FORMATS: *const c_char = b"{ BGRx, RGBx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, I420, YV12, AYUV, YUY2, UYVY, v308, Y41B, Y42B, Y444, NV12, NV21, A420, YUV9, YVU9, IYU1, GRAY8 }\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_RESAMPLER_OPT_CUBIC_B: *const c_char = b"GstVideoResampler.cubic-b\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_RESAMPLER_OPT_CUBIC_C: *const c_char = b"GstVideoResampler.cubic-c\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_RESAMPLER_OPT_ENVELOPE: *const c_char = b"GstVideoResampler.envelope\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_RESAMPLER_OPT_MAX_TAPS: *const c_char = b"GstVideoResampler.max-taps\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_RESAMPLER_OPT_SHARPEN: *const c_char = b"GstVideoResampler.sharpen\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_RESAMPLER_OPT_SHARPNESS: *const c_char = b"GstVideoResampler.sharpness\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_SCALER_OPT_DITHER_METHOD: *const c_char = b"GstVideoScaler.dither-method\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_SIZE_RANGE: *const c_char = b"(int) [ 1, max ]\0" as *const u8 as *const c_char;
+pub const GST_VIDEO_TILE_TYPE_MASK: c_int = 0;
+pub const GST_VIDEO_TILE_TYPE_SHIFT: c_int = 16;
+pub const GST_VIDEO_TILE_X_TILES_MASK: c_int = 0;
+pub const GST_VIDEO_TILE_Y_TILES_SHIFT: c_int = 16;
+
+// Flags
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoBufferFlags: c_uint {
+ const GST_VIDEO_BUFFER_FLAG_INTERLACED = 1048576,
+ const GST_VIDEO_BUFFER_FLAG_TFF = 2097152,
+ const GST_VIDEO_BUFFER_FLAG_RFF = 4194304,
+ const GST_VIDEO_BUFFER_FLAG_ONEFIELD = 8388608,
+ const GST_VIDEO_BUFFER_FLAG_MULTIPLE_VIEW = 16777216,
+ const GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE = 33554432,
+ const GST_VIDEO_BUFFER_FLAG_LAST = 268435456,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoChromaFlags: c_uint {
+ const GST_VIDEO_CHROMA_FLAG_NONE = 0,
+ const GST_VIDEO_CHROMA_FLAG_INTERLACED = 1,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoChromaSite: c_uint {
+ const GST_VIDEO_CHROMA_SITE_UNKNOWN = 0,
+ const GST_VIDEO_CHROMA_SITE_NONE = 1,
+ const GST_VIDEO_CHROMA_SITE_H_COSITED = 2,
+ const GST_VIDEO_CHROMA_SITE_V_COSITED = 4,
+ const GST_VIDEO_CHROMA_SITE_ALT_LINE = 8,
+ const GST_VIDEO_CHROMA_SITE_COSITED = 6,
+ const GST_VIDEO_CHROMA_SITE_JPEG = 1,
+ const GST_VIDEO_CHROMA_SITE_MPEG2 = 2,
+ const GST_VIDEO_CHROMA_SITE_DV = 14,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoCodecFrameFlags: c_uint {
+ const GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY = 1,
+ const GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT = 2,
+ const GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME = 4,
+ const GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS = 8,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoDitherFlags: c_uint {
+ const GST_VIDEO_DITHER_FLAG_NONE = 0,
+ const GST_VIDEO_DITHER_FLAG_INTERLACED = 1,
+ const GST_VIDEO_DITHER_FLAG_QUANTIZE = 2,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoFlags: c_uint {
+ const GST_VIDEO_FLAG_NONE = 0,
+ const GST_VIDEO_FLAG_VARIABLE_FPS = 1,
+ const GST_VIDEO_FLAG_PREMULTIPLIED_ALPHA = 2,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoFormatFlags: c_uint {
+ const GST_VIDEO_FORMAT_FLAG_YUV = 1,
+ const GST_VIDEO_FORMAT_FLAG_RGB = 2,
+ const GST_VIDEO_FORMAT_FLAG_GRAY = 4,
+ const GST_VIDEO_FORMAT_FLAG_ALPHA = 8,
+ const GST_VIDEO_FORMAT_FLAG_LE = 16,
+ const GST_VIDEO_FORMAT_FLAG_PALETTE = 32,
+ const GST_VIDEO_FORMAT_FLAG_COMPLEX = 64,
+ const GST_VIDEO_FORMAT_FLAG_UNPACK = 128,
+ const GST_VIDEO_FORMAT_FLAG_TILED = 256,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoFrameFlags: c_uint {
+ const GST_VIDEO_FRAME_FLAG_NONE = 0,
+ const GST_VIDEO_FRAME_FLAG_INTERLACED = 1,
+ const GST_VIDEO_FRAME_FLAG_TFF = 2,
+ const GST_VIDEO_FRAME_FLAG_RFF = 4,
+ const GST_VIDEO_FRAME_FLAG_ONEFIELD = 8,
+ const GST_VIDEO_FRAME_FLAG_MULTIPLE_VIEW = 16,
+ const GST_VIDEO_FRAME_FLAG_FIRST_IN_BUNDLE = 32,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoFrameMapFlags: c_uint {
+ const GST_VIDEO_FRAME_MAP_FLAG_NO_REF = 65536,
+ const GST_VIDEO_FRAME_MAP_FLAG_LAST = 16777216,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoMultiviewFlags: c_uint {
+ const GST_VIDEO_MULTIVIEW_FLAGS_NONE = 0,
+ const GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST = 1,
+ const GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLIPPED = 2,
+ const GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLOPPED = 4,
+ const GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLIPPED = 8,
+ const GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLOPPED = 16,
+ const GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT = 16384,
+ const GST_VIDEO_MULTIVIEW_FLAGS_MIXED_MONO = 32768,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoPackFlags: c_uint {
+ const GST_VIDEO_PACK_FLAG_NONE = 0,
+ const GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE = 1,
+ const GST_VIDEO_PACK_FLAG_INTERLACED = 2,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoResamplerFlags: c_uint {
+ const GST_VIDEO_RESAMPLER_FLAG_NONE = 0,
+ const GST_VIDEO_RESAMPLER_FLAG_HALF_TAPS = 1,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoScalerFlags: c_uint {
+ const GST_VIDEO_SCALER_FLAG_NONE = 0,
+ const GST_VIDEO_SCALER_FLAG_INTERLACED = 1,
+ }
+}
+
+bitflags! {
+ #[repr(C)]
+ pub flags GstVideoTimeCodeFlags: c_uint {
+ const GST_VIDEO_TIME_CODE_FLAGS_NONE = 0,
+ const GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME = 1,
+ const GST_VIDEO_TIME_CODE_FLAGS_INTERLACED = 2,
+ }
+}
+
+// Callbacks
+pub type GstVideoAffineTransformationGetMatrix = Option<unsafe extern "C" fn(*mut GstVideoAffineTransformationMeta, *mut c_float) -> gboolean>;
+pub type GstVideoConvertSampleCallback = Option<unsafe extern "C" fn(*mut gst::GstSample, *mut glib::GError, gpointer)>;
+pub type GstVideoFormatPack = Option<unsafe extern "C" fn(*const GstVideoFormatInfo, GstVideoPackFlags, gpointer, c_int, gpointer, *const c_int, GstVideoChromaSite, c_int, c_int)>;
+pub type GstVideoFormatUnpack = Option<unsafe extern "C" fn(*const GstVideoFormatInfo, GstVideoPackFlags, gpointer, gpointer, *const c_int, c_int, c_int, c_int)>;
+pub type GstVideoGLTextureUpload = Option<unsafe extern "C" fn(*mut GstVideoGLTextureUploadMeta, *mut c_uint) -> gboolean>;
+
+// Records
+#[repr(C)]
+pub struct GstColorBalanceChannelClass {
+ pub parent: gobject::GObjectClass,
+ pub value_changed: Option<unsafe extern "C" fn(*mut GstColorBalanceChannel, c_int)>,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstColorBalanceInterface {
+ pub iface: gobject::GTypeInterface,
+ pub list_channels: Option<unsafe extern "C" fn(*mut GstColorBalance) -> *const glib::GList>,
+ pub set_value: Option<unsafe extern "C" fn(*mut GstColorBalance, *mut GstColorBalanceChannel, c_int)>,
+ pub get_value: Option<unsafe extern "C" fn(*mut GstColorBalance, *mut GstColorBalanceChannel) -> c_int>,
+ pub get_balance_type: Option<unsafe extern "C" fn(*mut GstColorBalance) -> GstColorBalanceType>,
+ pub value_changed: Option<unsafe extern "C" fn(*mut GstColorBalance, *mut GstColorBalanceChannel, c_int)>,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstNavigationInterface {
+ pub iface: gobject::GTypeInterface,
+ pub send_event: Option<unsafe extern "C" fn(*mut GstNavigation, *mut gst::GstStructure)>,
+}
+
+#[repr(C)]
+pub struct GstVideoAffineTransformationMeta {
+ pub meta: gst::GstMeta,
+ pub matrix: [c_float; 16],
+}
+
+#[repr(C)]
+pub struct GstVideoAlignment {
+ pub padding_top: c_uint,
+ pub padding_bottom: c_uint,
+ pub padding_left: c_uint,
+ pub padding_right: c_uint,
+ pub stride_align: [c_uint; 4],
+}
+
+#[repr(C)]
+pub struct GstVideoBufferPoolClass {
+ pub parent_class: gst::GstBufferPoolClass,
+}
+
+#[repr(C)]
+pub struct GstVideoBufferPoolPrivate(c_void);
+
+#[repr(C)]
+pub struct GstVideoChromaResample(c_void);
+
+#[repr(C)]
+pub struct GstVideoCodecFrame {
+ ref_count: c_int,
+ flags: u32,
+ pub system_frame_number: u32,
+ pub decode_frame_number: u32,
+ pub presentation_frame_number: u32,
+ pub dts: gst::GstClockTime,
+ pub pts: gst::GstClockTime,
+ pub duration: gst::GstClockTime,
+ pub distance_from_sync: c_int,
+ pub input_buffer: *mut gst::GstBuffer,
+ pub output_buffer: *mut gst::GstBuffer,
+ pub deadline: gst::GstClockTime,
+ events: *mut glib::GList,
+ user_data: gpointer,
+ user_data_destroy_notify: glib::GDestroyNotify,
+ _truncated_record_marker: c_void,
+ //union,
+}
+
+#[repr(C)]
+pub struct GstVideoCodecState {
+ ref_count: c_int,
+ pub info: GstVideoInfo,
+ pub caps: *mut gst::GstCaps,
+ pub codec_data: *mut gst::GstBuffer,
+ pub allocation_caps: *mut gst::GstCaps,
+ padding: [c_void; 19],
+}
+
+#[repr(C)]
+pub struct GstVideoColorPrimariesInfo {
+ pub primaries: GstVideoColorPrimaries,
+ pub Wx: c_double,
+ pub Wy: c_double,
+ pub Rx: c_double,
+ pub Ry: c_double,
+ pub Gx: c_double,
+ pub Gy: c_double,
+ pub Bx: c_double,
+ pub By: c_double,
+}
+
+#[repr(C)]
+pub struct GstVideoColorimetry {
+ pub range: GstVideoColorRange,
+ pub matrix: GstVideoColorMatrix,
+ pub transfer: GstVideoTransferFunction,
+ pub primaries: GstVideoColorPrimaries,
+}
+
+#[repr(C)]
+pub struct GstVideoConverter(c_void);
+
+#[repr(C)]
+pub struct GstVideoCropMeta {
+ pub meta: gst::GstMeta,
+ pub x: c_uint,
+ pub y: c_uint,
+ pub width: c_uint,
+ pub height: c_uint,
+}
+
+#[repr(C)]
+pub struct GstVideoDecoderClass {
+ element_class: gst::GstElementClass,
+ pub open: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gboolean>,
+ pub close: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gboolean>,
+ pub start: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gboolean>,
+ pub stop: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gboolean>,
+ pub parse: Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut GstVideoCodecFrame, *mut gst_base::GstAdapter, gboolean) -> gst::GstFlowReturn>,
+ pub set_format: Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut GstVideoCodecState) -> gboolean>,
+ pub reset: Option<unsafe extern "C" fn(*mut GstVideoDecoder, gboolean) -> gboolean>,
+ pub finish: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gst::GstFlowReturn>,
+ pub handle_frame: Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut GstVideoCodecFrame) -> gst::GstFlowReturn>,
+ pub sink_event: Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstEvent) -> gboolean>,
+ pub src_event: Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstEvent) -> gboolean>,
+ pub negotiate: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gboolean>,
+ pub decide_allocation: Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstQuery) -> gboolean>,
+ pub propose_allocation: Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstQuery) -> gboolean>,
+ pub flush: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gboolean>,
+ pub sink_query: Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstQuery) -> gboolean>,
+ pub src_query: Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstQuery) -> gboolean>,
+ pub getcaps: Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstCaps) -> *mut gst::GstCaps>,
+ pub drain: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gst::GstFlowReturn>,
+ pub transform_meta: Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut GstVideoCodecFrame, *mut gst::GstMeta) -> gboolean>,
+ padding: [c_void; 14],
+}
+
+#[repr(C)]
+pub struct GstVideoDecoderPrivate(c_void);
+
+#[repr(C)]
+pub struct GstVideoDirectionInterface {
+ pub iface: gobject::GTypeInterface,
+}
+
+#[repr(C)]
+pub struct GstVideoDither(c_void);
+
+#[repr(C)]
+pub struct GstVideoEncoderClass {
+ element_class: gst::GstElementClass,
+ pub open: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gboolean>,
+ pub close: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gboolean>,
+ pub start: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gboolean>,
+ pub stop: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gboolean>,
+ pub set_format: Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut GstVideoCodecState) -> gboolean>,
+ pub handle_frame: Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut GstVideoCodecFrame) -> gst::GstFlowReturn>,
+ pub reset: Option<unsafe extern "C" fn(*mut GstVideoEncoder, gboolean) -> gboolean>,
+ pub finish: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gst::GstFlowReturn>,
+ pub pre_push: Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut GstVideoCodecFrame) -> gst::GstFlowReturn>,
+ pub getcaps: Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstCaps) -> *mut gst::GstCaps>,
+ pub sink_event: Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstEvent) -> gboolean>,
+ pub src_event: Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstEvent) -> gboolean>,
+ pub negotiate: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gboolean>,
+ pub decide_allocation: Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstQuery) -> gboolean>,
+ pub propose_allocation: Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstQuery) -> gboolean>,
+ pub flush: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gboolean>,
+ pub sink_query: Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstQuery) -> gboolean>,
+ pub src_query: Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstQuery) -> gboolean>,
+ pub transform_meta: Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut GstVideoCodecFrame, *mut gst::GstMeta) -> gboolean>,
+ _gst_reserved: [gpointer; 16],
+}
+
+#[repr(C)]
+pub struct GstVideoEncoderPrivate(c_void);
+
+#[repr(C)]
+pub struct GstVideoFilterClass {
+ pub parent_class: gst_base::GstBaseTransformClass,
+ pub set_info: Option<unsafe extern "C" fn(*mut GstVideoFilter, *mut gst::GstCaps, *mut GstVideoInfo, *mut gst::GstCaps, *mut GstVideoInfo) -> gboolean>,
+ pub transform_frame: Option<unsafe extern "C" fn(*mut GstVideoFilter, *mut GstVideoFrame, *mut GstVideoFrame) -> gst::GstFlowReturn>,
+ pub transform_frame_ip: Option<unsafe extern "C" fn(*mut GstVideoFilter, *mut GstVideoFrame) -> gst::GstFlowReturn>,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstVideoFormatInfo {
+ pub format: GstVideoFormat,
+ pub name: *const c_char,
+ pub description: *const c_char,
+ pub flags: GstVideoFormatFlags,
+ pub bits: c_uint,
+ pub n_components: c_uint,
+ pub shift: [c_uint; 4],
+ pub depth: [c_uint; 4],
+ pub pixel_stride: [c_int; 4],
+ pub n_planes: c_uint,
+ pub plane: [c_uint; 4],
+ pub poffset: [c_uint; 4],
+ pub w_sub: [c_uint; 4],
+ pub h_sub: [c_uint; 4],
+ pub unpack_format: GstVideoFormat,
+ pub unpack_func: GstVideoFormatUnpack,
+ pub pack_lines: c_int,
+ pub pack_func: GstVideoFormatPack,
+ pub tile_mode: GstVideoTileMode,
+ pub tile_ws: c_uint,
+ pub tile_hs: c_uint,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstVideoFrame {
+ pub info: GstVideoInfo,
+ pub flags: GstVideoFrameFlags,
+ pub buffer: *mut gst::GstBuffer,
+ pub meta: gpointer,
+ pub id: c_int,
+ pub data: [gpointer; 4],
+ pub map: [gst::GstMapInfo; 4],
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstVideoGLTextureUploadMeta {
+ pub meta: gst::GstMeta,
+ pub texture_orientation: GstVideoGLTextureOrientation,
+ pub n_textures: c_uint,
+ pub texture_type: [GstVideoGLTextureType; 4],
+ buffer: *mut gst::GstBuffer,
+ upload: GstVideoGLTextureUpload,
+ user_data: gpointer,
+ user_data_copy: gobject::GBoxedCopyFunc,
+ user_data_free: gobject::GBoxedFreeFunc,
+}
+
+#[repr(C)]
+pub struct GstVideoInfo {
+ pub finfo: *const GstVideoFormatInfo,
+ pub interlace_mode: GstVideoInterlaceMode,
+ pub flags: GstVideoFlags,
+ pub width: c_int,
+ pub height: c_int,
+ pub size: size_t,
+ pub views: c_int,
+ pub chroma_site: GstVideoChromaSite,
+ pub colorimetry: GstVideoColorimetry,
+ pub par_n: c_int,
+ pub par_d: c_int,
+ pub fps_n: c_int,
+ pub fps_d: c_int,
+ pub offset: [size_t; 4],
+ pub stride: [c_int; 4],
+ _truncated_record_marker: c_void,
+ //union,
+}
+
+#[repr(C)]
+pub struct GstVideoMeta {
+ pub meta: gst::GstMeta,
+ pub buffer: *mut gst::GstBuffer,
+ pub flags: GstVideoFrameFlags,
+ pub format: GstVideoFormat,
+ pub id: c_int,
+ pub width: c_uint,
+ pub height: c_uint,
+ pub n_planes: c_uint,
+ pub offset: [size_t; 4],
+ pub stride: [c_int; 4],
+ pub map: Option<unsafe extern "C" fn(*mut GstVideoMeta, c_uint, *mut gst::GstMapInfo, *mut gpointer, *mut c_int, gst::GstMapFlags) -> gboolean>,
+ pub unmap: Option<unsafe extern "C" fn(*mut GstVideoMeta, c_uint, *mut gst::GstMapInfo) -> gboolean>,
+}
+
+#[repr(C)]
+pub struct GstVideoMetaTransform {
+ pub in_info: *mut GstVideoInfo,
+ pub out_info: *mut GstVideoInfo,
+}
+
+#[repr(C)]
+pub struct GstVideoOrientationInterface {
+ pub iface: gobject::GTypeInterface,
+ pub get_hflip: Option<unsafe extern "C" fn(*mut GstVideoOrientation, *mut gboolean) -> gboolean>,
+ pub get_vflip: Option<unsafe extern "C" fn(*mut GstVideoOrientation, *mut gboolean) -> gboolean>,
+ pub get_hcenter: Option<unsafe extern "C" fn(*mut GstVideoOrientation, *mut c_int) -> gboolean>,
+ pub get_vcenter: Option<unsafe extern "C" fn(*mut GstVideoOrientation, *mut c_int) -> gboolean>,
+ pub set_hflip: Option<unsafe extern "C" fn(*mut GstVideoOrientation, gboolean) -> gboolean>,
+ pub set_vflip: Option<unsafe extern "C" fn(*mut GstVideoOrientation, gboolean) -> gboolean>,
+ pub set_hcenter: Option<unsafe extern "C" fn(*mut GstVideoOrientation, c_int) -> gboolean>,
+ pub set_vcenter: Option<unsafe extern "C" fn(*mut GstVideoOrientation, c_int) -> gboolean>,
+}
+
+#[repr(C)]
+pub struct GstVideoOverlayComposition(c_void);
+
+#[repr(C)]
+pub struct GstVideoOverlayCompositionMeta {
+ pub meta: gst::GstMeta,
+ pub overlay: *mut GstVideoOverlayComposition,
+}
+
+#[repr(C)]
+pub struct GstVideoOverlayInterface {
+ pub iface: gobject::GTypeInterface,
+ pub expose: Option<unsafe extern "C" fn(*mut GstVideoOverlay)>,
+ pub handle_events: Option<unsafe extern "C" fn(*mut GstVideoOverlay, gboolean)>,
+ pub set_render_rectangle: Option<unsafe extern "C" fn(*mut GstVideoOverlay, c_int, c_int, c_int, c_int)>,
+ pub set_window_handle: Option<unsafe extern "C" fn(*mut GstVideoOverlay, guintptr)>,
+}
+
+#[repr(C)]
+pub struct GstVideoOverlayRectangle(c_void);
+
+#[repr(C)]
+pub struct GstVideoRectangle {
+ pub x: c_int,
+ pub y: c_int,
+ pub w: c_int,
+ pub h: c_int,
+}
+
+#[repr(C)]
+pub struct GstVideoRegionOfInterestMeta {
+ pub meta: gst::GstMeta,
+ pub roi_type: glib::GQuark,
+ pub id: c_int,
+ pub parent_id: c_int,
+ pub x: c_uint,
+ pub y: c_uint,
+ pub w: c_uint,
+ pub h: c_uint,
+}
+
+#[repr(C)]
+pub struct GstVideoResampler {
+ pub in_size: c_int,
+ pub out_size: c_int,
+ pub max_taps: c_uint,
+ pub n_phases: c_uint,
+ pub offset: *mut u32,
+ pub phase: *mut u32,
+ pub n_taps: *mut u32,
+ pub taps: *mut c_double,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstVideoScaler(c_void);
+
+#[repr(C)]
+pub struct GstVideoSinkClass {
+ pub parent_class: gst_base::GstBaseSinkClass,
+ pub show_frame: Option<unsafe extern "C" fn(*mut GstVideoSink, *mut gst::GstBuffer) -> gst::GstFlowReturn>,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstVideoSinkPrivate(c_void);
+
+#[repr(C)]
+pub struct GstVideoTimeCode {
+ pub config: GstVideoTimeCodeConfig,
+ pub hours: c_uint,
+ pub minutes: c_uint,
+ pub seconds: c_uint,
+ pub frames: c_uint,
+ pub field_count: c_uint,
+}
+
+#[repr(C)]
+pub struct GstVideoTimeCodeConfig {
+ pub fps_n: c_uint,
+ pub fps_d: c_uint,
+ pub flags: GstVideoTimeCodeFlags,
+ pub latest_daily_jam: *mut glib::GDateTime,
+}
+
+#[repr(C)]
+pub struct GstVideoTimeCodeInterval {
+ pub hours: c_uint,
+ pub minutes: c_uint,
+ pub seconds: c_uint,
+ pub frames: c_uint,
+}
+
+#[repr(C)]
+pub struct GstVideoTimeCodeMeta {
+ pub meta: gst::GstMeta,
+ pub tc: GstVideoTimeCode,
+}
+
+// Classes
+#[repr(C)]
+pub struct GstColorBalanceChannel {
+ pub parent: gobject::GObject,
+ pub label: *mut c_char,
+ pub min_value: c_int,
+ pub max_value: c_int,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstVideoBufferPool {
+ pub bufferpool: gst::GstBufferPool,
+ pub priv_: *mut GstVideoBufferPoolPrivate,
+}
+
+#[repr(C)]
+pub struct GstVideoDecoder {
+ element: gst::GstElement,
+ sinkpad: *mut gst::GstPad,
+ srcpad: *mut gst::GstPad,
+ stream_lock: glib::GRecMutex,
+ input_segment: gst::GstSegment,
+ output_segment: gst::GstSegment,
+ priv_: *mut GstVideoDecoderPrivate,
+ padding: [c_void; 20],
+}
+
+#[repr(C)]
+pub struct GstVideoEncoder {
+ element: gst::GstElement,
+ sinkpad: *mut gst::GstPad,
+ srcpad: *mut gst::GstPad,
+ stream_lock: glib::GRecMutex,
+ input_segment: gst::GstSegment,
+ output_segment: gst::GstSegment,
+ priv_: *mut GstVideoEncoderPrivate,
+ padding: [c_void; 20],
+}
+
+#[repr(C)]
+pub struct GstVideoFilter {
+ pub element: gst_base::GstBaseTransform,
+ pub negotiated: gboolean,
+ pub in_info: GstVideoInfo,
+ pub out_info: GstVideoInfo,
+ _gst_reserved: [gpointer; 4],
+}
+
+#[repr(C)]
+pub struct GstVideoMultiviewFlagsSet(c_void);
+
+#[repr(C)]
+pub struct GstVideoSink {
+ pub element: gst_base::GstBaseSink,
+ pub width: c_int,
+ pub height: c_int,
+ priv_: *mut GstVideoSinkPrivate,
+ _gst_reserved: [gpointer; 4],
+}
+
+// Interfaces
+#[repr(C)]
+pub struct GstColorBalance(c_void);
+#[repr(C)]
+pub struct GstNavigation(c_void);
+#[repr(C)]
+pub struct GstVideoDirection(c_void);
+#[repr(C)]
+pub struct GstVideoOrientation(c_void);
+#[repr(C)]
+pub struct GstVideoOverlay(c_void);
+
+extern "C" {
+
+ //=========================================================================
+ // GstVideoColorMatrix
+ //=========================================================================
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_color_matrix_get_Kr_Kb(matrix: GstVideoColorMatrix, Kr: *mut c_double, Kb: *mut c_double) -> gboolean;
+
+ //=========================================================================
+ // GstVideoColorPrimaries
+ //=========================================================================
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_color_primaries_get_info(primaries: GstVideoColorPrimaries) -> *const GstVideoColorPrimariesInfo;
+
+ //=========================================================================
+ // GstVideoColorRange
+ //=========================================================================
+ pub fn gst_video_color_range_offsets(range: GstVideoColorRange, info: *const GstVideoFormatInfo, offset: c_int, scale: c_int);
+
+ //=========================================================================
+ // GstVideoFieldOrder
+ //=========================================================================
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_field_order_from_string(order: *const c_char) -> GstVideoFieldOrder;
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_field_order_to_string(order: GstVideoFieldOrder) -> *const c_char;
+
+ //=========================================================================
+ // GstVideoFormat
+ //=========================================================================
+ pub fn gst_video_format_from_fourcc(fourcc: u32) -> GstVideoFormat;
+ pub fn gst_video_format_from_masks(depth: c_int, bpp: c_int, endianness: c_int, red_mask: c_uint, green_mask: c_uint, blue_mask: c_uint, alpha_mask: c_uint) -> GstVideoFormat;
+ pub fn gst_video_format_from_string(format: *const c_char) -> GstVideoFormat;
+ pub fn gst_video_format_get_info(format: GstVideoFormat) -> *const GstVideoFormatInfo;
+ #[cfg(feature = "v1_2")]
+ pub fn gst_video_format_get_palette(format: GstVideoFormat, size: *mut size_t) -> gconstpointer;
+ pub fn gst_video_format_to_fourcc(format: GstVideoFormat) -> u32;
+ pub fn gst_video_format_to_string(format: GstVideoFormat) -> *const c_char;
+
+ //=========================================================================
+ // GstVideoInterlaceMode
+ //=========================================================================
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_interlace_mode_from_string(mode: *const c_char) -> GstVideoInterlaceMode;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_interlace_mode_to_string(mode: GstVideoInterlaceMode) -> *const c_char;
+
+ //=========================================================================
+ // GstVideoMultiviewMode
+ //=========================================================================
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_multiview_mode_from_caps_string(caps_mview_mode: *const c_char) -> GstVideoMultiviewMode;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_multiview_mode_to_caps_string(mview_mode: GstVideoMultiviewMode) -> *const c_char;
+
+ //=========================================================================
+ // GstVideoAffineTransformationMeta
+ //=========================================================================
+ #[cfg(feature = "v1_8")]
+ pub fn gst_video_affine_transformation_meta_apply_matrix(meta: *mut GstVideoAffineTransformationMeta, matrix: c_float);
+ pub fn gst_video_affine_transformation_meta_get_info() -> *const gst::GstMetaInfo;
+
+ //=========================================================================
+ // GstVideoAlignment
+ //=========================================================================
+ pub fn gst_video_alignment_reset(align: *mut GstVideoAlignment);
+
+ //=========================================================================
+ // GstVideoChromaResample
+ //=========================================================================
+ pub fn gst_video_chroma_resample_free(resample: *mut GstVideoChromaResample);
+ pub fn gst_video_chroma_resample_get_info(resample: *mut GstVideoChromaResample, n_lines: *mut c_uint, offset: *mut c_int);
+ pub fn gst_video_chroma_resample_new(method: GstVideoChromaMethod, site: GstVideoChromaSite, flags: GstVideoChromaFlags, format: GstVideoFormat, h_factor: c_int, v_factor: c_int) -> *mut GstVideoChromaResample;
+
+ //=========================================================================
+ // GstVideoCodecFrame
+ //=========================================================================
+ pub fn gst_video_codec_frame_get_type() -> GType;
+ pub fn gst_video_codec_frame_get_user_data(frame: *mut GstVideoCodecFrame) -> gpointer;
+ pub fn gst_video_codec_frame_ref(frame: *mut GstVideoCodecFrame) -> *mut GstVideoCodecFrame;
+ pub fn gst_video_codec_frame_set_user_data(frame: *mut GstVideoCodecFrame, user_data: gpointer, notify: glib::GDestroyNotify);
+ pub fn gst_video_codec_frame_unref(frame: *mut GstVideoCodecFrame);
+
+ //=========================================================================
+ // GstVideoCodecState
+ //=========================================================================
+ pub fn gst_video_codec_state_get_type() -> GType;
+ pub fn gst_video_codec_state_ref(state: *mut GstVideoCodecState) -> *mut GstVideoCodecState;
+ pub fn gst_video_codec_state_unref(state: *mut GstVideoCodecState);
+
+ //=========================================================================
+ // GstVideoColorimetry
+ //=========================================================================
+ pub fn gst_video_colorimetry_from_string(cinfo: *mut GstVideoColorimetry, color: *const c_char) -> gboolean;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_colorimetry_is_equal(cinfo: *const GstVideoColorimetry, other: *const GstVideoColorimetry) -> gboolean;
+ pub fn gst_video_colorimetry_matches(cinfo: *const GstVideoColorimetry, color: *const c_char) -> gboolean;
+ pub fn gst_video_colorimetry_to_string(cinfo: *const GstVideoColorimetry) -> *mut c_char;
+
+ //=========================================================================
+ // GstVideoConverter
+ //=========================================================================
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_converter_frame(convert: *mut GstVideoConverter, src: *const GstVideoFrame, dest: *mut GstVideoFrame);
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_converter_free(convert: *mut GstVideoConverter);
+ pub fn gst_video_converter_get_config(convert: *mut GstVideoConverter) -> *const gst::GstStructure;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_converter_set_config(convert: *mut GstVideoConverter, config: *mut gst::GstStructure) -> gboolean;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_converter_new(in_info: *mut GstVideoInfo, out_info: *mut GstVideoInfo, config: *mut gst::GstStructure) -> *mut GstVideoConverter;
+
+ //=========================================================================
+ // GstVideoCropMeta
+ //=========================================================================
+ // FFI declaration for the crop-meta GstMetaInfo accessor.
+ pub fn gst_video_crop_meta_get_info() -> *const gst::GstMetaInfo;
+
+ //=========================================================================
+ // GstVideoDither
+ //=========================================================================
+ // Raw bindings for the GstVideoDither helper object (create / apply to a
+ // line of pixels / free). GstVideoDither is an opaque type declared
+ // elsewhere in this file.
+ pub fn gst_video_dither_free(dither: *mut GstVideoDither);
+ pub fn gst_video_dither_line(dither: *mut GstVideoDither, line: gpointer, x: c_uint, y: c_uint, width: c_uint);
+ pub fn gst_video_dither_new(method: GstVideoDitherMethod, flags: GstVideoDitherFlags, format: GstVideoFormat, quantizer: c_uint, width: c_uint) -> *mut GstVideoDither;
+
+ //=========================================================================
+ // GstVideoFrame
+ //=========================================================================
+ // Map/unmap and copy helpers for GstVideoFrame; map/map_id fill `frame`
+ // from a GstBuffer according to `info` and `flags`.
+ pub fn gst_video_frame_copy(dest: *mut GstVideoFrame, src: *const GstVideoFrame) -> gboolean;
+ pub fn gst_video_frame_copy_plane(dest: *mut GstVideoFrame, src: *const GstVideoFrame, plane: c_uint) -> gboolean;
+ pub fn gst_video_frame_map(frame: *mut GstVideoFrame, info: *mut GstVideoInfo, buffer: *mut gst::GstBuffer, flags: gst::GstMapFlags) -> gboolean;
+ pub fn gst_video_frame_map_id(frame: *mut GstVideoFrame, info: *mut GstVideoInfo, buffer: *mut gst::GstBuffer, id: c_int, flags: gst::GstMapFlags) -> gboolean;
+ pub fn gst_video_frame_unmap(frame: *mut GstVideoFrame);
+
+ //=========================================================================
+ // GstVideoGLTextureUploadMeta
+ //=========================================================================
+ pub fn gst_video_gl_texture_upload_meta_upload(meta: *mut GstVideoGLTextureUploadMeta, texture_id: c_uint) -> gboolean;
+ pub fn gst_video_gl_texture_upload_meta_get_info() -> *const gst::GstMetaInfo;
+
+ //=========================================================================
+ // GstVideoInfo
+ //=========================================================================
+ // GstVideoInfo exists since 1.0; only the heap constructors/copy/free
+ // (new/copy/free) were added in 1.6, hence the selective "v1_6" gates.
+ pub fn gst_video_info_get_type() -> GType;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_info_new() -> *mut GstVideoInfo;
+ pub fn gst_video_info_align(info: *mut GstVideoInfo, align: *mut GstVideoAlignment) -> gboolean;
+ pub fn gst_video_info_convert(info: *mut GstVideoInfo, src_format: gst::GstFormat, src_value: i64, dest_format: gst::GstFormat, dest_value: *mut i64) -> gboolean;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_info_copy(info: *const GstVideoInfo) -> *mut GstVideoInfo;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_info_free(info: *mut GstVideoInfo);
+ pub fn gst_video_info_from_caps(info: *mut GstVideoInfo, caps: *const gst::GstCaps) -> gboolean;
+ pub fn gst_video_info_init(info: *mut GstVideoInfo);
+ pub fn gst_video_info_is_equal(info: *const GstVideoInfo, other: *const GstVideoInfo) -> gboolean;
+ pub fn gst_video_info_set_format(info: *mut GstVideoInfo, format: GstVideoFormat, width: c_uint, height: c_uint) -> gboolean;
+ pub fn gst_video_info_to_caps(info: *mut GstVideoInfo) -> *mut gst::GstCaps;
+
+ //=========================================================================
+ // GstVideoMeta
+ //=========================================================================
+ // Per-plane map/unmap of video buffer metadata.
+ pub fn gst_video_meta_map(meta: *mut GstVideoMeta, plane: c_uint, info: *mut gst::GstMapInfo, data: *mut gpointer, stride: *mut c_int, flags: gst::GstMapFlags) -> gboolean;
+ pub fn gst_video_meta_unmap(meta: *mut GstVideoMeta, plane: c_uint, info: *mut gst::GstMapInfo) -> gboolean;
+ pub fn gst_video_meta_get_info() -> *const gst::GstMetaInfo;
+
+ //=========================================================================
+ // GstVideoMetaTransform
+ //=========================================================================
+ pub fn gst_video_meta_transform_scale_get_quark() -> glib::GQuark;
+
+ //=========================================================================
+ // GstVideoOverlayComposition
+ //=========================================================================
+ // Refcounted composition of overlay rectangles; blend() draws the
+ // composition onto a mapped GstVideoFrame.
+ pub fn gst_video_overlay_composition_get_type() -> GType;
+ pub fn gst_video_overlay_composition_new(rectangle: *mut GstVideoOverlayRectangle) -> *mut GstVideoOverlayComposition;
+ pub fn gst_video_overlay_composition_add_rectangle(comp: *mut GstVideoOverlayComposition, rectangle: *mut GstVideoOverlayRectangle);
+ pub fn gst_video_overlay_composition_blend(comp: *mut GstVideoOverlayComposition, video_buf: *mut GstVideoFrame) -> gboolean;
+ pub fn gst_video_overlay_composition_copy(comp: *mut GstVideoOverlayComposition) -> *mut GstVideoOverlayComposition;
+ pub fn gst_video_overlay_composition_get_rectangle(comp: *mut GstVideoOverlayComposition, n: c_uint) -> *mut GstVideoOverlayRectangle;
+ pub fn gst_video_overlay_composition_get_seqnum(comp: *mut GstVideoOverlayComposition) -> c_uint;
+ pub fn gst_video_overlay_composition_make_writable(comp: *mut GstVideoOverlayComposition) -> *mut GstVideoOverlayComposition;
+ pub fn gst_video_overlay_composition_n_rectangles(comp: *mut GstVideoOverlayComposition) -> c_uint;
+
+ //=========================================================================
+ // GstVideoOverlayCompositionMeta
+ //=========================================================================
+ pub fn gst_video_overlay_composition_meta_get_info() -> *const gst::GstMetaInfo;
+
+ //=========================================================================
+ // GstVideoOverlayRectangle
+ //=========================================================================
+ // Pixel accessors come in raw/argb/ayuv and scaled/unscaled variants; all
+ // take the same flags type and return a GstBuffer of pixels.
+ pub fn gst_video_overlay_rectangle_get_type() -> GType;
+ pub fn gst_video_overlay_rectangle_new_raw(pixels: *mut gst::GstBuffer, render_x: c_int, render_y: c_int, render_width: c_uint, render_height: c_uint, flags: GstVideoOverlayFormatFlags) -> *mut GstVideoOverlayRectangle;
+ pub fn gst_video_overlay_rectangle_copy(rectangle: *mut GstVideoOverlayRectangle) -> *mut GstVideoOverlayRectangle;
+ pub fn gst_video_overlay_rectangle_get_flags(rectangle: *mut GstVideoOverlayRectangle) -> GstVideoOverlayFormatFlags;
+ pub fn gst_video_overlay_rectangle_get_global_alpha(rectangle: *mut GstVideoOverlayRectangle) -> c_float;
+ pub fn gst_video_overlay_rectangle_get_pixels_argb(rectangle: *mut GstVideoOverlayRectangle, flags: GstVideoOverlayFormatFlags) -> *mut gst::GstBuffer;
+ pub fn gst_video_overlay_rectangle_get_pixels_ayuv(rectangle: *mut GstVideoOverlayRectangle, flags: GstVideoOverlayFormatFlags) -> *mut gst::GstBuffer;
+ pub fn gst_video_overlay_rectangle_get_pixels_raw(rectangle: *mut GstVideoOverlayRectangle, flags: GstVideoOverlayFormatFlags) -> *mut gst::GstBuffer;
+ pub fn gst_video_overlay_rectangle_get_pixels_unscaled_argb(rectangle: *mut GstVideoOverlayRectangle, flags: GstVideoOverlayFormatFlags) -> *mut gst::GstBuffer;
+ pub fn gst_video_overlay_rectangle_get_pixels_unscaled_ayuv(rectangle: *mut GstVideoOverlayRectangle, flags: GstVideoOverlayFormatFlags) -> *mut gst::GstBuffer;
+ pub fn gst_video_overlay_rectangle_get_pixels_unscaled_raw(rectangle: *mut GstVideoOverlayRectangle, flags: GstVideoOverlayFormatFlags) -> *mut gst::GstBuffer;
+ pub fn gst_video_overlay_rectangle_get_render_rectangle(rectangle: *mut GstVideoOverlayRectangle, render_x: *mut c_int, render_y: *mut c_int, render_width: *mut c_uint, render_height: *mut c_uint) -> gboolean;
+ pub fn gst_video_overlay_rectangle_get_seqnum(rectangle: *mut GstVideoOverlayRectangle) -> c_uint;
+ pub fn gst_video_overlay_rectangle_set_global_alpha(rectangle: *mut GstVideoOverlayRectangle, global_alpha: c_float);
+ pub fn gst_video_overlay_rectangle_set_render_rectangle(rectangle: *mut GstVideoOverlayRectangle, render_x: c_int, render_y: c_int, render_width: c_uint, render_height: c_uint);
+
+ //=========================================================================
+ // GstVideoRegionOfInterestMeta
+ //=========================================================================
+ pub fn gst_video_region_of_interest_meta_get_info() -> *const gst::GstMetaInfo;
+
+ //=========================================================================
+ // GstVideoResampler
+ //=========================================================================
+ // NOTE(review): gst_video_resampler_clear is gated on "v1_6" while
+ // gst_video_resampler_init is not, although both belong to the same
+ // 1.6-era type — presumably the .gir source is missing a Since annotation
+ // on init; verify against the GstVideo docs before regenerating.
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_resampler_clear(resampler: *mut GstVideoResampler);
+ pub fn gst_video_resampler_init(resampler: *mut GstVideoResampler, method: GstVideoResamplerMethod, flags: GstVideoResamplerFlags, n_phases: c_uint, n_taps: c_uint, shift: c_double, in_size: c_uint, out_size: c_uint, options: *mut gst::GstStructure) -> gboolean;
+
+ //=========================================================================
+ // GstVideoScaler
+ //=========================================================================
+ // NOTE(review): only combine_packed_YUV carries a "v1_6" gate here; the
+ // other scaler entry points are ungated — confirm their Since versions in
+ // the upstream .gir files rather than hand-editing this generated output.
+ pub fn gst_video_scaler_2d(hscale: *mut GstVideoScaler, vscale: *mut GstVideoScaler, format: GstVideoFormat, src: gpointer, src_stride: c_int, dest: gpointer, dest_stride: c_int, x: c_uint, y: c_uint, width: c_uint, height: c_uint);
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_scaler_combine_packed_YUV(y_scale: *mut GstVideoScaler, uv_scale: *mut GstVideoScaler, in_format: GstVideoFormat, out_format: GstVideoFormat) -> *mut GstVideoScaler;
+ pub fn gst_video_scaler_free(scale: *mut GstVideoScaler);
+ pub fn gst_video_scaler_get_coeff(scale: *mut GstVideoScaler, out_offset: c_uint, in_offset: *mut c_uint, n_taps: *mut c_uint) -> *const c_double;
+ pub fn gst_video_scaler_get_max_taps(scale: *mut GstVideoScaler) -> c_uint;
+ pub fn gst_video_scaler_horizontal(scale: *mut GstVideoScaler, format: GstVideoFormat, src: gpointer, dest: gpointer, dest_offset: c_uint, width: c_uint);
+ pub fn gst_video_scaler_vertical(scale: *mut GstVideoScaler, format: GstVideoFormat, src_lines: gpointer, dest: gpointer, dest_offset: c_uint, width: c_uint);
+ pub fn gst_video_scaler_new(method: GstVideoResamplerMethod, flags: GstVideoScalerFlags, n_taps: c_uint, in_size: c_uint, out_size: c_uint, options: *mut gst::GstStructure) -> *mut GstVideoScaler;
+
+ //=========================================================================
+ // GstVideoTimeCode
+ //=========================================================================
+ // SMPTE timecode API: constructors/accessors are gated "v1_10", the
+ // date-time and interval-based additions "v1_12".
+ // NOTE(review): get_type is ungated although the type is 1.10-era —
+ // presumably intentional for generated sys crates (an unused extern
+ // declaration has no link-time cost); confirm against other sys crates.
+ pub fn gst_video_time_code_get_type() -> GType;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_new(fps_n: c_uint, fps_d: c_uint, latest_daily_jam: *mut glib::GDateTime, flags: GstVideoTimeCodeFlags, hours: c_uint, minutes: c_uint, seconds: c_uint, frames: c_uint, field_count: c_uint) -> *mut GstVideoTimeCode;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_new_empty() -> *mut GstVideoTimeCode;
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_time_code_new_from_date_time(fps_n: c_uint, fps_d: c_uint, dt: *mut glib::GDateTime, flags: GstVideoTimeCodeFlags, field_count: c_uint) -> *mut GstVideoTimeCode;
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_time_code_new_from_string(tc_str: *const c_char) -> *mut GstVideoTimeCode;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_add_frames(tc: *mut GstVideoTimeCode, frames: i64);
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_time_code_add_interval(tc: *const GstVideoTimeCode, tc_inter: *const GstVideoTimeCodeInterval) -> *mut GstVideoTimeCode;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_clear(tc: *mut GstVideoTimeCode);
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_compare(tc1: *const GstVideoTimeCode, tc2: *const GstVideoTimeCode) -> c_int;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_copy(tc: *const GstVideoTimeCode) -> *mut GstVideoTimeCode;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_frames_since_daily_jam(tc: *const GstVideoTimeCode) -> u64;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_free(tc: *mut GstVideoTimeCode);
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_increment_frame(tc: *mut GstVideoTimeCode);
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_init(tc: *mut GstVideoTimeCode, fps_n: c_uint, fps_d: c_uint, latest_daily_jam: *mut glib::GDateTime, flags: GstVideoTimeCodeFlags, hours: c_uint, minutes: c_uint, seconds: c_uint, frames: c_uint, field_count: c_uint);
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_time_code_init_from_date_time(tc: *mut GstVideoTimeCode, fps_n: c_uint, fps_d: c_uint, dt: *mut glib::GDateTime, flags: GstVideoTimeCodeFlags, field_count: c_uint);
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_is_valid(tc: *const GstVideoTimeCode) -> gboolean;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_nsec_since_daily_jam(tc: *const GstVideoTimeCode) -> u64;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_to_date_time(tc: *const GstVideoTimeCode) -> *mut glib::GDateTime;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_video_time_code_to_string(tc: *const GstVideoTimeCode) -> *mut c_char;
+
+ //=========================================================================
+ // GstVideoTimeCodeInterval
+ //=========================================================================
+ // Interval variant of the timecode API; everything except get_type is
+ // gated "v1_12".
+ pub fn gst_video_time_code_interval_get_type() -> GType;
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_time_code_interval_new(hours: c_uint, minutes: c_uint, seconds: c_uint, frames: c_uint) -> *mut GstVideoTimeCodeInterval;
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_time_code_interval_new_from_string(tc_inter_str: *const c_char) -> *mut GstVideoTimeCodeInterval;
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_time_code_interval_clear(tc: *mut GstVideoTimeCodeInterval);
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_time_code_interval_copy(tc: *const GstVideoTimeCodeInterval) -> *mut GstVideoTimeCodeInterval;
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_time_code_interval_free(tc: *mut GstVideoTimeCodeInterval);
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_time_code_interval_init(tc: *mut GstVideoTimeCodeInterval, hours: c_uint, minutes: c_uint, seconds: c_uint, frames: c_uint);
+
+ //=========================================================================
+ // GstVideoTimeCodeMeta
+ //=========================================================================
+ pub fn gst_video_time_code_meta_get_info() -> *const gst::GstMetaInfo;
+
+ //=========================================================================
+ // GstColorBalanceChannel
+ //=========================================================================
+ pub fn gst_color_balance_channel_get_type() -> GType;
+
+ //=========================================================================
+ // GstVideoBufferPool
+ //=========================================================================
+ // Note: new() returns the base *mut gst::GstBufferPool, not a
+ // *mut GstVideoBufferPool, mirroring the C API.
+ pub fn gst_video_buffer_pool_get_type() -> GType;
+ pub fn gst_video_buffer_pool_new() -> *mut gst::GstBufferPool;
+
+ //=========================================================================
+ // GstVideoDecoder
+ //=========================================================================
+ // Base-class helpers for video decoder subclasses. Feature gates follow
+ // each function's Since version ("v1_0_3", "v1_2_2", "v1_4", "v1_6",
+ // "v1_12"); micro-version features like "v1_0_3" are unusual but match
+ // the upstream Since annotations.
+ pub fn gst_video_decoder_get_type() -> GType;
+ pub fn gst_video_decoder_add_to_frame(decoder: *mut GstVideoDecoder, n_bytes: c_int);
+ pub fn gst_video_decoder_allocate_output_buffer(decoder: *mut GstVideoDecoder) -> *mut gst::GstBuffer;
+ pub fn gst_video_decoder_allocate_output_frame(decoder: *mut GstVideoDecoder, frame: *mut GstVideoCodecFrame) -> gst::GstFlowReturn;
+ #[cfg(feature = "v1_12")]
+ pub fn gst_video_decoder_allocate_output_frame_with_params(decoder: *mut GstVideoDecoder, frame: *mut GstVideoCodecFrame, params: *mut gst::GstBufferPoolAcquireParams) -> gst::GstFlowReturn;
+ pub fn gst_video_decoder_drop_frame(dec: *mut GstVideoDecoder, frame: *mut GstVideoCodecFrame) -> gst::GstFlowReturn;
+ pub fn gst_video_decoder_finish_frame(decoder: *mut GstVideoDecoder, frame: *mut GstVideoCodecFrame) -> gst::GstFlowReturn;
+ pub fn gst_video_decoder_get_allocator(decoder: *mut GstVideoDecoder, allocator: *mut *mut gst::GstAllocator, params: *mut gst::GstAllocationParams);
+ pub fn gst_video_decoder_get_buffer_pool(decoder: *mut GstVideoDecoder) -> *mut gst::GstBufferPool;
+ pub fn gst_video_decoder_get_estimate_rate(dec: *mut GstVideoDecoder) -> c_int;
+ pub fn gst_video_decoder_get_frame(decoder: *mut GstVideoDecoder, frame_number: c_int) -> *mut GstVideoCodecFrame;
+ pub fn gst_video_decoder_get_frames(decoder: *mut GstVideoDecoder) -> *mut glib::GList;
+ pub fn gst_video_decoder_get_latency(decoder: *mut GstVideoDecoder, min_latency: *mut gst::GstClockTime, max_latency: *mut gst::GstClockTime);
+ pub fn gst_video_decoder_get_max_decode_time(decoder: *mut GstVideoDecoder, frame: *mut GstVideoCodecFrame) -> gst::GstClockTimeDiff;
+ pub fn gst_video_decoder_get_max_errors(dec: *mut GstVideoDecoder) -> c_int;
+ #[cfg(feature = "v1_4")]
+ pub fn gst_video_decoder_get_needs_format(dec: *mut GstVideoDecoder) -> gboolean;
+ pub fn gst_video_decoder_get_oldest_frame(decoder: *mut GstVideoDecoder) -> *mut GstVideoCodecFrame;
+ pub fn gst_video_decoder_get_output_state(decoder: *mut GstVideoDecoder) -> *mut GstVideoCodecState;
+ pub fn gst_video_decoder_get_packetized(decoder: *mut GstVideoDecoder) -> gboolean;
+ #[cfg(feature = "v1_4")]
+ pub fn gst_video_decoder_get_pending_frame_size(decoder: *mut GstVideoDecoder) -> size_t;
+ #[cfg(feature = "v1_0_3")]
+ pub fn gst_video_decoder_get_qos_proportion(decoder: *mut GstVideoDecoder) -> c_double;
+ pub fn gst_video_decoder_have_frame(decoder: *mut GstVideoDecoder) -> gst::GstFlowReturn;
+ pub fn gst_video_decoder_merge_tags(decoder: *mut GstVideoDecoder, tags: *const gst::GstTagList, mode: gst::GstTagMergeMode);
+ pub fn gst_video_decoder_negotiate(decoder: *mut GstVideoDecoder) -> gboolean;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_decoder_proxy_getcaps(decoder: *mut GstVideoDecoder, caps: *mut gst::GstCaps, filter: *mut gst::GstCaps) -> *mut gst::GstCaps;
+ #[cfg(feature = "v1_2_2")]
+ pub fn gst_video_decoder_release_frame(dec: *mut GstVideoDecoder, frame: *mut GstVideoCodecFrame);
+ pub fn gst_video_decoder_set_estimate_rate(dec: *mut GstVideoDecoder, enabled: gboolean);
+ pub fn gst_video_decoder_set_latency(decoder: *mut GstVideoDecoder, min_latency: gst::GstClockTime, max_latency: gst::GstClockTime);
+ pub fn gst_video_decoder_set_max_errors(dec: *mut GstVideoDecoder, num: c_int);
+ #[cfg(feature = "v1_4")]
+ pub fn gst_video_decoder_set_needs_format(dec: *mut GstVideoDecoder, enabled: gboolean);
+ pub fn gst_video_decoder_set_output_state(decoder: *mut GstVideoDecoder, fmt: GstVideoFormat, width: c_uint, height: c_uint, reference: *mut GstVideoCodecState) -> *mut GstVideoCodecState;
+ pub fn gst_video_decoder_set_packetized(decoder: *mut GstVideoDecoder, packetized: gboolean);
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_decoder_set_use_default_pad_acceptcaps(decoder: *mut GstVideoDecoder, use_: gboolean);
+
+ //=========================================================================
+ // GstVideoEncoder
+ //=========================================================================
+ // Base-class helpers for video encoder subclasses; mirrors the decoder
+ // API above but takes an explicit output `size` for allocations.
+ pub fn gst_video_encoder_get_type() -> GType;
+ pub fn gst_video_encoder_allocate_output_buffer(encoder: *mut GstVideoEncoder, size: size_t) -> *mut gst::GstBuffer;
+ pub fn gst_video_encoder_allocate_output_frame(encoder: *mut GstVideoEncoder, frame: *mut GstVideoCodecFrame, size: size_t) -> gst::GstFlowReturn;
+ pub fn gst_video_encoder_finish_frame(encoder: *mut GstVideoEncoder, frame: *mut GstVideoCodecFrame) -> gst::GstFlowReturn;
+ pub fn gst_video_encoder_get_allocator(encoder: *mut GstVideoEncoder, allocator: *mut *mut gst::GstAllocator, params: *mut gst::GstAllocationParams);
+ pub fn gst_video_encoder_get_frame(encoder: *mut GstVideoEncoder, frame_number: c_int) -> *mut GstVideoCodecFrame;
+ pub fn gst_video_encoder_get_frames(encoder: *mut GstVideoEncoder) -> *mut glib::GList;
+ pub fn gst_video_encoder_get_latency(encoder: *mut GstVideoEncoder, min_latency: *mut gst::GstClockTime, max_latency: *mut gst::GstClockTime);
+ pub fn gst_video_encoder_get_oldest_frame(encoder: *mut GstVideoEncoder) -> *mut GstVideoCodecFrame;
+ pub fn gst_video_encoder_get_output_state(encoder: *mut GstVideoEncoder) -> *mut GstVideoCodecState;
+ pub fn gst_video_encoder_merge_tags(encoder: *mut GstVideoEncoder, tags: *const gst::GstTagList, mode: gst::GstTagMergeMode);
+ pub fn gst_video_encoder_negotiate(encoder: *mut GstVideoEncoder) -> gboolean;
+ pub fn gst_video_encoder_proxy_getcaps(enc: *mut GstVideoEncoder, caps: *mut gst::GstCaps, filter: *mut gst::GstCaps) -> *mut gst::GstCaps;
+ pub fn gst_video_encoder_set_headers(encoder: *mut GstVideoEncoder, headers: *mut glib::GList);
+ pub fn gst_video_encoder_set_latency(encoder: *mut GstVideoEncoder, min_latency: gst::GstClockTime, max_latency: gst::GstClockTime);
+ pub fn gst_video_encoder_set_min_pts(encoder: *mut GstVideoEncoder, min_pts: gst::GstClockTime);
+ pub fn gst_video_encoder_set_output_state(encoder: *mut GstVideoEncoder, caps: *mut gst::GstCaps, reference: *mut GstVideoCodecState) -> *mut GstVideoCodecState;
+
+ //=========================================================================
+ // GstVideoFilter
+ //=========================================================================
+ pub fn gst_video_filter_get_type() -> GType;
+
+ //=========================================================================
+ // GstVideoMultiviewFlagsSet
+ //=========================================================================
+ pub fn gst_video_multiview_flagset_get_type() -> GType;
+
+ //=========================================================================
+ // GstVideoSink
+ //=========================================================================
+ // center_rect takes the rectangles by value, matching the C signature.
+ pub fn gst_video_sink_get_type() -> GType;
+ pub fn gst_video_sink_center_rect(src: GstVideoRectangle, dst: GstVideoRectangle, result: *mut GstVideoRectangle, scaling: gboolean);
+
+ //=========================================================================
+ // GstColorBalance
+ //=========================================================================
+ // Interface for brightness/contrast/hue/saturation style adjustments.
+ pub fn gst_color_balance_get_type() -> GType;
+ pub fn gst_color_balance_get_balance_type(balance: *mut GstColorBalance) -> GstColorBalanceType;
+ pub fn gst_color_balance_get_value(balance: *mut GstColorBalance, channel: *mut GstColorBalanceChannel) -> c_int;
+ pub fn gst_color_balance_list_channels(balance: *mut GstColorBalance) -> *const glib::GList;
+ pub fn gst_color_balance_set_value(balance: *mut GstColorBalance, channel: *mut GstColorBalanceChannel, value: c_int);
+ pub fn gst_color_balance_value_changed(balance: *mut GstColorBalance, channel: *mut GstColorBalanceChannel, value: c_int);
+
+ //=========================================================================
+ // GstNavigation
+ //=========================================================================
+ // Interface plus free functions for building and parsing navigation
+ // events, messages and queries. Note set_commands is variadic (…) while
+ // set_commandsv takes an array pointer.
+ pub fn gst_navigation_get_type() -> GType;
+ pub fn gst_navigation_event_get_type(event: *mut gst::GstEvent) -> GstNavigationEventType;
+ pub fn gst_navigation_event_parse_command(event: *mut gst::GstEvent, command: *mut GstNavigationCommand) -> gboolean;
+ pub fn gst_navigation_event_parse_key_event(event: *mut gst::GstEvent, key: *mut *const c_char) -> gboolean;
+ pub fn gst_navigation_event_parse_mouse_button_event(event: *mut gst::GstEvent, button: *mut c_int, x: *mut c_double, y: *mut c_double) -> gboolean;
+ pub fn gst_navigation_event_parse_mouse_move_event(event: *mut gst::GstEvent, x: *mut c_double, y: *mut c_double) -> gboolean;
+ pub fn gst_navigation_message_get_type(message: *mut gst::GstMessage) -> GstNavigationMessageType;
+ pub fn gst_navigation_message_new_angles_changed(src: *mut gst::GstObject, cur_angle: c_uint, n_angles: c_uint) -> *mut gst::GstMessage;
+ pub fn gst_navigation_message_new_commands_changed(src: *mut gst::GstObject) -> *mut gst::GstMessage;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_navigation_message_new_event(src: *mut gst::GstObject, event: *mut gst::GstEvent) -> *mut gst::GstMessage;
+ pub fn gst_navigation_message_new_mouse_over(src: *mut gst::GstObject, active: gboolean) -> *mut gst::GstMessage;
+ pub fn gst_navigation_message_parse_angles_changed(message: *mut gst::GstMessage, cur_angle: *mut c_uint, n_angles: *mut c_uint) -> gboolean;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_navigation_message_parse_event(message: *mut gst::GstMessage, event: *mut *mut gst::GstEvent) -> gboolean;
+ pub fn gst_navigation_message_parse_mouse_over(message: *mut gst::GstMessage, active: *mut gboolean) -> gboolean;
+ pub fn gst_navigation_query_get_type(query: *mut gst::GstQuery) -> GstNavigationQueryType;
+ pub fn gst_navigation_query_new_angles() -> *mut gst::GstQuery;
+ pub fn gst_navigation_query_new_commands() -> *mut gst::GstQuery;
+ pub fn gst_navigation_query_parse_angles(query: *mut gst::GstQuery, cur_angle: *mut c_uint, n_angles: *mut c_uint) -> gboolean;
+ pub fn gst_navigation_query_parse_commands_length(query: *mut gst::GstQuery, n_cmds: *mut c_uint) -> gboolean;
+ pub fn gst_navigation_query_parse_commands_nth(query: *mut gst::GstQuery, nth: c_uint, cmd: *mut GstNavigationCommand) -> gboolean;
+ pub fn gst_navigation_query_set_angles(query: *mut gst::GstQuery, cur_angle: c_uint, n_angles: c_uint);
+ pub fn gst_navigation_query_set_commands(query: *mut gst::GstQuery, n_cmds: c_int, ...);
+ pub fn gst_navigation_query_set_commandsv(query: *mut gst::GstQuery, n_cmds: c_int, cmds: *mut GstNavigationCommand);
+ pub fn gst_navigation_send_command(navigation: *mut GstNavigation, command: GstNavigationCommand);
+ pub fn gst_navigation_send_event(navigation: *mut GstNavigation, structure: *mut gst::GstStructure);
+ pub fn gst_navigation_send_key_event(navigation: *mut GstNavigation, event: *const c_char, key: *const c_char);
+ pub fn gst_navigation_send_mouse_event(navigation: *mut GstNavigation, event: *const c_char, button: c_int, x: c_double, y: c_double);
+
+ //=========================================================================
+ // GstVideoDirection
+ //=========================================================================
+ pub fn gst_video_direction_get_type() -> GType;
+
+ //=========================================================================
+ // GstVideoOrientation
+ //=========================================================================
+ // Interface for hardware flip/center adjustments; getters write through
+ // out-pointers and return whether the property is supported.
+ pub fn gst_video_orientation_get_type() -> GType;
+ pub fn gst_video_orientation_get_hcenter(video_orientation: *mut GstVideoOrientation, center: *mut c_int) -> gboolean;
+ pub fn gst_video_orientation_get_hflip(video_orientation: *mut GstVideoOrientation, flip: *mut gboolean) -> gboolean;
+ pub fn gst_video_orientation_get_vcenter(video_orientation: *mut GstVideoOrientation, center: *mut c_int) -> gboolean;
+ pub fn gst_video_orientation_get_vflip(video_orientation: *mut GstVideoOrientation, flip: *mut gboolean) -> gboolean;
+ pub fn gst_video_orientation_set_hcenter(video_orientation: *mut GstVideoOrientation, center: c_int) -> gboolean;
+ pub fn gst_video_orientation_set_hflip(video_orientation: *mut GstVideoOrientation, flip: gboolean) -> gboolean;
+ pub fn gst_video_orientation_set_vcenter(video_orientation: *mut GstVideoOrientation, center: c_int) -> gboolean;
+ pub fn gst_video_orientation_set_vflip(video_orientation: *mut GstVideoOrientation, flip: gboolean) -> gboolean;
+
+ //=========================================================================
+ // GstVideoOverlay
+ //=========================================================================
+ // Window-embedding interface; window handles are passed as uintptr_t to
+ // cover X11 Window ids, HWNDs, etc.
+ pub fn gst_video_overlay_get_type() -> GType;
+ pub fn gst_video_overlay_expose(overlay: *mut GstVideoOverlay);
+ pub fn gst_video_overlay_got_window_handle(overlay: *mut GstVideoOverlay, handle: uintptr_t);
+ pub fn gst_video_overlay_handle_events(overlay: *mut GstVideoOverlay, handle_events: gboolean);
+ pub fn gst_video_overlay_prepare_window_handle(overlay: *mut GstVideoOverlay);
+ pub fn gst_video_overlay_set_render_rectangle(overlay: *mut GstVideoOverlay, x: c_int, y: c_int, width: c_int, height: c_int) -> gboolean;
+ pub fn gst_video_overlay_set_window_handle(overlay: *mut GstVideoOverlay, handle: uintptr_t);
+
+ //=========================================================================
+ // Other functions
+ //=========================================================================
+ // Free functions: gst_buffer_* meta attach/lookup helpers, meta-API GType
+ // accessors, and miscellaneous global GstVideo utilities.
+ #[cfg(feature = "v1_8")]
+ pub fn gst_buffer_add_video_affine_transformation_meta(buffer: *mut gst::GstBuffer) -> *mut GstVideoAffineTransformationMeta;
+ pub fn gst_buffer_add_video_gl_texture_upload_meta(buffer: *mut gst::GstBuffer, texture_orientation: GstVideoGLTextureOrientation, n_textures: c_uint, texture_type: GstVideoGLTextureType, upload: GstVideoGLTextureUpload, user_data: gpointer, user_data_copy: gobject::GBoxedCopyFunc, user_data_free: gobject::GBoxedFreeFunc) -> *mut GstVideoGLTextureUploadMeta;
+ pub fn gst_buffer_add_video_meta(buffer: *mut gst::GstBuffer, flags: GstVideoFrameFlags, format: GstVideoFormat, width: c_uint, height: c_uint) -> *mut GstVideoMeta;
+ // NOTE(review): in the C API `offset` and `stride` here are arrays
+ // (gsize[], gint[]); the generated scalar types match gir's sys output —
+ // callers must pass pointers to the first element. Verify against the
+ // GstVideo headers.
+ pub fn gst_buffer_add_video_meta_full(buffer: *mut gst::GstBuffer, flags: GstVideoFrameFlags, format: GstVideoFormat, width: c_uint, height: c_uint, n_planes: c_uint, offset: size_t, stride: c_int) -> *mut GstVideoMeta;
+ pub fn gst_buffer_add_video_overlay_composition_meta(buf: *mut gst::GstBuffer, comp: *mut GstVideoOverlayComposition) -> *mut GstVideoOverlayCompositionMeta;
+ pub fn gst_buffer_add_video_region_of_interest_meta(buffer: *mut gst::GstBuffer, roi_type: *const c_char, x: c_uint, y: c_uint, w: c_uint, h: c_uint) -> *mut GstVideoRegionOfInterestMeta;
+ pub fn gst_buffer_add_video_region_of_interest_meta_id(buffer: *mut gst::GstBuffer, roi_type: glib::GQuark, x: c_uint, y: c_uint, w: c_uint, h: c_uint) -> *mut GstVideoRegionOfInterestMeta;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_buffer_add_video_time_code_meta(buffer: *mut gst::GstBuffer, tc: *mut GstVideoTimeCode) -> *mut GstVideoTimeCodeMeta;
+ #[cfg(feature = "v1_10")]
+ pub fn gst_buffer_add_video_time_code_meta_full(buffer: *mut gst::GstBuffer, fps_n: c_uint, fps_d: c_uint, latest_daily_jam: *mut glib::GDateTime, flags: GstVideoTimeCodeFlags, hours: c_uint, minutes: c_uint, seconds: c_uint, frames: c_uint, field_count: c_uint) -> *mut GstVideoTimeCodeMeta;
+ pub fn gst_buffer_get_video_meta(buffer: *mut gst::GstBuffer) -> *mut GstVideoMeta;
+ pub fn gst_buffer_get_video_meta_id(buffer: *mut gst::GstBuffer, id: c_int) -> *mut GstVideoMeta;
+ pub fn gst_buffer_get_video_region_of_interest_meta_id(buffer: *mut gst::GstBuffer, id: c_int) -> *mut GstVideoRegionOfInterestMeta;
+ pub fn gst_buffer_pool_config_get_video_alignment(config: *mut gst::GstStructure, align: *mut GstVideoAlignment) -> gboolean;
+ pub fn gst_buffer_pool_config_set_video_alignment(config: *mut gst::GstStructure, align: *mut GstVideoAlignment);
+ pub fn gst_is_video_overlay_prepare_window_handle_message(msg: *mut gst::GstMessage) -> gboolean;
+ pub fn gst_video_affine_transformation_meta_api_get_type() -> GType;
+ pub fn gst_video_blend(dest: *mut GstVideoFrame, src: *mut GstVideoFrame, x: c_int, y: c_int, global_alpha: c_float) -> gboolean;
+ pub fn gst_video_blend_scale_linear_RGBA(src: *mut GstVideoInfo, src_buffer: *mut gst::GstBuffer, dest_height: c_int, dest_width: c_int, dest: *mut GstVideoInfo, dest_buffer: *mut *mut gst::GstBuffer);
+ pub fn gst_video_calculate_display_ratio(dar_n: *mut c_uint, dar_d: *mut c_uint, video_width: c_uint, video_height: c_uint, video_par_n: c_uint, video_par_d: c_uint, display_par_n: c_uint, display_par_d: c_uint) -> gboolean;
+ pub fn gst_video_chroma_from_string(s: *const c_char) -> GstVideoChromaSite;
+ pub fn gst_video_chroma_resample(resample: *mut GstVideoChromaResample, lines: gpointer, width: c_int);
+ pub fn gst_video_chroma_to_string(site: GstVideoChromaSite) -> *const c_char;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_color_transfer_decode(func: GstVideoTransferFunction, val: c_double) -> c_double;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_color_transfer_encode(func: GstVideoTransferFunction, val: c_double) -> c_double;
+ pub fn gst_video_convert_sample(sample: *mut gst::GstSample, to_caps: *const gst::GstCaps, timeout: gst::GstClockTime, error: *mut *mut glib::GError) -> *mut gst::GstSample;
+ pub fn gst_video_convert_sample_async(sample: *mut gst::GstSample, to_caps: *const gst::GstCaps, timeout: gst::GstClockTime, callback: GstVideoConvertSampleCallback, user_data: gpointer, destroy_notify: glib::GDestroyNotify);
+ pub fn gst_video_crop_meta_api_get_type() -> GType;
+ pub fn gst_video_event_is_force_key_unit(event: *mut gst::GstEvent) -> gboolean;
+ pub fn gst_video_event_new_downstream_force_key_unit(timestamp: gst::GstClockTime, stream_time: gst::GstClockTime, running_time: gst::GstClockTime, all_headers: gboolean, count: c_uint) -> *mut gst::GstEvent;
+ pub fn gst_video_event_new_still_frame(in_still: gboolean) -> *mut gst::GstEvent;
+ pub fn gst_video_event_new_upstream_force_key_unit(running_time: gst::GstClockTime, all_headers: gboolean, count: c_uint) -> *mut gst::GstEvent;
+ pub fn gst_video_event_parse_downstream_force_key_unit(event: *mut gst::GstEvent, timestamp: *mut gst::GstClockTime, stream_time: *mut gst::GstClockTime, running_time: *mut gst::GstClockTime, all_headers: *mut gboolean, count: *mut c_uint) -> gboolean;
+ pub fn gst_video_event_parse_still_frame(event: *mut gst::GstEvent, in_still: *mut gboolean) -> gboolean;
+ pub fn gst_video_event_parse_upstream_force_key_unit(event: *mut gst::GstEvent, running_time: *mut gst::GstClockTime, all_headers: *mut gboolean, count: *mut c_uint) -> gboolean;
+ pub fn gst_video_gl_texture_upload_meta_api_get_type() -> GType;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_guess_framerate(duration: gst::GstClockTime, dest_n: *mut c_int, dest_d: *mut c_int) -> gboolean;
+ pub fn gst_video_meta_api_get_type() -> GType;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_multiview_get_doubled_height_modes() -> *const gobject::GValue;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_multiview_get_doubled_size_modes() -> *const gobject::GValue;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_multiview_get_doubled_width_modes() -> *const gobject::GValue;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_multiview_get_mono_modes() -> *const gobject::GValue;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_multiview_get_unpacked_modes() -> *const gobject::GValue;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_multiview_guess_half_aspect(mv_mode: GstVideoMultiviewMode, width: c_uint, height: c_uint, par_n: c_uint, par_d: c_uint) -> gboolean;
+ #[cfg(feature = "v1_6")]
+ pub fn gst_video_multiview_video_info_change_mode(info: *mut GstVideoInfo, out_mview_mode: GstVideoMultiviewMode, out_mview_flags: GstVideoMultiviewFlags);
+ pub fn gst_video_overlay_composition_meta_api_get_type() -> GType;
+ pub fn gst_video_region_of_interest_meta_api_get_type() -> GType;
+ #[cfg(feature = "v1_4")]
+ pub fn gst_video_tile_get_index(mode: GstVideoTileMode, x: c_int, y: c_int, x_tiles: c_int, y_tiles: c_int) -> c_uint;
+ pub fn gst_video_time_code_meta_api_get_type() -> GType;
+
+}