mirror of
https://gitlab.freedesktop.org/gstreamer/gstreamer.git
synced 2024-11-26 11:41:09 +00:00
docs: Port all docstring to gtk-doc markdown
This commit is contained in:
parent
90f766cc51
commit
78022a6e0c
257 changed files with 943 additions and 1141 deletions
|
@ -20,15 +20,15 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-assrender
|
||||
* @title: assrender
|
||||
*
|
||||
* Renders timestamped SSA/ASS subtitles on top of a video stream.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line</title>
|
||||
* ## Example launch line
|
||||
* |[
|
||||
* gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mpegaudioparse ! mpg123audiodec ! audioconvert ! autoaudiosink d. ! queue ! h264parse ! avdec_h264 ! videoconvert ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink
|
||||
* ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,17 +20,17 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-bs2b
|
||||
* @title: bs2b
|
||||
*
|
||||
* Improve headphone listening of stereo audio records using the bs2b library.
|
||||
* It does so by mixing the left and right channel in a way that simulates
|
||||
* a stereo speaker setup while using headphones.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example pipelines</title>
|
||||
* ## Example pipelines
|
||||
* |[
|
||||
* gst-launch-1.0 audiotestsrc ! "audio/x-raw,channel-mask=(bitmask)0x1" ! interleave name=i ! bs2b ! autoaudiosink audiotestsrc freq=330 ! "audio/x-raw,channel-mask=(bitmask)0x2" ! i.
|
||||
* ]| Play two independent sine test sources and crossfeed them.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -23,18 +23,18 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-chromaprint
|
||||
* @title: chromaprint
|
||||
*
|
||||
* The chromaprint element calculates an acoustic fingerprint for an
|
||||
* audio stream which can be used to identify a song and look up
|
||||
* further metadata from the <ulink url="http://acoustid.org/">Acoustid</ulink>
|
||||
* and Musicbrainz databases.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line</title>
|
||||
* ## Example launch line
|
||||
* |[
|
||||
* gst-launch-1.0 -m uridecodebin uri=file:///path/to/song.ogg ! audioconvert ! chromaprint ! fakesink
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -19,14 +19,14 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-curlsink
|
||||
* @title: curlsink
|
||||
* @short_description: sink that uploads data to a server using libcurl
|
||||
* @see_also:
|
||||
*
|
||||
* This is a network sink that uses libcurl as a client to upload data to
|
||||
* a server (e.g. a HTTP/FTP server).
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line (upload a JPEG file to an HTTP server)</title>
|
||||
* ## Example launch line (upload a JPEG file to an HTTP server)
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlsink \
|
||||
* file-name=image.jpg \
|
||||
|
@ -35,7 +35,7 @@
|
|||
* content-type=image/jpeg \
|
||||
* use-content-length=false
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -19,21 +19,20 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-curlfilesink
|
||||
* @title: curlfilesink
|
||||
* @short_description: sink that uploads data to a server using libcurl
|
||||
* @see_also:
|
||||
*
|
||||
* This is a network sink that uses libcurl as a client to upload data to
|
||||
* a local or network drive.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line (upload a JPEG file to /home/test/images
|
||||
* directory)</title>
|
||||
* ## Example launch line (upload a JPEG file to /home/test/images directory)
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlfilesink \
|
||||
* file-name=image.jpg \
|
||||
* location=file:///home/test/images/
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -19,21 +19,23 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-curlftpsink
|
||||
* @title: curlftpsink
|
||||
* @short_description: sink that uploads data to a server using libcurl
|
||||
* @see_also:
|
||||
*
|
||||
* This is a network sink that uses libcurl as a client to upload data to
|
||||
* an FTP server.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line (upload a JPEG file to /home/test/images
|
||||
* directory)</title>
|
||||
* ## Example launch line
|
||||
*
|
||||
* Upload a JPEG file to /home/test/images * directory)
|
||||
*
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlftpsink \
|
||||
* file-name=image.jpg \
|
||||
* location=ftp://192.168.0.1/images/
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -19,14 +19,17 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-curlhttpsink
|
||||
* @title: curlhttpsink
|
||||
* @short_description: sink that uploads data to a server using libcurl
|
||||
* @see_also:
|
||||
*
|
||||
* This is a network sink that uses libcurl as a client to upload data to
|
||||
* an HTTP server.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line (upload a JPEG file to an HTTP server)</title>
|
||||
* ## Example launch line
|
||||
*
|
||||
* Upload a JPEG file to an HTTP server.
|
||||
*
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlhttpsink \
|
||||
* file-name=image.jpg \
|
||||
|
@ -35,7 +38,6 @@
|
|||
* content-type=image/jpeg \
|
||||
* use-content-length=false
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -19,14 +19,17 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-curlsftpsink
|
||||
* @title: curlsftpsink
|
||||
* @short_description: sink that uploads data to a server using libcurl
|
||||
* @see_also:
|
||||
*
|
||||
* This is a network sink that uses libcurl as a client to upload data to
|
||||
* a SFTP (SSH File Transfer Protocol) server.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line (upload a file to /home/john/sftp_tests/)</title>
|
||||
* ## Example launch line
|
||||
*
|
||||
* Upload a file to /home/john/sftp_tests/
|
||||
*
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=/home/jdoe/some.file ! curlsftpsink \
|
||||
* file-name=some.file.backup \
|
||||
|
@ -36,8 +39,6 @@
|
|||
* ssh-priv-keyfile=/home/jdoe/.ssh/id_rsa \
|
||||
* create-dirs=TRUE
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -19,14 +19,17 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-curlsink
|
||||
* @title: curlsink
|
||||
* @short_description: sink that uploads data to a server using libcurl
|
||||
* @see_also:
|
||||
*
|
||||
* This is a network sink that uses libcurl as a client to upload data to
|
||||
* an SMTP server.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line (upload a JPEG file to an SMTP server)</title>
|
||||
* ## Example launch line
|
||||
*
|
||||
* Upload a JPEG file to an SMTP server.
|
||||
*
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlsmtpsink \
|
||||
* file-name=image.jpg \
|
||||
|
@ -38,7 +41,7 @@
|
|||
* use-ssl=TRUE \
|
||||
* insecure=TRUE
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-curlsshsink
|
||||
* @title: curlsshsink
|
||||
* @short_description: sink that uploads data to a server using libcurl
|
||||
* @see_also:
|
||||
*
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-curltlssink
|
||||
* @title: curltlssink
|
||||
* @short_description: sink that uploads data to a server using libcurl
|
||||
* @see_also:
|
||||
*
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-daaladec
|
||||
* @title: daaladec
|
||||
* @see_also: daalaenc, oggdemux
|
||||
*
|
||||
* This element decodes daala streams into raw video
|
||||
|
@ -30,13 +31,12 @@
|
|||
* video codec maintained by the <ulink url="http://www.xiph.org/">Xiph.org
|
||||
* Foundation</ulink>.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example pipeline</title>
|
||||
* ## Example pipeline
|
||||
* |[
|
||||
* gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! daaladec ! xvimagesink
|
||||
* ]| This example pipeline will decode an ogg stream and decodes the daala video. Refer to
|
||||
* the daalaenc example to create the ogg file.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-daalaenc
|
||||
* @title: daalaenc
|
||||
* @see_also: daaladec, oggmux
|
||||
*
|
||||
* This element encodes raw video into a Daala stream.
|
||||
|
@ -30,14 +31,13 @@
|
|||
* video codec maintained by the <ulink url="http://www.xiph.org/">Xiph.org
|
||||
* Foundation</ulink>.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example pipeline</title>
|
||||
* ## Example pipeline
|
||||
* |[
|
||||
* gst-launch-1.0 -v videotestsrc num-buffers=1000 ! daalaenc ! oggmux ! filesink location=videotestsrc.ogg
|
||||
* ]| This example pipeline will encode a test video source to daala muxed in an
|
||||
* ogg container. Refer to the daaladec documentation to decode the create
|
||||
* stream.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -29,9 +29,10 @@
|
|||
*/
|
||||
/**
|
||||
* SECTION:element-dashdemux
|
||||
* @title: dashdemux
|
||||
*
|
||||
* DASH demuxer element.
|
||||
* <title>Example launch line</title>
|
||||
* ## Example launch line
|
||||
* |[
|
||||
* gst-launch-1.0 playbin uri="http://www-itec.uni-klu.ac.at/ftp/datasets/mmsys12/RedBullPlayStreets/redbull_4s/RedBullPlayStreets_4s_isoffmain_DIS_23009_1_v_2_1c2_2011_08_30.mpd"
|
||||
* ]|
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-dc1394src
|
||||
* @title: dc1394src
|
||||
*
|
||||
* Source for IIDC (Instrumentation & Industrial Digital Camera) firewire
|
||||
* cameras. If several cameras are connected to the system, the desired one
|
||||
|
@ -31,8 +32,7 @@
|
|||
* corresponding video formats are exposed in the capabilities.
|
||||
* The Bayer pattern is left unspecified.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch lines</title>
|
||||
* ## Example launch lines
|
||||
* |[
|
||||
* gst-launch-1.0 -v dc1394src ! videoconvert ! autovideosink
|
||||
* ]| Capture and display frames from the first camera available in the system.
|
||||
|
@ -41,7 +41,7 @@
|
|||
* ! "video/x-bayer,format=gbrg,width=1280,height=960,framerate=15/2" \
|
||||
* ! bayer2rgb ! videoconvert ! autovideosink
|
||||
* ]| Capture and display frames from a specific camera in the desired format.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,20 +20,17 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-dfbvideosink
|
||||
* @title: dfbvideosink
|
||||
*
|
||||
* DfbVideoSink renders video frames using the
|
||||
* <ulink url="http://www.directfb.org/">DirectFB</ulink> library.
|
||||
* Rendering can happen in two different modes :
|
||||
* <itemizedlist>
|
||||
* <listitem>
|
||||
* <para>
|
||||
* Standalone: this mode will take complete control of the monitor forcing
|
||||
*
|
||||
* * Standalone: this mode will take complete control of the monitor forcing
|
||||
* <ulink url="http://www.directfb.org/">DirectFB</ulink> to fullscreen layout.
|
||||
* This is convenient to test using the gst-launch-1.0 command line tool or
|
||||
* other simple applications. It is possible to interrupt playback while
|
||||
* being in this mode by pressing the Escape key.
|
||||
* </para>
|
||||
* <para>
|
||||
* This mode handles navigation events for every input device supported by
|
||||
* the <ulink url="http://www.directfb.org/">DirectFB</ulink> library, it will
|
||||
* look for available video modes in the fb.modes file and try to switch
|
||||
|
@ -41,17 +38,12 @@
|
|||
* hardware acceleration capabilities the element will handle scaling or not.
|
||||
* If no acceleration is available it will do clipping or centering of the
|
||||
* video frames respecting the original aspect ratio.
|
||||
* </para>
|
||||
* </listitem>
|
||||
* <listitem>
|
||||
* <para>
|
||||
* Embedded: this mode will render video frames in a
|
||||
*
|
||||
* * Embedded: this mode will render video frames in a
|
||||
* #GstDfbVideoSink:surface provided by the
|
||||
* application developer. This is a more advanced usage of the element and
|
||||
* it is required to integrate video playback in existing
|
||||
* <ulink url="http://www.directfb.org/">DirectFB</ulink> applications.
|
||||
* </para>
|
||||
* <para>
|
||||
* When using this mode the element just renders to the
|
||||
* #GstDfbVideoSink:surface provided by the
|
||||
* application, that means it won't handle navigation events and won't resize
|
||||
|
@ -59,27 +51,21 @@
|
|||
* frames geometry. Application has to implement the necessary code to grab
|
||||
* informations about the negotiated geometry and resize there
|
||||
* #GstDfbVideoSink:surface accordingly.
|
||||
* </para>
|
||||
* </listitem>
|
||||
* </itemizedlist>
|
||||
*
|
||||
* For both modes the element implements a buffer pool allocation system to
|
||||
* optimize memory allocation time and handle reverse negotiation. Indeed if
|
||||
* you insert an element like videoscale in the pipeline the video sink will
|
||||
* negotiate with it to try get a scaled video for either the fullscreen layout
|
||||
* or the application provided external #GstDfbVideoSink:surface.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example application</title>
|
||||
* <para>
|
||||
* ## Example application
|
||||
*
|
||||
* <include xmlns="http://www.w3.org/2003/XInclude" href="element-dfb-example.xml" />
|
||||
* </para>
|
||||
* </refsect2>
|
||||
* <refsect2>
|
||||
* <title>Example pipelines</title>
|
||||
*
|
||||
* ## Example pipelines
|
||||
* |[
|
||||
* gst-launch-1.0 -v videotestsrc ! dfbvideosink hue=20000 saturation=40000 brightness=25000
|
||||
* ]| test the colorbalance interface implementation in dfbvideosink
|
||||
* </refsect2>
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,18 +20,18 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-dtsdec
|
||||
* @title: dtsdec
|
||||
*
|
||||
* Digital Theatre System (DTS) audio decoder
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line</title>
|
||||
* ## Example launch line
|
||||
* |[
|
||||
* gst-launch-1.0 dvdreadsrc title=1 ! mpegpsdemux ! dtsdec ! audioresample ! audioconvert ! alsasink
|
||||
* ]| Play a DTS audio track from a dvd.
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=abc.dts ! dtsdec ! audioresample ! audioconvert ! alsasink
|
||||
* ]| Decode a standalone file and play it.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,16 +20,16 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-faac
|
||||
* @title: faac
|
||||
* @see_also: faad
|
||||
*
|
||||
* faac encodes raw audio to AAC (MPEG-4 part 3) streams.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line</title>
|
||||
* ## Example launch line
|
||||
* |[
|
||||
* gst-launch-1.0 audiotestsrc wave=sine num-buffers=100 ! audioconvert ! faac ! matroskamux ! filesink location=sine.mkv
|
||||
* ]| Encode a sine beep as aac and write to matroska container.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,19 +20,19 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-faad
|
||||
* @title: faad
|
||||
* @seealso: faac
|
||||
*
|
||||
* faad decodes AAC (MPEG-4 part 3) stream.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch lines</title>
|
||||
* ## Example launch lines
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=example.mp4 ! qtdemux ! faad ! audioconvert ! audioresample ! autoaudiosink
|
||||
* ]| Play aac from mp4 file.
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=example.adts ! faad ! audioconvert ! audioresample ! autoaudiosink
|
||||
* ]| Play standalone aac bitstream.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -21,19 +21,19 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-fluiddec
|
||||
* @title: fluiddec
|
||||
* @see_also: timidity, wildmidi
|
||||
*
|
||||
* This element renders midi-events as audio streams using
|
||||
* <ulink url="http://fluidsynth.sourceforge.net//">Fluidsynth</ulink>.
|
||||
* It offers better sound quality compared to the timidity or wildmidi element.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example pipeline</title>
|
||||
* ## Example pipeline
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=song.mid ! midiparse ! fluiddec ! pulsesink
|
||||
* ]| This example pipeline will parse the midi and render to raw audio which is
|
||||
* played via pulseaudio.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -21,16 +21,16 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glbumper
|
||||
* @title: glbumper
|
||||
*
|
||||
* Bump mapping using the normal method.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 -v videotestsrc ! glupload ! glbumper location=normalmap.bmp ! glimagesink
|
||||
* ]| A pipeline to test normal mapping.
|
||||
* FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -22,16 +22,16 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glcolorbalance
|
||||
* @title: glcolorbalance
|
||||
*
|
||||
* Adjusts brightness, contrast, hue, saturation on a video stream.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line</title>
|
||||
* ## Example launch line
|
||||
* |[
|
||||
* gst-launch-1.0 videotestsrc ! glupload ! glcolorbalance saturation=0.0 ! glcolorconvert ! gldownload ! ximagesink
|
||||
* ]| This pipeline converts the image to black and white by setting the
|
||||
* saturation to 0.0.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,17 +20,15 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glcolorscale
|
||||
* @title: glcolorscale
|
||||
*
|
||||
* video frame scaling and colorspace conversion.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Scaling and Color space conversion</title>
|
||||
* <para>
|
||||
* ## Scaling and Color space conversion
|
||||
*
|
||||
* Equivalent to glupload ! gldownload.
|
||||
* </para>
|
||||
* </refsect2>
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
*
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 -v videotestsrc ! video/x-raw ! glcolorscale ! ximagesink
|
||||
* ]| A pipeline to test colorspace conversion.
|
||||
|
@ -40,7 +38,7 @@
|
|||
* video/x-raw, width=320, height=240, format=YV12 ! videoconvert ! autovideosink
|
||||
* ]| A pipeline to test hardware scaling and colorspace conversion.
|
||||
* FBO and GLSL are required.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,16 +20,16 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-deinterlace
|
||||
* @title: deinterlace
|
||||
*
|
||||
* Deinterlacing using based on fragment shaders.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 videotestsrc ! glupload ! gldeinterlace ! glimagesink
|
||||
* ]|
|
||||
* FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,16 +20,16 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-gldifferencematte.
|
||||
* @title: gldifferencematte.
|
||||
*
|
||||
* Saves a background frame and replace it with a pixbuf.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 videotestsrc ! glupload ! gldifferencemate location=backgroundimagefile ! glimagesink
|
||||
* ]|
|
||||
* FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,16 +20,16 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-gleffects.
|
||||
* @title: gleffects.
|
||||
*
|
||||
* GL Shading Language effects.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 videotestsrc ! glupload ! gleffects effect=5 ! glimagesink
|
||||
* ]|
|
||||
* FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,20 +20,18 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glfilterapp
|
||||
* @title: glfilterapp
|
||||
*
|
||||
* The resize and redraw callbacks can be set from a client code.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>CLient callbacks</title>
|
||||
* <para>
|
||||
* ## CLient callbacks
|
||||
*
|
||||
* The graphic scene can be written from a client code through the
|
||||
* two glfilterapp properties.
|
||||
* </para>
|
||||
* </refsect2>
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
*
|
||||
* ## Examples
|
||||
* see gst-plugins-gl/tests/examples/generic/recordgraphic
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,11 +20,11 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glfiltercube
|
||||
* @title: glfiltercube
|
||||
*
|
||||
* The resize and redraw callbacks can be set from a client code.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 -v videotestsrc ! glfiltercube ! glimagesink
|
||||
* ]| A pipeline to mpa textures on the 6 cube faces..
|
||||
|
@ -37,7 +37,7 @@
|
|||
* gst-launch-1.0 -v videotestsrc ! video/x-raw, width=640, height=480 ! glfiltercube ! glimagesink
|
||||
* ]| Resize scene before drawing the cube.
|
||||
* The scene size is greater than the input video size.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -21,11 +21,11 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glfilterglass
|
||||
* @title: glfilterglass
|
||||
*
|
||||
* Map textures on moving glass.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 -v videotestsrc ! glfilterglass ! glimagesink
|
||||
* ]| A pipeline inspired from http://www.mdk.org.pl/2007/11/17/gl-colorspace-conversions
|
||||
|
@ -33,7 +33,7 @@
|
|||
* |[
|
||||
* gst-launch-1.0 -v videotestsrc ! glfilterglass ! video/x-raw, width=640, height=480 ! glimagesink
|
||||
* ]| The scene is greater than the input size.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,15 +20,15 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glfilterreflectedscreen
|
||||
* @title: glfilterreflectedscreen
|
||||
*
|
||||
* Map Video Texture upon a screen, on a reflecting surface
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 videotestsrc ! glupload ! glfilterreflectedscreen ! glimagesink
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -21,11 +21,11 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glshader
|
||||
* @title: glshader
|
||||
*
|
||||
* OpenGL fragment shader filter
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 videotestsrc ! glupload ! glshader fragment="\"`cat myshader.frag`\"" ! glimagesink
|
||||
* ]|
|
||||
|
@ -51,7 +51,6 @@
|
|||
* }
|
||||
* ]|
|
||||
*
|
||||
* </refsect2>
|
||||
*/
|
||||
#ifdef HAVE_CONFIG_H
|
||||
#include "config.h"
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glimagesink
|
||||
* @title: glimagesink
|
||||
*
|
||||
* glimagesink renders video frames to a drawable on a local or remote
|
||||
* display using OpenGL. This element can receive a Window ID from the
|
||||
|
@ -34,28 +35,23 @@
|
|||
* See the #GstGLDisplay documentation for a list of environment variables that
|
||||
* can override window/platform detection.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Scaling</title>
|
||||
* <para>
|
||||
* ## Scaling
|
||||
*
|
||||
* Depends on the driver, OpenGL handles hardware accelerated
|
||||
* scaling of video frames. This means that the element will just accept
|
||||
* incoming video frames no matter their geometry and will then put them to the
|
||||
* drawable scaling them on the fly. Using the #GstGLImageSink:force-aspect-ratio
|
||||
* property it is possible to enforce scaling with a constant aspect ratio,
|
||||
* which means drawing black borders around the video frame.
|
||||
* </para>
|
||||
* </refsect2>
|
||||
* <refsect2>
|
||||
* <title>Events</title>
|
||||
* <para>
|
||||
*
|
||||
* ## Events
|
||||
*
|
||||
* Through the gl thread, glimagesink handle some events coming from the drawable
|
||||
* to manage its appearance even when the data is not flowing (GST_STATE_PAUSED).
|
||||
* That means that even when the element is paused, it will receive expose events
|
||||
* from the drawable and draw the latest frame with correct borders/aspect-ratio.
|
||||
* </para>
|
||||
* </refsect2>
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
*
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 -v videotestsrc ! video/x-raw ! glimagesink
|
||||
* ]| A pipeline to test hardware scaling.
|
||||
|
@ -80,7 +76,7 @@
|
|||
* ]| The graphic FPS scene can be greater than the input video FPS.
|
||||
* The graphic scene can be written from a client code through the
|
||||
* two glfilterapp properties.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,14 +20,14 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glmosaic
|
||||
* @title: glmosaic
|
||||
*
|
||||
* glmixer sub element. N gl sink pads to 1 source pad.
|
||||
* N + 1 OpenGL contexts shared together.
|
||||
* N <= 6 because the rendering is more a like a cube than a mosaic
|
||||
* Each opengl input stream is rendered on a cube face
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 videotestsrc ! video/x-raw, format=YUY2 ! queue ! glmosaic name=m ! glimagesink \
|
||||
* videotestsrc pattern=12 ! video/x-raw, format=I420, framerate=5/1, width=100, height=200 ! queue ! m. \
|
||||
|
@ -37,7 +37,7 @@
|
|||
* videotestsrc ! gleffects effect=6 ! queue ! m.
|
||||
* ]|
|
||||
* FBO (Frame Buffer Object) is required.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,16 +20,16 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-gloverlay
|
||||
* @title: gloverlay
|
||||
*
|
||||
* Overlay GL video texture with a PNG image
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 videotestsrc ! gloverlay location=image.jpg ! glimagesink
|
||||
* ]|
|
||||
* FBO (Frame Buffer Object) is required.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glstereomix
|
||||
* @title: glstereomix
|
||||
*
|
||||
* Combine 2 input streams to produce a stereoscopic output
|
||||
* stream. Input views are taken from the left pad and right pad
|
||||
|
@ -34,8 +35,7 @@
|
|||
* The multiview representation on the output is chosen according to
|
||||
* the downstream caps.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 -v videotestsrc pattern=ball name=left \
|
||||
* videotestsrc name=right glstereomix name=mix \
|
||||
|
@ -55,7 +55,7 @@
|
|||
* mp4mux ! progressreport ! filesink location=output.mp4
|
||||
* ]| Mix the input from a camera to the left view, and videotestsrc to the right view,
|
||||
* and encode as a top-bottom frame packed H.264 video.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
#ifdef HAVE_CONFIG_H
|
||||
#include "config.h"
|
||||
|
|
|
@ -20,16 +20,16 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glstereosplit
|
||||
* @title: glstereosplit
|
||||
*
|
||||
* Receive a stereoscopic video stream and split into left/right
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 videotestsrc ! glstereosplit name=s ! queue ! glimagesink s. ! queue ! glimagesink
|
||||
* ]|
|
||||
* FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -23,21 +23,19 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-gltestsrc
|
||||
* @title: gltestsrc
|
||||
*
|
||||
* <refsect2>
|
||||
* <para>
|
||||
* The gltestsrc element is used to produce test video texture.
|
||||
* The video test produced can be controlled with the "pattern"
|
||||
* property.
|
||||
* </para>
|
||||
* <title>Example launch line</title>
|
||||
* <para>
|
||||
* <programlisting>
|
||||
*
|
||||
* ## Example launch line
|
||||
*
|
||||
* |[
|
||||
* gst-launch-1.0 -v gltestsrc pattern=smpte ! glimagesink
|
||||
* </programlisting>
|
||||
* ]|
|
||||
* Shows original SMPTE color bars in a window.
|
||||
* </para>
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -21,11 +21,11 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-gltransformation
|
||||
* @title: gltransformation
|
||||
*
|
||||
* Transforms video on the GPU.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 gltestsrc ! gltransformation rotation-z=45 ! glimagesink
|
||||
* ]| A pipeline to rotate by 45 degrees
|
||||
|
@ -38,7 +38,7 @@
|
|||
* |[
|
||||
* gst-launch-1.0 gltestsrc ! gltransformation rotation-x=-45 ortho=True ! glimagesink
|
||||
* ]| Rotate the video around the X-Axis by -45° with an orthographic projection
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,15 +20,15 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glvideo_flip
|
||||
* @title: glvideo_flip
|
||||
*
|
||||
* Transforms video on the GPU.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 videotestsrc ! glupload ! glvideoflip method=clockwise ! glimagesinkelement
|
||||
* ]| This pipeline flips the test image 90 degrees clockwise.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,13 +20,13 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glvideomixer
|
||||
* @title: glvideomixer
|
||||
*
|
||||
* Composites a number of streams into a single output scene using OpenGL in
|
||||
* a similar fashion to compositor and videomixer. See the compositor plugin
|
||||
* for documentation about the #GstGLVideoMixerPad properties.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 glvideomixer name=m ! glimagesink \
|
||||
* videotestsrc ! video/x-raw, format=YUY2 ! glupload ! glcolorconvert ! m. \
|
||||
|
@ -36,7 +36,7 @@
|
|||
* videotestsrc ! glupload ! glfiltercube ! queue ! m. \
|
||||
* videotestsrc ! glupload ! gleffects effect=6 ! queue ! m.gst-launch-1.0 glvideomixer name=m ! glimagesink \
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -22,14 +22,14 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-glviewconvert
|
||||
* @title: glviewconvert
|
||||
*
|
||||
* Convert stereoscopic video between different representations using fragment shaders.
|
||||
*
|
||||
* The element can use either property settings or caps negotiation to choose the
|
||||
* input and output formats to process.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 videotestsrc ! glupload ! glviewconvert ! glimagesink
|
||||
* ]| Simple placebo example demonstrating identity passthrough of mono video
|
||||
|
@ -39,7 +39,7 @@
|
|||
* ]| Force re-interpretation of the input checkers pattern as a side-by-side stereoscopic
|
||||
* image and display in glimagesink.
|
||||
* FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -23,20 +23,20 @@
|
|||
|
||||
/**
|
||||
* SECTION:plugin-opengl
|
||||
* @title: GstOpengl
|
||||
*
|
||||
* Cross-platform OpenGL plugin.
|
||||
* <refsect2>
|
||||
* <title>Debugging</title>
|
||||
* </refsect2>
|
||||
* <refsect2>
|
||||
* <title>Examples</title>
|
||||
*
|
||||
* ## Debugging
|
||||
*
|
||||
* ## Examples
|
||||
* |[
|
||||
* gst-launch-1.0 --gst-debug=gldisplay:3 videotestsrc ! glimagesink
|
||||
* ]| A debugging pipeline.
|
||||
|[
|
||||
* GST_DEBUG=gl*:6 gst-launch-1.0 videotestsrc ! glimagesink
|
||||
* ]| A debugging pipelines related to shaders.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:gtkgstsink
|
||||
* @title: GstGtkBaseSink
|
||||
*
|
||||
*/
|
||||
|
||||
|
|
|
@ -19,8 +19,8 @@
|
|||
*/
|
||||
|
||||
/**
|
||||
* SECTION:gstgtkglsink
|
||||
*
|
||||
* SECTION:element-gtkglsink
|
||||
* @title: gtkglsink
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -19,7 +19,8 @@
|
|||
*/
|
||||
|
||||
/**
|
||||
* SECTION:gtkgstsink
|
||||
* SECTION:element-gtkgstsink
|
||||
* @title: gtkgstsink
|
||||
*
|
||||
*/
|
||||
|
||||
|
|
|
@ -41,6 +41,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:gtkgstglwidget
|
||||
* @title: GtkGstGlWidget
|
||||
* @short_description: a #GtkGLArea that renders GStreamer video #GstBuffers
|
||||
* @see_also: #GtkGLArea, #GstBuffer
|
||||
*
|
||||
|
|
|
@ -29,6 +29,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:gtkgstwidget
|
||||
* @title: GtkGstWidget
|
||||
* @short_description: a #GtkWidget that renders GStreamer video #GstBuffers
|
||||
* @see_also: #GtkDrawingArea, #GstBuffer
|
||||
*
|
||||
|
|
|
@ -26,15 +26,15 @@
|
|||
*/
|
||||
/**
|
||||
* SECTION:element-hlsdemux
|
||||
* @title: hlsdemux
|
||||
*
|
||||
* HTTP Live Streaming demuxer element.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line</title>
|
||||
* ## Example launch line
|
||||
* |[
|
||||
* gst-launch-1.0 souphttpsrc location=http://devimages.apple.com/iphone/samples/bipbop/gear4/prog_index.m3u8 ! hlsdemux ! decodebin ! videoconvert ! videoscale ! autovideosink
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -19,15 +19,15 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-hlssink
|
||||
* @title: hlssink
|
||||
*
|
||||
* HTTP Live Streaming sink/server
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line</title>
|
||||
* ## Example launch line
|
||||
* |[
|
||||
* gst-launch-1.0 videotestsrc is-live=true ! x264enc ! mpegtsmux ! hlssink max-files=5
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
#ifdef HAVE_CONFIG_H
|
||||
#include "config.h"
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-iqa
|
||||
* @title: iqa
|
||||
* @short_description: Image Quality Assessment plugin.
|
||||
*
|
||||
* IQA will perform full reference image quality assessment, with the
|
||||
|
@ -48,13 +49,12 @@
|
|||
* sink_2\=\(double\)0.0082939683976297474\;",
|
||||
* time=(guint64)0;
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line</title>
|
||||
* ## Example launch line
|
||||
* |[
|
||||
* gst-launch-1.0 -m uridecodebin uri=file:///test/file/1 ! iqa name=iqa do-dssim=true \
|
||||
* ! videoconvert ! autovideosink uridecodebin uri=file:///test/file/2 ! iqa.
|
||||
* ]| This pipeline will output messages to the console for each set of compared frames.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -45,33 +45,29 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-katedec
|
||||
* @title: katedec
|
||||
* @see_also: oggdemux
|
||||
*
|
||||
* <refsect2>
|
||||
* <para>
|
||||
* This element decodes Kate streams
|
||||
* <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec
|
||||
* for text based data, such as subtitles. Any number of kate streams can be
|
||||
* embedded in an Ogg stream.
|
||||
* </para>
|
||||
* <para>
|
||||
*
|
||||
* libkate (see above url) is needed to build this plugin.
|
||||
* </para>
|
||||
* <title>Example pipeline</title>
|
||||
* <para>
|
||||
*
|
||||
* ## Example pipeline
|
||||
*
|
||||
* This explicitely decodes a Kate stream:
|
||||
* <programlisting>
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=test.ogg ! oggdemux ! katedec ! fakesink silent=TRUE
|
||||
* </programlisting>
|
||||
* </para>
|
||||
* <para>
|
||||
* ]|
|
||||
*
|
||||
* This will automatically detect and use any Kate streams multiplexed
|
||||
* in an Ogg stream:
|
||||
* <programlisting>
|
||||
* |[
|
||||
* gst-launch-1.0 playbin uri=file:///tmp/test.ogg
|
||||
* </programlisting>
|
||||
* </para>
|
||||
* </refsect2>
|
||||
* ]|
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -46,26 +46,23 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-kateenc
|
||||
* @title: kateenc
|
||||
* @see_also: oggmux
|
||||
*
|
||||
* <refsect2>
|
||||
* <para>
|
||||
* This element encodes Kate streams
|
||||
* <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec
|
||||
* for text based data, such as subtitles. Any number of kate streams can be
|
||||
* embedded in an Ogg stream.
|
||||
* </para>
|
||||
* <para>
|
||||
*
|
||||
* libkate (see above url) is needed to build this plugin.
|
||||
* </para>
|
||||
* <title>Example pipeline</title>
|
||||
* <para>
|
||||
*
|
||||
* ## Example pipeline
|
||||
*
|
||||
* This encodes a DVD SPU track to a Kate stream:
|
||||
* <programlisting>
|
||||
* |[
|
||||
* gst-launch-1.0 dvdreadsrc ! dvddemux ! dvdsubparse ! kateenc category=spu-subtitles ! oggmux ! filesink location=test.ogg
|
||||
* </programlisting>
|
||||
* </para>
|
||||
* </refsect2>
|
||||
* ]|
|
||||
*
|
||||
*/
|
||||
|
||||
/* FIXME:
|
||||
|
|
|
@ -21,40 +21,35 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-kateparse
|
||||
* @title: kateparse
|
||||
* @short_description: parses kate streams
|
||||
* @see_also: katedec, vorbisparse, oggdemux, theoraparse
|
||||
*
|
||||
* <refsect2>
|
||||
* <para>
|
||||
* The kateparse element will parse the header packets of the Kate
|
||||
* stream and put them as the streamheader in the caps. This is used in the
|
||||
* multifdsink case where you want to stream live kate streams to multiple
|
||||
* clients, each client has to receive the streamheaders first before they can
|
||||
* consume the kate packets.
|
||||
* </para>
|
||||
* <para>
|
||||
*
|
||||
* This element also makes sure that the buffers that it pushes out are properly
|
||||
* timestamped and that their offset and offset_end are set. The buffers that
|
||||
* kateparse outputs have all of the metadata that oggmux expects to receive,
|
||||
* which allows you to (for example) remux an ogg/kate file.
|
||||
* </para>
|
||||
* <title>Example pipelines</title>
|
||||
* <para>
|
||||
* <programlisting>
|
||||
*
|
||||
* ## Example pipelines
|
||||
*
|
||||
* |[
|
||||
* gst-launch-1.0 -v filesrc location=kate.ogg ! oggdemux ! kateparse ! fakesink
|
||||
* </programlisting>
|
||||
* ]|
|
||||
* This pipeline shows that the streamheader is set in the caps, and that each
|
||||
* buffer has the timestamp, duration, offset, and offset_end set.
|
||||
* </para>
|
||||
* <para>
|
||||
* <programlisting>
|
||||
*
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=kate.ogg ! oggdemux ! kateparse \
|
||||
* ! oggmux ! filesink location=kate-remuxed.ogg
|
||||
* </programlisting>
|
||||
* ]|
|
||||
* This pipeline shows remuxing. kate-remuxed.ogg might not be exactly the same
|
||||
* as kate.ogg, but they should produce exactly the same decoded data.
|
||||
* </para>
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
|
|
|
@ -21,46 +21,41 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-katetag
|
||||
* @title: katetag
|
||||
* @see_also: #oggdemux, #oggmux, #kateparse, #GstTagSetter
|
||||
* @short_description: retags kate streams
|
||||
*
|
||||
* <refsect2>
|
||||
* <para>
|
||||
* The katetag element can change the tag contained within a raw
|
||||
* kate stream. Specifically, it modifies the comments header packet
|
||||
* of the kate stream, as well as the language and category of the
|
||||
* kate stream.
|
||||
* </para>
|
||||
* <para>
|
||||
*
|
||||
* The element will also process the stream as the #kateparse element does
|
||||
* so it can be used when remuxing an Ogg Kate stream, without additional
|
||||
* elements.
|
||||
* </para>
|
||||
* <para>
|
||||
*
|
||||
* Applications can set the tags to write using the #GstTagSetter interface.
|
||||
* Tags contained within the kate stream will be picked up
|
||||
* automatically (and merged according to the merge mode set via the tag
|
||||
* setter interface).
|
||||
* </para>
|
||||
* <title>Example pipelines</title>
|
||||
* <para>
|
||||
*
|
||||
* ## Example pipelines
|
||||
*
|
||||
* This element is only useful with gst-launch-1.0 for modifying the language
|
||||
* and/or category (which are properties of the stream located in the kate
|
||||
* beginning of stream header), because it does not support setting the tags
|
||||
* on a #GstTagSetter interface. Conceptually, the element will usually be
|
||||
* used like:
|
||||
* <programlisting>
|
||||
* |[
|
||||
* gst-launch-1.0 -v filesrc location=foo.ogg ! oggdemux ! katetag ! oggmux ! filesink location=bar.ogg
|
||||
* </programlisting>
|
||||
* </para>
|
||||
* <para>
|
||||
* ]|
|
||||
*
|
||||
* This pipeline will set the language and category of the stream to the
|
||||
* given values:
|
||||
* <programlisting>
|
||||
* |[
|
||||
* gst-launch-1.0 -v filesrc location=foo.ogg ! oggdemux ! katetag language=pt_BR category=subtitles ! oggmux ! filesink location=bar.ogg
|
||||
* </programlisting>
|
||||
* </para>
|
||||
* </refsect2>
|
||||
* ]|
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -45,32 +45,29 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-tiger
|
||||
* @title: tiger
|
||||
* @see_also: katedec
|
||||
*
|
||||
* <refsect2>
|
||||
* <para>
|
||||
* This element decodes and renders Kate streams
|
||||
* <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec
|
||||
* for text based data, such as subtitles. Any number of kate streams can be
|
||||
* embedded in an Ogg stream.
|
||||
* </para>
|
||||
* <para>
|
||||
*
|
||||
* libkate (see above url) and <ulink url="http://libtiger.googlecode.com/">libtiger</ulink>
|
||||
* are needed to build this element.
|
||||
* </para>
|
||||
* <title>Example pipeline</title>
|
||||
* <para>
|
||||
*
|
||||
* ## Example pipeline
|
||||
*
|
||||
* This pipeline renders a Kate stream on top of a Theora video multiplexed
|
||||
* in the same stream:
|
||||
* <programlisting>
|
||||
* |[
|
||||
* gst-launch-1.0 \
|
||||
* filesrc location=video.ogg ! oggdemux name=demux \
|
||||
* demux. ! queue ! theoradec ! videoconvert ! tiger name=tiger \
|
||||
* demux. ! queue ! kateparse ! tiger. \
|
||||
* tiger. ! videoconvert ! autovideosink
|
||||
* </programlisting>
|
||||
* </para>
|
||||
* </refsect2>
|
||||
* ]|
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -22,6 +22,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-ladspa
|
||||
* @title: ladspa
|
||||
* @short_description: bridge for LADSPA (Linux Audio Developer's Simple Plugin API)
|
||||
* @see_also: #GstAudioConvert #GstAudioResample, #GstAudioTestSrc, #GstAutoAudioSink
|
||||
*
|
||||
|
@ -32,8 +33,7 @@
|
|||
* element classification. The functionality you get depends on the LADSPA plugins
|
||||
* you have installed.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example LADSPA line without this plugins</title>
|
||||
* ## Example LADSPA line without this plugins
|
||||
* |[
|
||||
* (padsp) listplugins
|
||||
* (padsp) analyseplugin cmt.so amp_mono
|
||||
|
@ -41,16 +41,13 @@
|
|||
* (padsp) applyplugin testin.wav testout.wav cmt.so amp_mono 2
|
||||
* gst-launch-1.0 playbin uri=file://"$PWD"/testout.wav
|
||||
* ]| Decode any audio file into wav with the format expected for the specific ladspa plugin to be applied, apply the ladspa filter and play it.
|
||||
* </refsect2>
|
||||
*
|
||||
* Now with this plugin:
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example LADSPA line with this plugins</title>
|
||||
* ## Example LADSPA line with this plugins
|
||||
* |[
|
||||
* gst-launch-1.0 autoaudiosrc ! ladspa-cmt-so-amp-mono gain=2 ! ladspa-caps-so-plate ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! tee name=myT myT. ! queue ! autoaudiosink myT. ! queue ! audioconvert ! goom ! videoconvert ! xvimagesink pixel-aspect-ratio=3/4
|
||||
* ]| Get audio input, filter it through CAPS Plate and TAP Stereo Echo, play it and show a visualization (recommended hearphones).
|
||||
* </refsect2>
|
||||
*
|
||||
* In case you wonder the plugin naming scheme, quoting ladspa.h:
|
||||
* "Plugin types should be identified by file and label rather than by
|
||||
|
@ -61,60 +58,52 @@
|
|||
* on top of the audio in and out one, so some parameters are readable too.
|
||||
*
|
||||
* You can see the listing of plugins available with:
|
||||
* <refsect2>
|
||||
* <title>Inspecting the plugins list</title>
|
||||
*
|
||||
* ## Inspecting the plugins list
|
||||
* |[
|
||||
* gst-inspect ladspa
|
||||
* ]| List available LADSPA plugins on gstreamer.
|
||||
* </refsect2>
|
||||
*
|
||||
* You can see the parameters of any plugin with:
|
||||
* <refsect2>
|
||||
* <title>Inspecting the plugins</title>
|
||||
*
|
||||
* ## Inspecting the plugins
|
||||
* |[
|
||||
* gst-inspect ladspa-retro-flange-1208-so-retroflange
|
||||
* ]| List details of the plugin, parameters, range and defaults included.
|
||||
* </refsect2>
|
||||
*
|
||||
* The elements categorize in:
|
||||
* <itemizedlist>
|
||||
* <listitem><para>Filter/Effect/Audio/LADSPA:</para>
|
||||
* <refsect2>
|
||||
* <title>Example Filter/Effect/Audio/LADSPA line with this plugins</title>
|
||||
*
|
||||
* * Filter/Effect/Audio/LADSPA:
|
||||
*
|
||||
* ## Example Filter/Effect/Audio/LADSPA line with this plugins
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location="$myfile" ! decodebin ! audioconvert ! audioresample ! ladspa-calf-so-reverb decay-time=15 high-frq-damp=20000 room-size=5 diffusion=1 wet-amount=2 dry-amount=2 pre-delay=50 bass-cut=20000 treble-cut=20000 ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! autoaudiosink
|
||||
* ]| Decode any audio file, filter it through Calf Reverb LADSPA then TAP Stereo Echo, and play it.
|
||||
* </refsect2>
|
||||
* </listitem>
|
||||
* <listitem><para>Source/Audio/LADSPA:</para>
|
||||
* <refsect2>
|
||||
* <title>Example Source/Audio/LADSPA line with this plugins</title>
|
||||
*
|
||||
* * Source/Audio/LADSPA:
|
||||
*
|
||||
* ## Example Source/Audio/LADSPA line with this plugins
|
||||
* |[
|
||||
* gst-launch-1.0 ladspasrc-sine-so-sine-fcac frequency=220 amplitude=100 ! audioconvert ! autoaudiosink
|
||||
* ]| Generate a sine wave with Sine Oscillator (Freq:control, Amp:control) and play it.
|
||||
* </refsect2>
|
||||
* <refsect2>
|
||||
* <title>Example Source/Audio/LADSPA line with this plugins</title>
|
||||
*
|
||||
* ## Example Source/Audio/LADSPA line with this plugins
|
||||
* |[
|
||||
* gst-launch-1.0 ladspasrc-caps-so-click bpm=240 volume=1 ! autoaudiosink
|
||||
* ]| Generate clicks with CAPS Click - Metronome at 240 beats per minute and play it.
|
||||
* </refsect2>
|
||||
* <refsect2>
|
||||
* <title>Example Source/Audio/LADSPA line with this plugins</title>
|
||||
*
|
||||
* ## Example Source/Audio/LADSPA line with this plugins
|
||||
* |[
|
||||
* gst-launch-1.0 ladspasrc-random-1661-so-random-fcsc-oa ! ladspa-cmt-so-amp-mono gain=1.5 ! ladspa-caps-so-plate ! tee name=myT myT. ! queue ! autoaudiosink myT. ! queue ! audioconvert ! wavescope ! videoconvert ! autovideosink
|
||||
* ]| Generate random wave, filter it trhough Mono Amplifier and Versatile Plate Reverb, and play, while showing, it.
|
||||
* </refsect2>
|
||||
* </listitem>
|
||||
* <listitem><para>Sink/Audio/LADSPA:</para>
|
||||
* <refsect2>
|
||||
* <title>Example Sink/Audio/LADSPA line with this plugins</title>
|
||||
*
|
||||
* * Sink/Audio/LADSPA:
|
||||
*
|
||||
* ## Example Sink/Audio/LADSPA line with this plugins
|
||||
* |[
|
||||
* gst-launch-1.0 autoaudiosrc ! ladspa-cmt-so-amp-mono gain=2 ! ladspa-caps-so-plate ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! tee name=myT myT. ! audioconvert ! audioresample ! queue ! ladspasink-cmt-so-null-ai myT. ! audioconvert ! audioresample ! queue ! goom ! videoconvert ! xvimagesink pixel-aspect-ratio=3/4
|
||||
* ]| Get audio input, filter it trhough Mono Amplifier, CAPS Plate LADSPA and TAP Stereo Echo, explicitily anulate audio with Null (Audio Output), and play a visualization (recommended hearphones).
|
||||
* </refsect2>
|
||||
* </listitem>
|
||||
* </itemizedlist>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -21,15 +21,15 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-libde265dec
|
||||
* @title: libde265dec
|
||||
*
|
||||
* Decodes HEVC/H.265 video.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line</title>
|
||||
* ## Example launch line
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=bitstream.hevc ! 'video/x-hevc,stream-format=byte-stream,framerate=25/1' ! libde265dec ! autovideosink
|
||||
* ]| The above pipeline decodes the HEVC/H.265 bitstream and renders it to the screen.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -22,6 +22,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-lv2
|
||||
* @title: lv2
|
||||
* @short_description: bridge for LV2.
|
||||
*
|
||||
* LV2 is a standard for plugins and matching host applications,
|
||||
|
|
|
@ -24,6 +24,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-openalsink
|
||||
* @title: openalsink
|
||||
* @see_also: openalsrc
|
||||
* @short_description: capture raw audio samples through OpenAL
|
||||
*
|
||||
|
@ -31,8 +32,7 @@
|
|||
*
|
||||
* Unfortunately the capture API doesn't have a format enumeration/check. all you can do is try opening it and see if it works.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example pipelines</title>
|
||||
* ## Example pipelines
|
||||
* |[
|
||||
* gst-launch-1.0 audiotestsrc ! audioconvert ! volume volume=0.5 ! openalsink
|
||||
* ]| will play a sine wave (continuous beep sound) through OpenAL.
|
||||
|
@ -42,7 +42,7 @@
|
|||
* |[
|
||||
* gst-launch-1.0 openalsrc ! "audio/x-raw,format=S16LE,rate=44100" ! audioconvert ! volume volume=0.25 ! openalsink
|
||||
* ]| will capture and play audio through OpenAL.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
/*
|
||||
|
|
|
@ -49,20 +49,20 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-openalsrc
|
||||
* @title: openalsrc
|
||||
* @see_also: openalsink
|
||||
* @short_description: capture raw audio samples through OpenAL
|
||||
*
|
||||
* This element captures raw audio samples through OpenAL.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example pipelines</title>
|
||||
* ## Example pipelines
|
||||
* |[
|
||||
* gst-launch-1.0 -v openalsrc ! audioconvert ! wavenc ! filesink location=stream.wav
|
||||
* ]| * will capture sound through OpenAL and encode it to a wav file.
|
||||
* |[
|
||||
* gst-launch-1.0 openalsrc ! "audio/x-raw,format=S16LE,rate=44100" ! audioconvert ! volume volume=0.25 ! openalsink
|
||||
* ]| will capture and play audio through OpenAL.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
/*
|
||||
|
|
|
@ -22,16 +22,16 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-opusparse
|
||||
* @title: opusparse
|
||||
* @see_also: opusenc, opusdec
|
||||
*
|
||||
* This element parses OPUS packets.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example pipelines</title>
|
||||
* ## Example pipelines
|
||||
* |[
|
||||
* gst-launch-1.0 -v filesrc location=opusdata ! opusparse ! opusdec ! audioconvert ! audioresample ! alsasink
|
||||
* ]| Decode and plays an unmuxed Opus file.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -369,7 +369,7 @@ rsn_dec_get_type (void)
|
|||
return type;
|
||||
}
|
||||
|
||||
/** Audio decoder subclass */
|
||||
/* Audio decoder subclass */
|
||||
static GstStaticPadTemplate audio_sink_template =
|
||||
GST_STATIC_PAD_TEMPLATE ("sink",
|
||||
GST_PAD_SINK,
|
||||
|
@ -422,7 +422,7 @@ rsn_audiodec_init (RsnAudioDec * self)
|
|||
{
|
||||
}
|
||||
|
||||
/** Video decoder subclass */
|
||||
/* Video decoder subclass */
|
||||
static GstStaticPadTemplate video_sink_template =
|
||||
GST_STATIC_PAD_TEMPLATE ("sink",
|
||||
GST_PAD_SINK,
|
||||
|
|
|
@ -25,6 +25,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-input-selector
|
||||
* @title: input-selector
|
||||
* @see_also: #GstOutputSelector
|
||||
*
|
||||
* Direct one out of N input streams to the output pad.
|
||||
|
@ -32,21 +33,11 @@
|
|||
* The input pads are from a GstPad subclass and have additional
|
||||
* properties, which users may find useful, namely:
|
||||
*
|
||||
* <itemizedlist>
|
||||
* <listitem>
|
||||
* "running-time": Running time of stream on pad (#gint64)
|
||||
* </listitem>
|
||||
* <listitem>
|
||||
* "tags": The currently active tags on the pad (#GstTagList, boxed type)
|
||||
* </listitem>
|
||||
* <listitem>
|
||||
* "active": If the pad is currently active (#gboolean)
|
||||
* </listitem>
|
||||
* <listitem>
|
||||
* "always-ok" : Make an inactive pads return #GST_FLOW_OK instead of
|
||||
* #GST_FLOW_NOT_LINKED
|
||||
* </listitem>
|
||||
* </itemizedlist>
|
||||
* * "running-time": Running time of stream on pad (#gint64)
|
||||
* * "tags": The currently active tags on the pad (#GstTagList, boxed type)
|
||||
* * "active": If the pad is currently active (#gboolean)
|
||||
* * "always-ok" : Make an inactive pads return #GST_FLOW_OK instead of #GST_FLOW_NOT_LINKED
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -19,15 +19,15 @@
|
|||
*/
|
||||
/**
|
||||
* SECTION:element-rsvgdec
|
||||
* @title: rsvgdec
|
||||
*
|
||||
* This elements renders SVG graphics.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch lines</title>
|
||||
* ## Example launch lines
|
||||
* |[
|
||||
* gst-launch-1.0 filesrc location=image.svg ! rsvgdec ! imagefreeze ! videoconvert ! autovideosink
|
||||
* ]| render and show a svg image.
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -21,6 +21,7 @@
|
|||
|
||||
/**
|
||||
* SECTION:element-rsvgoverlay
|
||||
* @title: rsvgoverlay
|
||||
*
|
||||
* This elements overlays SVG graphics over the video. SVG data can
|
||||
* either be specified through properties, or fed through the
|
||||
|
@ -44,8 +45,7 @@
|
|||
* the values of the x/y/width/height attributes, by setting
|
||||
* height-/width-relative to 1.0. and all other attributes to 0.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch lines</title>
|
||||
* ## Example launch lines
|
||||
* |[
|
||||
* gst-launch-1.0 -v videotestsrc ! videoconvert ! rsvgoverlay location=foo.svg ! videoconvert ! autovideosink
|
||||
* ]| specifies the SVG location through the filename property.
|
||||
|
@ -55,7 +55,7 @@
|
|||
* |[
|
||||
* gst-launch-1.0 -v videotestsrc ! videoconvert ! rsvgoverlay data='<svg viewBox="0 0 800 600"><image x="80%" y="80%" width="10%" height="10%" xlink:href="foo.jpg" /></svg>' ! videoconvert ! autovideosink
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
|
|
|
@ -20,6 +20,7 @@

/**
* SECTION:element-rtmpsink
* @title: rtmpsink
*
* This element delivers data to a streaming server via RTMP. It uses
* librtmp, and supports any protocols/urls that librtmp supports.

@ -27,12 +28,11 @@

* for librtmp, such as 'flashver=version'. See the librtmp documentation
* for more detail.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 -v videotestsrc ! ffenc_flv ! flvmux ! rtmpsink location='rtmp://localhost/path/to/stream live=1'
* ]| Encode a test video stream to FLV video format and stream it via RTMP.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H

@ -26,17 +26,17 @@

/**
* SECTION:element-rtmpsrc
* @title: rtmpsrc
*
* This plugin reads data from a local or remote location specified
* by a URI. This location can be specified using any protocol supported by
* the RTMP library, i.e. rtmp, rtmpt, rtmps, rtmpe, rtmfp, rtmpte and rtmpts.
*
* <refsect2>
* <title>Example launch lines</title>
* ## Example launch lines
* |[
* gst-launch-1.0 -v rtmpsrc location=rtmp://somehost/someurl ! fakesink
* ]| Open an RTMP location and pass its content to fakesink.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H

@ -21,15 +21,15 @@

/**
* SECTION:element-sbdec
* @title: sbdec
*
* This element decodes Bluetooth SBC audio streams to raw integer PCM audio.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v filesrc location=audio.sbc ! sbcparse ! sbcdec ! audioconvert ! audioresample ! autoaudiosink
* ]| Decode a raw SBC file.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H

@ -20,6 +20,7 @@

/**
* SECTION:element-sbenc
* @title: sbenc
*
* This element encodes raw integer PCM audio into Bluetooth SBC audio.
*

@ -27,12 +28,11 @@

* allocation-mode can be set by adding a capsfilter element with appropriate
* filtercaps after the sbcenc encoder element.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v audiotestsrc ! sbcenc ! rtpsbcpay ! udpsink
* ]| Encode a sine wave into SBC, RTP payload it and send over the network using UDP.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H

@ -22,10 +22,10 @@

/**
* SECTION:element-mssdemux
* @title: mssdemux
*
* Demuxes a Microsoft Smooth Streaming manifest into its audio and/or video streams.
*
*
*/

/*

@ -24,36 +24,21 @@

/**
* SECTION:element-dtmfdetect
* @title: dtmfdetect
* @short_description: Detects DTMF tones
*
* This element will detect DTMF tones and emit messages.
*
* The message is called <classname>"dtmf-event"</classname> and has
* the following fields:
* <itemizedlist>
* <listitem>
* <para>
* gint <classname>type</classname> (0-1):
* The application uses this field to specify which of the two methods
* The message is called `dtmf-event` and has the following fields:
*
* * gint `type` (0-1): The application uses this field to specify which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are
* specified by their number. This element can only take events as input.
* Do not confuse with "method", which specifies the output.
* </para>
* </listitem>
* <listitem>
* <para>
* gint <classname>number</classname> (0-16):
* The event number.
* </para>
* </listitem>
* <listitem>
* <para>
* gint <classname>method</classname> (2):
* This field will always been 2 (ie sound) from this element.
* </para>
* </listitem>
* </itemizedlist>
* * gint `number` (0-16): The event number.
* * gint `method` (2): This field will always be 2 (i.e. sound) from this element.
*
*/
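An application typically picks these element messages up from the pipeline bus. A minimal sketch of such a handler (the callback itself is illustrative and not part of the element):

|[
#include <gst/gst.h>

/* Sketch: handle "dtmf-event" element messages posted on the bus. */
static gboolean
on_bus_message (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (msg);

    if (s != NULL && gst_structure_has_name (s, "dtmf-event")) {
      gint type = 0, number = 0, method = 0;

      gst_structure_get_int (s, "type", &type);
      gst_structure_get_int (s, "number", &number);
      gst_structure_get_int (s, "method", &method);
      g_print ("DTMF event: type=%d number=%d method=%d\n",
          type, number, method);
    }
  }
  return TRUE;                  /* keep watching the bus */
}
]|

Such a handler would typically be attached with gst_bus_add_watch().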

#ifdef HAVE_CONFIG_H

@ -20,6 +20,7 @@

/**
* SECTION:element-spanplc
* @title: spanplc
*
* The spanplc (Packet Loss Concealment) element provides a synthetic
* fill-in signal, to minimise the audible effect of lost packets in

@ -46,6 +46,7 @@

/**
* SECTION:element-srtpdec
* @title: srtpdec
* @see_also: srtpenc
*
* srtpdec acts as a decoder that removes security from SRTP and SRTCP

@ -95,8 +96,7 @@

* other means. If no rollover counter is provided by the user, 0 is
* used by default.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 udpsrc port=5004 caps='application/x-srtp, payload=(int)8, ssrc=(uint)1356955624, srtp-key=(buffer)012345678901234567890123456789012345678901234567890123456789, srtp-cipher=(string)aes-128-icm, srtp-auth=(string)hmac-sha1-80, srtcp-cipher=(string)aes-128-icm, srtcp-auth=(string)hmac-sha1-80' ! srtpdec ! rtppcmadepay ! alawdec ! pulsesink
* ]| Receive PCMA SRTP packets through UDP using caps to specify

@ -105,7 +105,7 @@

* gst-launch-1.0 audiotestsrc ! alawenc ! rtppcmapay ! 'application/x-rtp, payload=(int)8, ssrc=(uint)1356955624' ! srtpenc key="012345678901234567890123456789012345678901234567890123456789" ! udpsink port=5004
* ]| Send PCMA SRTP packets through UDP, noting how the SSRC is forced so
* that the receiver will recognize it.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H

@ -45,7 +45,8 @@

*/

/**
* SECTION:gst-plugin-bad-plugins-srtpenc
* SECTION:element-srtpenc
* @title: srtpenc
* @see_also: srtpdec
*
* srtpenc acts as an encoder that adds security to RTP and RTCP

@ -21,16 +21,16 @@

/**
* SECTION:element-teletextdec
* @title: teletextdec
*
* Decode a stream of raw VBI packets containing teletext information to an RGBA
* stream.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 -v -m filesrc location=recording.mpeg ! tsdemux ! teletextdec ! videoconvert ! ximagesink
* ]|
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H

@ -22,6 +22,7 @@

/**
* SECTION:element-ttmlparse
* @title: ttmlparse
*
* Parses timed text subtitle files described using Timed Text Markup Language
* (TTML). Currently, only the EBU-TT-D profile of TTML, designed for

@ -35,13 +36,12 @@

* elements. A downstream renderer element uses this information to correctly
* render the text on top of video frames.
*
* <refsect2>
* <title>Example launch lines</title>
* ## Example launch lines
* |[
* gst-launch-1.0 filesrc location=<media file location> ! video/quicktime ! qtdemux name=q ttmlrender name=r q. ! queue ! h264parse ! avdec_h264 ! autovideoconvert ! r.video_sink filesrc location=<subtitle file location> blocksize=16777216 ! queue ! ttmlparse ! r.text_sink r. ! ximagesink q. ! queue ! aacparse ! avdec_aac ! audioconvert ! alsasink
* ]| Parse and render TTML subtitles contained in a single XML file over an
* MP4 stream containing H.264 video and AAC audio.
* </refsect2>
*
*/

#include <stdio.h>

@ -25,19 +25,19 @@

/**
* SECTION:element-ttmlrender
* @title: ttmlrender
*
* Renders timed text on top of a video stream. It receives text in buffers
* from a ttmlparse element; each text string is in its own #GstMemory within
* the GstBuffer, and the styling and layout associated with each text string
* is in metadata attached to the #GstBuffer.
*
* <refsect2>
* <title>Example launch lines</title>
* ## Example launch lines
* |[
* gst-launch-1.0 filesrc location=<media file location> ! video/quicktime ! qtdemux name=q ttmlrender name=r q. ! queue ! h264parse ! avdec_h264 ! autovideoconvert ! r.video_sink filesrc location=<subtitle file location> blocksize=16777216 ! queue ! ttmlparse ! r.text_sink r. ! ximagesink q. ! queue ! aacparse ! avdec_aac ! audioconvert ! alsasink
* ]| Parse and render TTML subtitles contained in a single XML file over an
* MP4 stream containing H.264 video and AAC audio.
* </refsect2>
*
*/

#include <gst/video/video.h>

@ -20,6 +20,7 @@

/**
* SECTION:gstsubtitle
* @title: GstSubtitle
* @short_description: Library for describing sets of static subtitles.
*
* This library enables the description of static text scenes made up of a

@ -20,6 +20,7 @@

/**
* SECTION:gstsubtitlemeta
* @title: GstSubtitleMeta
* @short_description: Metadata class for timed-text subtitles.
*
* The GstSubtitleMeta class enables the layout and styling information needed

@ -19,16 +19,16 @@

/**
* SECTION:element-voaacenc
* @title: voaacenc
*
* AAC audio encoder based on the vo-aacenc library
* <ulink url="http://sourceforge.net/projects/opencore-amr/files/vo-aacenc/">vo-aacenc library source file</ulink>.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch-1.0 filesrc location=abc.wav ! wavparse ! audioresample ! audioconvert ! voaacenc ! filesink location=abc.aac
* ]|
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H

@ -19,19 +19,19 @@

/**
* SECTION:element-voamrwbenc
* @title: voamrwbenc
* @see_also: #GstAmrWbDec, #GstAmrWbParse
*
* AMR wideband encoder based on the
* <ulink url="http://www.penguin.cz/~utx/amr">reference codec implementation</ulink>.
*
* <refsect2>
* <title>Example launch line</title>
* ## Example launch line
* |[
* gst-launch filesrc location=abc.wav ! wavparse ! audioresample ! audioconvert ! voamrwbenc ! filesink location=abc.amr
* ]|
* Please note that the above stream misses the header that is needed to play
* the stream.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H

@ -20,6 +20,7 @@

/**
* SECTION:plugin-vulkan
* @title: vulkan
*
* Cross-platform Vulkan plugin.
*/

@ -26,6 +26,7 @@

/**
* SECTION:vkbuffermemory
* @title: vkbuffermemory
* @short_description: memory subclass for Vulkan buffer memory
* @see_also: #GstMemory, #GstAllocator
*

@ -26,6 +26,7 @@

/**
* SECTION:vkbufferpool
* @title: GstVulkanBufferPool
* @short_description: buffer pool for #GstVulkanBufferMemory objects
* @see_also: #GstBufferPool, #GstVulkanBufferMemory
*

@ -26,6 +26,7 @@

/**
* SECTION:vkimagememory
* @title: GstVkImageMemory
* @short_description: memory subclass for Vulkan image memory
* @see_also: #GstMemory, #GstAllocator
*

@ -28,6 +28,7 @@

/**
* SECTION:vkmemory
* @title: GstVkMemory
* @short_description: memory subclass for Vulkan device memory
* @see_also: #GstMemory, #GstAllocator
*

@ -20,6 +20,7 @@

/**
* SECTION:element-vulkansink
* @title: vulkansink
*
* vulkansink renders video frames to a drawable on a local or remote
* display using Vulkan.

@ -20,6 +20,7 @@

/**
* SECTION:element-vulkanupload
* @title: vulkanupload
*
* vulkanupload uploads data into Vulkan memory objects.
*/

@ -19,7 +19,7 @@

*/

/**
* SECTION:gstglwindow
* SECTION:vkwindow
* @short_description: window/surface abstraction
* @title: GstVulkanWindow
* @see_also: #GstGLContext, #GstGLDisplay

@ -23,18 +23,18 @@

/**
* SECTION:element-waylandsink
* @title: waylandsink
*
* The waylandsink creates its own window and renders the decoded video frames to it.
* Set up the Wayland environment as described on the
* <ulink url="http://wayland.freedesktop.org/building.html">Wayland</ulink> home page.
* The current implementation is based on the Weston compositor.
*
* <refsect2>
* <title>Example pipelines</title>
* ## Example pipelines
* |[
* gst-launch-1.0 -v videotestsrc ! waylandsink
* ]| Test the video rendering in Wayland.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H

@ -21,6 +21,7 @@

/**
* SECTION:element-wildmidi
* @title: wildmidi
* @see_also: timidity
*
* This element renders midi-files as audio streams using

@ -29,13 +30,12 @@

* uses the same sound-patches as timidity (it tries the path in $WILDMIDI_CFG,
* $HOME/.wildmidirc and /etc/wildmidi.cfg)
*
* <refsect2>
* <title>Example pipeline</title>
* ## Example pipeline
* |[
* gst-launch-1.0 filesrc location=song.mid ! wildmidi ! alsasink
* ]| This example pipeline will parse the midi and render to raw audio which is
* played via alsa.
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H

@ -22,6 +22,7 @@

/**
* SECTION:element-x265enc
* @title: x265enc
*
* This element encodes raw video into H.265 compressed data.
*

@ -19,64 +19,31 @@

/**
* SECTION:element-zbar
* @title: zbar
*
* Detect bar codes in the video streams and send them as element messages to
* the #GstBus if the #GstZBar:message property is %TRUE.
* If the #GstZBar:attach-frame property is %TRUE, the posted barcode message
* includes a sample of the frame where the barcode was detected (Since 1.6).
*
* The element generate messages named
* <classname>"barcode"</classname>. The structure containes these
* fields:
* <itemizedlist>
* <listitem>
* <para>
* #GstClockTime
* <classname>"timestamp"</classname>:
* the timestamp of the buffer that triggered the message.
* </para>
* </listitem>
* <listitem>
* <para>
* gchar*
* <classname>"type"</classname>:
* the symbol type.
* </para>
* </listitem>
* <listitem>
* <para>
* gchar*
* <classname>"symbol"</classname>:
* the deteted bar code data.
* </para>
* </listitem>
* <listitem>
* <para>
* gint
* <classname>"quality"</classname>:
* an unscaled, relative quantity: larger values are better than smaller
* values.
* </para>
* </listitem>
* <listitem>
* <para>
* GstSample
* <classname>"frame"</classname>:
* the frame in which the barcode message was detected, if
* the .#GstZBar:attach-frame property was set to %TRUE (Since 1.6)
* </para>
* </listitem>
* </itemizedlist>
* The element generates messages named `barcode`. The structure contains these fields:
*
* <refsect2>
* <title>Example launch lines</title>
* * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message.
* * gchar * `type`: the symbol type.
* * gchar * `symbol`: the detected bar code data.
* * gint `quality`: an unscaled, relative quantity: larger values are better than smaller
* values.
* * GstSample `frame`: the frame in which the barcode message was detected, if
* the #GstZBar:attach-frame property was set to %TRUE (Since 1.6)
*
* ## Example launch lines
* |[
* gst-launch-1.0 -m v4l2src ! videoconvert ! zbar ! videoconvert ! xvimagesink
* ]| This pipeline will detect barcodes and send them as messages.
* |[
* gst-launch-1.0 -m v4l2src ! tee name=t ! queue ! videoconvert ! zbar ! fakesink t. ! queue ! xvimagesink
* ]| Same as above, but running the filter on a branch to keep the display in color.
* </refsect2>
*
*/
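A bus watch along the following lines can read those fields back out of the element message; this is only a sketch of application-side handling, not part of the element:

|[
#include <gst/gst.h>

/* Sketch: print the fields of a "barcode" element message. */
static gboolean
on_bus_message (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (msg);

    if (s != NULL && gst_structure_has_name (s, "barcode")) {
      GstClockTime timestamp = GST_CLOCK_TIME_NONE;
      const gchar *type = gst_structure_get_string (s, "type");
      const gchar *symbol = gst_structure_get_string (s, "symbol");
      gint quality = 0;

      gst_structure_get_clock_time (s, "timestamp", &timestamp);
      gst_structure_get_int (s, "quality", &quality);
      g_print ("barcode %s (%s), quality %d, at %" GST_TIME_FORMAT "\n",
          symbol, type, quality, GST_TIME_ARGS (timestamp));
    }
  }
  return TRUE;
}
]|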

#ifdef HAVE_CONFIG_H

@ -21,46 +21,40 @@

*/
/**
* SECTION: gstaggregator
* @title: GstAggregator
* @short_description: manages a set of pads with the purpose of
* aggregating their buffers.
* @see_also: gstcollectpads for historical reasons.
*
* Manages a set of pads with the purpose of aggregating their buffers.
* Control is given to the subclass when all pads have data.
* <itemizedlist>
* <listitem><para>
* Base class for mixers and muxers. Subclasses should at least implement
*
* * Base class for mixers and muxers. Subclasses should at least implement
* the #GstAggregatorClass.aggregate() virtual method.
* </para></listitem>
* <listitem><para>
* When data is queued on all pads, tha aggregate vmethod is called.
* </para></listitem>
* <listitem><para>
* One can peek at the data on any given GstAggregatorPad with the
*
* * When data is queued on all pads, the aggregate vmethod is called.
*
* * One can peek at the data on any given GstAggregatorPad with the
* gst_aggregator_pad_get_buffer () method, and take ownership of it
* with the gst_aggregator_pad_steal_buffer () method. When a buffer
* has been taken with steal_buffer (), a new buffer can be queued
* on that pad.
* </para></listitem>
* <listitem><para>
* If the subclass wishes to push a buffer downstream in its aggregate
*
* * If the subclass wishes to push a buffer downstream in its aggregate
* implementation, it should do so through the
* gst_aggregator_finish_buffer () method. This method will take care
* of sending and ordering mandatory events such as stream start, caps
* and segment.
* </para></listitem>
* <listitem><para>
* Same goes for EOS events, which should not be pushed directly by the
*
* * Same goes for EOS events, which should not be pushed directly by the
* subclass; it should instead return GST_FLOW_EOS in its aggregate
* implementation.
* </para></listitem>
* <listitem><para>
* Note that the aggregator logic regarding gap event handling is to turn
*
* * Note that the aggregator logic regarding gap event handling is to turn
* these into gap buffers with matching PTS and duration. It will also
* flag these buffers with GST_BUFFER_FLAG_GAP and GST_BUFFER_FLAG_DROPPABLE
* to ease their identification and subsequent processing.
* </para></listitem>
* </itemizedlist>
*
*/
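As a rough illustration of the flow described above, an aggregate() implementation in a subclass might look like the following sketch. It is built only from the calls mentioned in the comment above; locking of the pad list, timestamp handling and error paths are deliberately omitted, so this is not a complete element:

|[
#include <gst/gst.h>
/* also needs the gstaggregator.h header shipped with this base class */

/* Sketch: a trivial aggregate() that forwards the first queued buffer. */
static GstFlowReturn
my_agg_aggregate (GstAggregator * agg, gboolean timeout)
{
  GList *l;

  /* Real code must take the object lock (or iterate pads safely). */
  for (l = GST_ELEMENT (agg)->sinkpads; l != NULL; l = l->next) {
    GstAggregatorPad *pad = GST_AGGREGATOR_PAD (l->data);
    GstBuffer *buf = gst_aggregator_pad_steal_buffer (pad);

    if (buf == NULL)
      continue;                 /* this pad had no data queued */

    /* finish_buffer() sends stream-start/caps/segment as needed and
     * pushes the buffer downstream. */
    return gst_aggregator_finish_buffer (agg, buf);
  }

  /* Returning GST_FLOW_EOS (instead of pushing an EOS event directly)
   * signals end of stream, as described above. */
  return GST_FLOW_EOS;
}
]|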

#ifdef HAVE_CONFIG_H

@ -29,40 +29,30 @@

/**
* SECTION:gsth264parser
* @title: GstH264Parser
* @short_description: Convenience library for h264 video
* bitstream parsing.
*
* It offers bitstream parsing in both AVC (length-prefixed) and Annex B
* (0x000001 start code prefix) format. To identify a NAL unit in a bitstream
* and parse its headers, first call:
* <itemizedlist>
* <listitem>
* #gst_h264_parser_identify_nalu to identify a NAL unit in an Annex B type bitstream
* </listitem>
* <listitem>
* #gst_h264_parser_identify_nalu_avc to identify a NAL unit in an AVC type bitstream
* </listitem>
* </itemizedlist>
*
* * #gst_h264_parser_identify_nalu to identify a NAL unit in an Annex B type bitstream
*
* * #gst_h264_parser_identify_nalu_avc to identify a NAL unit in an AVC type bitstream
*
* The following functions are then available for parsing the structure of the
* #GstH264NalUnit, depending on the #GstH264NalUnitType:
* <itemizedlist>
* <listitem>
* From #GST_H264_NAL_SLICE to #GST_H264_NAL_SLICE_IDR: #gst_h264_parser_parse_slice_hdr
* </listitem>
* <listitem>
* #GST_H264_NAL_SEI: #gst_h264_parser_parse_sei
* </listitem>
* <listitem>
* #GST_H264_NAL_SPS: #gst_h264_parser_parse_sps
* </listitem>
* <listitem>
* #GST_H264_NAL_PPS: #gst_h264_parser_parse_pps
* </listitem>
* <listitem>
* Any other: #gst_h264_parser_parse_nal
* </listitem>
* </itemizedlist>
*
* * From #GST_H264_NAL_SLICE to #GST_H264_NAL_SLICE_IDR: #gst_h264_parser_parse_slice_hdr
*
* * #GST_H264_NAL_SEI: #gst_h264_parser_parse_sei
*
* * #GST_H264_NAL_SPS: #gst_h264_parser_parse_sps
*
* * #GST_H264_NAL_PPS: #gst_h264_parser_parse_pps
*
* * Any other: #gst_h264_parser_parse_nal
*
* One of these functions *must* be called on every NAL unit in the bitstream,
* in order to keep the internal structures of the #GstH264NalParser up to

@ -70,17 +60,13 @@

* type, if no special parsing of the current NAL unit is required by the
* application.
*
* For more details about the structures, look at the ITU-T H.264 and ISO/IEC 14496-10 – MPEG-4
* For more details about the structures, look at the ITU-T H.264 and ISO/IEC 14496-10 – MPEG-4
* Part 10 specifications, available at:
*
* <itemizedlist>
* <listitem>
* ITU-T H.264: http://www.itu.int/rec/T-REC-H.264
* </listitem>
* <listitem>
* ISO/IEC 14496-10: http://www.iso.org/iso/iso_catalogue/catalogue_tc/catalogue_detail.htm?csnumber=56538
* </listitem>
* </itemizedlist>
* * ITU-T H.264: http://www.itu.int/rec/T-REC-H.264
*
* * ISO/IEC 14496-10: http://www.iso.org/iso/iso_catalogue/catalogue_tc/catalogue_detail.htm?csnumber=56538
*
*/
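Put together, a typical Annex B parsing loop looks roughly like the sketch below. For brevity every unit is handed to the catch-all gst_h264_parser_parse_nal(); real code would switch on nalu.type and use the specialised parse functions listed above, and would also handle the parser results other than OK:

|[
#include <gst/gst.h>
#include <gst/codecparsers/gsth264parser.h>

/* Sketch: walk the NAL units of an Annex B encoded chunk of data. */
static void
parse_annex_b (const guint8 * data, gsize size)
{
  GstH264NalParser *parser = gst_h264_nal_parser_new ();
  GstH264NalUnit nalu;
  guint offset = 0;

  while (gst_h264_parser_identify_nalu (parser, data, offset, size,
          &nalu) == GST_H264_PARSER_OK) {
    /* keep the parser state up to date for every NAL unit */
    gst_h264_parser_parse_nal (parser, &nalu);
    g_print ("NAL type %d, %u bytes\n", nalu.type, nalu.size);
    offset = nalu.offset + nalu.size;
  }

  gst_h264_nal_parser_free (parser);
}
]|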

#ifdef HAVE_CONFIG_H

@ -22,43 +22,32 @@

/**
* SECTION:gsth265parser
* @title: GstH265Parser
* @short_description: Convenience library for h265 video bitstream parsing.
*
* It offers you bitstream parsing in HEVC mode and non-HEVC mode. To identify
* Nals in a bitstream and parse its headers, you should call:
* <itemizedlist>
* <listitem>
* gst_h265_parser_identify_nalu() to identify the following nalu in
*
* * gst_h265_parser_identify_nalu() to identify the following nalu in
* non-HEVC bitstreams
* </listitem>
* <listitem>
* gst_h265_parser_identify_nalu_hevc() to identify the nalu in
*
* * gst_h265_parser_identify_nalu_hevc() to identify the nalu in
* HEVC bitstreams
* </listitem>
* </itemizedlist>
*
* Then, depending on the #GstH265NalUnitType of the newly parsed #GstH265NalUnit,
* you should call the different functions to parse the structure:
* <itemizedlist>
* <listitem>
* From #GST_H265_NAL_SLICE_TRAIL_N to #GST_H265_NAL_SLICE_CRA_NUT: gst_h265_parser_parse_slice_hdr()
* </listitem>
* <listitem>
* #GST_H265_NAL_SEI: gst_h265_parser_parse_sei()
* </listitem>
* <listitem>
* #GST_H265_NAL_VPS: gst_h265_parser_parse_vps()
* </listitem>
* <listitem>
* #GST_H265_NAL_SPS: gst_h265_parser_parse_sps()
* </listitem>
* <listitem>
* #GST_H265_NAL_PPS: #gst_h265_parser_parse_pps()
* </listitem>
* <listitem>
* Any other: gst_h265_parser_parse_nal()
* </listitem>
* </itemizedlist>
*
* * From #GST_H265_NAL_SLICE_TRAIL_N to #GST_H265_NAL_SLICE_CRA_NUT: gst_h265_parser_parse_slice_hdr()
*
* * #GST_H265_NAL_SEI: gst_h265_parser_parse_sei()
*
* * #GST_H265_NAL_VPS: gst_h265_parser_parse_vps()
*
* * #GST_H265_NAL_SPS: gst_h265_parser_parse_sps()
*
* * #GST_H265_NAL_PPS: #gst_h265_parser_parse_pps()
*
* * Any other: gst_h265_parser_parse_nal()
*
* Note: You should always call gst_h265_parser_parse_nal() if you don't
* actually need #GstH265NalUnitType to be parsed for your personal use, in

@ -67,11 +56,8 @@

* For more details about the structures, look at the ITU-T H.265
* specifications, which you can download from:
*
* <itemizedlist>
* <listitem>
* ITU-T H.265: http://www.itu.int/rec/T-REC-H.265
* </listitem>
* </itemizedlist>
* * ITU-T H.265: http://www.itu.int/rec/T-REC-H.265
*
*/
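The calling pattern mirrors the H.264 parser above; a compressed sketch for a non-HEVC (byte-stream) buffer, again using only the catch-all parse function for brevity:

|[
#include <gst/gst.h>
#include <gst/codecparsers/gsth265parser.h>

/* Sketch: identify and parse NAL units in a non-HEVC byte-stream buffer. */
static void
parse_h265 (const guint8 * data, gsize size)
{
  GstH265Parser *parser = gst_h265_parser_new ();
  GstH265NalUnit nalu;
  guint offset = 0;

  while (gst_h265_parser_identify_nalu (parser, data, offset, size,
          &nalu) == GST_H265_PARSER_OK) {
    gst_h265_parser_parse_nal (parser, &nalu);  /* catch-all, see above */
    offset = nalu.offset + nalu.size;
  }

  gst_h265_parser_free (parser);
}
]|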

#ifdef HAVE_CONFIG_H

@ -20,6 +20,7 @@

/**
* SECTION:gstjpeg2000sampling
* @title: GstJpeg2000Sampling
* @short_description: Manage JPEG 2000 sampling and colorspace fields
*
*/

@ -20,13 +20,11 @@

/**
* SECTION:gstjpegparser
* @title: GstJpegParser
* @short_description: Convenience library for JPEG bitstream parsing.
*
* <refsect2>
* <para>
* Provides useful functions for parsing JPEG images.
* </para>
* </refsect2>
*
*/

#include <string.h>

@ -20,6 +20,7 @@

*/
/**
* SECTION:gstmpeg4parser
* @title: GstMpeg4Parser
* @short_description: Convenience library for parsing mpeg4 part 2 video
* bitstream.
*

@ -25,14 +25,12 @@

/**
* SECTION:gstmpegvideoparser
* @title: GstMpegvideoParser
* @short_description: Convenience library for mpeg1 and 2 video
* bitstream parsing.
*
* <refsect2>
* <para>
* Provides useful functions for mpeg video bitstream parsing.
* </para>
* </refsect2>
*
*/

#ifdef HAVE_CONFIG_H