docs: Port all docstring to gtk-doc markdown

This commit is contained in:
Thibault Saunier 2017-03-08 15:01:13 -03:00
parent 90f766cc51
commit 78022a6e0c
257 changed files with 943 additions and 1141 deletions

View file

@ -20,15 +20,15 @@
/** /**
* SECTION:element-assrender * SECTION:element-assrender
* @title: assrender
* *
* Renders timestamped SSA/ASS subtitles on top of a video stream. * Renders timestamped SSA/ASS subtitles on top of a video stream.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mpegaudioparse ! mpg123audiodec ! audioconvert ! autoaudiosink d. ! queue ! h264parse ! avdec_h264 ! videoconvert ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux name=d ! queue ! mpegaudioparse ! mpg123audiodec ! audioconvert ! autoaudiosink d. ! queue ! h264parse ! avdec_h264 ! videoconvert ! r. d. ! queue ! "application/x-ass" ! assrender name=r ! videoconvert ! autovideosink
* ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video. * ]| This pipeline demuxes a Matroska file with h.264 video, MP3 audio and embedded ASS subtitles and renders the subtitles on top of the video.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,17 +20,17 @@
/** /**
* SECTION:element-bs2b * SECTION:element-bs2b
* @title: bs2b
* *
* Improve headphone listening of stereo audio records using the bs2b library. * Improve headphone listening of stereo audio records using the bs2b library.
* It does so by mixing the left and right channel in a way that simulates * It does so by mixing the left and right channel in a way that simulates
* a stereo speaker setup while using headphones. * a stereo speaker setup while using headphones.
* *
* <refsect2> * ## Example pipelines
* <title>Example pipelines</title>
* |[ * |[
* gst-launch-1.0 audiotestsrc ! "audio/x-raw,channel-mask=(bitmask)0x1" ! interleave name=i ! bs2b ! autoaudiosink audiotestsrc freq=330 ! "audio/x-raw,channel-mask=(bitmask)0x2" ! i. * gst-launch-1.0 audiotestsrc ! "audio/x-raw,channel-mask=(bitmask)0x1" ! interleave name=i ! bs2b ! autoaudiosink audiotestsrc freq=330 ! "audio/x-raw,channel-mask=(bitmask)0x2" ! i.
* ]| Play two independent sine test sources and crossfeed them. * ]| Play two independent sine test sources and crossfeed them.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -23,18 +23,18 @@
/** /**
* SECTION:element-chromaprint * SECTION:element-chromaprint
* @title: chromaprint
* *
* The chromaprint element calculates an acoustic fingerprint for an * The chromaprint element calculates an acoustic fingerprint for an
* audio stream which can be used to identify a song and look up * audio stream which can be used to identify a song and look up
* further metadata from the <ulink url="http://acoustid.org/">Acoustid</ulink> * further metadata from the <ulink url="http://acoustid.org/">Acoustid</ulink>
* and Musicbrainz databases. * and Musicbrainz databases.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch-1.0 -m uridecodebin uri=file:///path/to/song.ogg ! audioconvert ! chromaprint ! fakesink * gst-launch-1.0 -m uridecodebin uri=file:///path/to/song.ogg ! audioconvert ! chromaprint ! fakesink
* ]| * ]|
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -19,14 +19,14 @@
/** /**
* SECTION:element-curlsink * SECTION:element-curlsink
* @title: curlsink
* @short_description: sink that uploads data to a server using libcurl * @short_description: sink that uploads data to a server using libcurl
* @see_also: * @see_also:
* *
* This is a network sink that uses libcurl as a client to upload data to * This is a network sink that uses libcurl as a client to upload data to
* a server (e.g. a HTTP/FTP server). * a server (e.g. a HTTP/FTP server).
* *
* <refsect2> * ## Example launch line (upload a JPEG file to an HTTP server)
* <title>Example launch line (upload a JPEG file to an HTTP server)</title>
* |[ * |[
* gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlsink \ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlsink \
* file-name=image.jpg \ * file-name=image.jpg \
@ -35,7 +35,7 @@
* content-type=image/jpeg \ * content-type=image/jpeg \
* use-content-length=false * use-content-length=false
* ]| * ]|
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -19,21 +19,20 @@
/** /**
* SECTION:element-curlfilesink * SECTION:element-curlfilesink
* @title: curlfilesink
* @short_description: sink that uploads data to a server using libcurl * @short_description: sink that uploads data to a server using libcurl
* @see_also: * @see_also:
* *
* This is a network sink that uses libcurl as a client to upload data to * This is a network sink that uses libcurl as a client to upload data to
* a local or network drive. * a local or network drive.
* *
* <refsect2> * ## Example launch line (upload a JPEG file to /home/test/images directory)
* <title>Example launch line (upload a JPEG file to /home/test/images
* directory)</title>
* |[ * |[
* gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlfilesink \ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlfilesink \
* file-name=image.jpg \ * file-name=image.jpg \
* location=file:///home/test/images/ * location=file:///home/test/images/
* ]| * ]|
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -19,21 +19,23 @@
/** /**
* SECTION:element-curlftpsink * SECTION:element-curlftpsink
* @title: curlftpsink
* @short_description: sink that uploads data to a server using libcurl * @short_description: sink that uploads data to a server using libcurl
* @see_also: * @see_also:
* *
* This is a network sink that uses libcurl as a client to upload data to * This is a network sink that uses libcurl as a client to upload data to
* an FTP server. * an FTP server.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line (upload a JPEG file to /home/test/images *
* directory)</title> * Upload a JPEG file to /home/test/images directory
*
* |[ * |[
* gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlftpsink \ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlftpsink \
* file-name=image.jpg \ * file-name=image.jpg \
* location=ftp://192.168.0.1/images/ * location=ftp://192.168.0.1/images/
* ]| * ]|
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -19,14 +19,17 @@
/** /**
* SECTION:element-curlhttpsink * SECTION:element-curlhttpsink
* @title: curlhttpsink
* @short_description: sink that uploads data to a server using libcurl * @short_description: sink that uploads data to a server using libcurl
* @see_also: * @see_also:
* *
* This is a network sink that uses libcurl as a client to upload data to * This is a network sink that uses libcurl as a client to upload data to
* an HTTP server. * an HTTP server.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line (upload a JPEG file to an HTTP server)</title> *
* Upload a JPEG file to an HTTP server.
*
* |[ * |[
* gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlhttpsink \ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlhttpsink \
* file-name=image.jpg \ * file-name=image.jpg \
@ -35,7 +38,6 @@
* content-type=image/jpeg \ * content-type=image/jpeg \
* use-content-length=false * use-content-length=false
* ]| * ]|
* </refsect2>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -19,14 +19,17 @@
/** /**
* SECTION:element-curlsftpsink * SECTION:element-curlsftpsink
* @title: curlsftpsink
* @short_description: sink that uploads data to a server using libcurl * @short_description: sink that uploads data to a server using libcurl
* @see_also: * @see_also:
* *
* This is a network sink that uses libcurl as a client to upload data to * This is a network sink that uses libcurl as a client to upload data to
* a SFTP (SSH File Transfer Protocol) server. * a SFTP (SSH File Transfer Protocol) server.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line (upload a file to /home/john/sftp_tests/)</title> *
* Upload a file to /home/john/sftp_tests/
*
* |[ * |[
* gst-launch-1.0 filesrc location=/home/jdoe/some.file ! curlsftpsink \ * gst-launch-1.0 filesrc location=/home/jdoe/some.file ! curlsftpsink \
* file-name=some.file.backup \ * file-name=some.file.backup \
@ -36,8 +39,6 @@
* ssh-priv-keyfile=/home/jdoe/.ssh/id_rsa \ * ssh-priv-keyfile=/home/jdoe/.ssh/id_rsa \
* create-dirs=TRUE * create-dirs=TRUE
* ]| * ]|
* </refsect2>
*
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -19,14 +19,17 @@
/** /**
* SECTION:element-curlsink * SECTION:element-curlsink
* @title: curlsink
* @short_description: sink that uploads data to a server using libcurl * @short_description: sink that uploads data to a server using libcurl
* @see_also: * @see_also:
* *
* This is a network sink that uses libcurl as a client to upload data to * This is a network sink that uses libcurl as a client to upload data to
* an SMTP server. * an SMTP server.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line (upload a JPEG file to an SMTP server)</title> *
* Upload a JPEG file to an SMTP server.
*
* |[ * |[
* gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlsmtpsink \ * gst-launch-1.0 filesrc location=image.jpg ! jpegparse ! curlsmtpsink \
* file-name=image.jpg \ * file-name=image.jpg \
@ -38,7 +41,7 @@
* use-ssl=TRUE \ * use-ssl=TRUE \
* insecure=TRUE * insecure=TRUE
* ]| * ]|
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -19,6 +19,7 @@
/** /**
* SECTION:element-curlsshsink * SECTION:element-curlsshsink
* @title: curlsshsink
* @short_description: sink that uploads data to a server using libcurl * @short_description: sink that uploads data to a server using libcurl
* @see_also: * @see_also:
* *

View file

@ -19,6 +19,7 @@
/** /**
* SECTION:element-curltlssink * SECTION:element-curltlssink
* @title: curltlssink
* @short_description: sink that uploads data to a server using libcurl * @short_description: sink that uploads data to a server using libcurl
* @see_also: * @see_also:
* *

View file

@ -23,6 +23,7 @@
/** /**
* SECTION:element-daaladec * SECTION:element-daaladec
* @title: daaladec
* @see_also: daalaenc, oggdemux * @see_also: daalaenc, oggdemux
* *
* This element decodes daala streams into raw video * This element decodes daala streams into raw video
@ -30,13 +31,12 @@
* video codec maintained by the <ulink url="http://www.xiph.org/">Xiph.org * video codec maintained by the <ulink url="http://www.xiph.org/">Xiph.org
* Foundation</ulink>. * Foundation</ulink>.
* *
* <refsect2> * ## Example pipeline
* <title>Example pipeline</title>
* |[ * |[
* gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! daaladec ! xvimagesink * gst-launch-1.0 -v filesrc location=videotestsrc.ogg ! oggdemux ! daaladec ! xvimagesink
* ]| This example pipeline will decode an ogg stream and decodes the daala video. Refer to * ]| This example pipeline will decode an ogg stream and decodes the daala video. Refer to
* the daalaenc example to create the ogg file. * the daalaenc example to create the ogg file.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -23,6 +23,7 @@
/** /**
* SECTION:element-daalaenc * SECTION:element-daalaenc
* @title: daalaenc
* @see_also: daaladec, oggmux * @see_also: daaladec, oggmux
* *
* This element encodes raw video into a Daala stream. * This element encodes raw video into a Daala stream.
@ -30,14 +31,13 @@
* video codec maintained by the <ulink url="http://www.xiph.org/">Xiph.org * video codec maintained by the <ulink url="http://www.xiph.org/">Xiph.org
* Foundation</ulink>. * Foundation</ulink>.
* *
* <refsect2> * ## Example pipeline
* <title>Example pipeline</title>
* |[ * |[
* gst-launch-1.0 -v videotestsrc num-buffers=1000 ! daalaenc ! oggmux ! filesink location=videotestsrc.ogg * gst-launch-1.0 -v videotestsrc num-buffers=1000 ! daalaenc ! oggmux ! filesink location=videotestsrc.ogg
* ]| This example pipeline will encode a test video source to daala muxed in an * ]| This example pipeline will encode a test video source to daala muxed in an
* ogg container. Refer to the daaladec documentation to decode the create * ogg container. Refer to the daaladec documentation to decode the create
* stream. * stream.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -29,9 +29,10 @@
*/ */
/** /**
* SECTION:element-dashdemux * SECTION:element-dashdemux
* @title: dashdemux
* *
* DASH demuxer element. * DASH demuxer element.
* <title>Example launch line</title> * ## Example launch line
* |[ * |[
* gst-launch-1.0 playbin uri="http://www-itec.uni-klu.ac.at/ftp/datasets/mmsys12/RedBullPlayStreets/redbull_4s/RedBullPlayStreets_4s_isoffmain_DIS_23009_1_v_2_1c2_2011_08_30.mpd" * gst-launch-1.0 playbin uri="http://www-itec.uni-klu.ac.at/ftp/datasets/mmsys12/RedBullPlayStreets/redbull_4s/RedBullPlayStreets_4s_isoffmain_DIS_23009_1_v_2_1c2_2011_08_30.mpd"
* ]| * ]|

View file

@ -20,6 +20,7 @@
/** /**
* SECTION:element-dc1394src * SECTION:element-dc1394src
* @title: dc1394src
* *
* Source for IIDC (Instrumentation & Industrial Digital Camera) firewire * Source for IIDC (Instrumentation & Industrial Digital Camera) firewire
* cameras. If several cameras are connected to the system, the desired one * cameras. If several cameras are connected to the system, the desired one
@ -31,8 +32,7 @@
* corresponding video formats are exposed in the capabilities. * corresponding video formats are exposed in the capabilities.
* The Bayer pattern is left unspecified. * The Bayer pattern is left unspecified.
* *
* <refsect2> * ## Example launch lines
* <title>Example launch lines</title>
* |[ * |[
* gst-launch-1.0 -v dc1394src ! videoconvert ! autovideosink * gst-launch-1.0 -v dc1394src ! videoconvert ! autovideosink
* ]| Capture and display frames from the first camera available in the system. * ]| Capture and display frames from the first camera available in the system.
@ -41,7 +41,7 @@
* ! "video/x-bayer,format=gbrg,width=1280,height=960,framerate=15/2" \ * ! "video/x-bayer,format=gbrg,width=1280,height=960,framerate=15/2" \
* ! bayer2rgb ! videoconvert ! autovideosink * ! bayer2rgb ! videoconvert ! autovideosink
* ]| Capture and display frames from a specific camera in the desired format. * ]| Capture and display frames from a specific camera in the desired format.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,66 +20,52 @@
/** /**
* SECTION:element-dfbvideosink * SECTION:element-dfbvideosink
* @title: dfbvideosink
* *
* DfbVideoSink renders video frames using the * DfbVideoSink renders video frames using the
* <ulink url="http://www.directfb.org/">DirectFB</ulink> library. * <ulink url="http://www.directfb.org/">DirectFB</ulink> library.
* Rendering can happen in two different modes : * Rendering can happen in two different modes :
* <itemizedlist> *
* <listitem> * * Standalone: this mode will take complete control of the monitor forcing
* <para>
* Standalone: this mode will take complete control of the monitor forcing
* <ulink url="http://www.directfb.org/">DirectFB</ulink> to fullscreen layout. * <ulink url="http://www.directfb.org/">DirectFB</ulink> to fullscreen layout.
* This is convenient to test using the gst-launch-1.0 command line tool or * This is convenient to test using the gst-launch-1.0 command line tool or
* other simple applications. It is possible to interrupt playback while * other simple applications. It is possible to interrupt playback while
* being in this mode by pressing the Escape key. * being in this mode by pressing the Escape key.
* </para>
* <para>
* This mode handles navigation events for every input device supported by * This mode handles navigation events for every input device supported by
* the <ulink url="http://www.directfb.org/">DirectFB</ulink> library, it will * the <ulink url="http://www.directfb.org/">DirectFB</ulink> library, it will
* look for available video modes in the fb.modes file and try to switch * look for available video modes in the fb.modes file and try to switch
* the framebuffer video mode to the most suitable one. Depending on * the framebuffer video mode to the most suitable one. Depending on
* hardware acceleration capabilities the element will handle scaling or not. * hardware acceleration capabilities the element will handle scaling or not.
* If no acceleration is available it will do clipping or centering of the * If no acceleration is available it will do clipping or centering of the
* video frames respecting the original aspect ratio. * video frames respecting the original aspect ratio.
* </para> *
* </listitem> * * Embedded: this mode will render video frames in a
* <listitem>
* <para>
* Embedded: this mode will render video frames in a
* #GstDfbVideoSink:surface provided by the * #GstDfbVideoSink:surface provided by the
* application developer. This is a more advanced usage of the element and * application developer. This is a more advanced usage of the element and
* it is required to integrate video playback in existing * it is required to integrate video playback in existing
* <ulink url="http://www.directfb.org/">DirectFB</ulink> applications. * <ulink url="http://www.directfb.org/">DirectFB</ulink> applications.
* </para>
* <para>
* When using this mode the element just renders to the * When using this mode the element just renders to the
* #GstDfbVideoSink:surface provided by the * #GstDfbVideoSink:surface provided by the
* application, that means it won't handle navigation events and won't resize * application, that means it won't handle navigation events and won't resize
* the #GstDfbVideoSink:surface to fit video * the #GstDfbVideoSink:surface to fit video
* frames geometry. Application has to implement the necessary code to grab * frames geometry. Application has to implement the necessary code to grab
* information about the negotiated geometry and resize their * information about the negotiated geometry and resize their
* #GstDfbVideoSink:surface accordingly. * #GstDfbVideoSink:surface accordingly.
* </para> *
* </listitem> * For both modes the element implements a buffer pool allocation system to
* </itemizedlist> * optimize memory allocation time and handle reverse negotiation. Indeed if
* For both modes the element implements a buffer pool allocation system to
* optimize memory allocation time and handle reverse negotiation. Indeed if
* you insert an element like videoscale in the pipeline the video sink will * you insert an element like videoscale in the pipeline the video sink will
* negotiate with it to try get a scaled video for either the fullscreen layout * negotiate with it to try get a scaled video for either the fullscreen layout
* or the application provided external #GstDfbVideoSink:surface. * or the application provided external #GstDfbVideoSink:surface.
* *
* <refsect2> * ## Example application
* <title>Example application</title> *
* <para>
* <include xmlns="http://www.w3.org/2003/XInclude" href="element-dfb-example.xml" /> * <include xmlns="http://www.w3.org/2003/XInclude" href="element-dfb-example.xml" />
* </para> *
* </refsect2> * ## Example pipelines
* <refsect2>
* <title>Example pipelines</title>
* |[ * |[
* gst-launch-1.0 -v videotestsrc ! dfbvideosink hue=20000 saturation=40000 brightness=25000 * gst-launch-1.0 -v videotestsrc ! dfbvideosink hue=20000 saturation=40000 brightness=25000
* ]| test the colorbalance interface implementation in dfbvideosink * ]| test the colorbalance interface implementation in dfbvideosink
* </refsect2>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
@ -810,7 +796,7 @@ gst_dfbvideosink_setup (GstDfbVideoSink * dfbvideosink)
dfbvideosink->backbuffer = FALSE; dfbvideosink->backbuffer = FALSE;
dfbvideosink->pixel_format = DSPF_UNKNOWN; dfbvideosink->pixel_format = DSPF_UNKNOWN;
/* If we do it all by ourself we create the DirectFB context, get the /* If we do it all by ourself we create the DirectFB context, get the
primary layer and use a fullscreen configuration */ primary layer and use a fullscreen configuration */
if (!dfbvideosink->ext_surface) { if (!dfbvideosink->ext_surface) {
GST_DEBUG_OBJECT (dfbvideosink, "no external surface, taking over " GST_DEBUG_OBJECT (dfbvideosink, "no external surface, taking over "

View file

@ -20,18 +20,18 @@
/** /**
* SECTION:element-dtsdec * SECTION:element-dtsdec
* @title: dtsdec
* *
* Digital Theatre System (DTS) audio decoder * Digital Theatre System (DTS) audio decoder
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch-1.0 dvdreadsrc title=1 ! mpegpsdemux ! dtsdec ! audioresample ! audioconvert ! alsasink * gst-launch-1.0 dvdreadsrc title=1 ! mpegpsdemux ! dtsdec ! audioresample ! audioconvert ! alsasink
* ]| Play a DTS audio track from a dvd. * ]| Play a DTS audio track from a dvd.
* |[ * |[
* gst-launch-1.0 filesrc location=abc.dts ! dtsdec ! audioresample ! audioconvert ! alsasink * gst-launch-1.0 filesrc location=abc.dts ! dtsdec ! audioresample ! audioconvert ! alsasink
* ]| Decode a standalone file and play it. * ]| Decode a standalone file and play it.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,16 +20,16 @@
/** /**
* SECTION:element-faac * SECTION:element-faac
* @title: faac
* @see_also: faad * @see_also: faad
* *
* faac encodes raw audio to AAC (MPEG-4 part 3) streams. * faac encodes raw audio to AAC (MPEG-4 part 3) streams.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch-1.0 audiotestsrc wave=sine num-buffers=100 ! audioconvert ! faac ! matroskamux ! filesink location=sine.mkv * gst-launch-1.0 audiotestsrc wave=sine num-buffers=100 ! audioconvert ! faac ! matroskamux ! filesink location=sine.mkv
* ]| Encode a sine beep as aac and write to matroska container. * ]| Encode a sine beep as aac and write to matroska container.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,19 +20,19 @@
/** /**
* SECTION:element-faad * SECTION:element-faad
* @title: faad
* @seealso: faac * @seealso: faac
* *
* faad decodes AAC (MPEG-4 part 3) stream. * faad decodes AAC (MPEG-4 part 3) stream.
* *
* <refsect2> * ## Example launch lines
* <title>Example launch lines</title>
* |[ * |[
* gst-launch-1.0 filesrc location=example.mp4 ! qtdemux ! faad ! audioconvert ! audioresample ! autoaudiosink * gst-launch-1.0 filesrc location=example.mp4 ! qtdemux ! faad ! audioconvert ! audioresample ! autoaudiosink
* ]| Play aac from mp4 file. * ]| Play aac from mp4 file.
* |[ * |[
* gst-launch-1.0 filesrc location=example.adts ! faad ! audioconvert ! audioresample ! autoaudiosink * gst-launch-1.0 filesrc location=example.adts ! faad ! audioconvert ! audioresample ! autoaudiosink
* ]| Play standalone aac bitstream. * ]| Play standalone aac bitstream.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -21,19 +21,19 @@
/** /**
* SECTION:element-fluiddec * SECTION:element-fluiddec
* @title: fluiddec
* @see_also: timidity, wildmidi * @see_also: timidity, wildmidi
* *
* This element renders midi-events as audio streams using * This element renders midi-events as audio streams using
* <ulink url="http://fluidsynth.sourceforge.net//">Fluidsynth</ulink>. * <ulink url="http://fluidsynth.sourceforge.net//">Fluidsynth</ulink>.
* It offers better sound quality compared to the timidity or wildmidi element. * It offers better sound quality compared to the timidity or wildmidi element.
* *
* <refsect2> * ## Example pipeline
* <title>Example pipeline</title>
* |[ * |[
* gst-launch-1.0 filesrc location=song.mid ! midiparse ! fluiddec ! pulsesink * gst-launch-1.0 filesrc location=song.mid ! midiparse ! fluiddec ! pulsesink
* ]| This example pipeline will parse the midi and render to raw audio which is * ]| This example pipeline will parse the midi and render to raw audio which is
* played via pulseaudio. * played via pulseaudio.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -21,16 +21,16 @@
/** /**
* SECTION:element-glbumper * SECTION:element-glbumper
* @title: glbumper
* *
* Bump mapping using the normal method. * Bump mapping using the normal method.
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 -v videotestsrc ! glupload ! glbumper location=normalmap.bmp ! glimagesink * gst-launch-1.0 -v videotestsrc ! glupload ! glbumper location=normalmap.bmp ! glimagesink
* ]| A pipeline to test normal mapping. * ]| A pipeline to test normal mapping.
* FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -22,16 +22,16 @@
/** /**
* SECTION:element-glcolorbalance * SECTION:element-glcolorbalance
* @title: glcolorbalance
* *
* Adjusts brightness, contrast, hue, saturation on a video stream. * Adjusts brightness, contrast, hue, saturation on a video stream.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch-1.0 videotestsrc ! glupload ! glcolorbalance saturation=0.0 ! glcolorconvert ! gldownload ! ximagesink * gst-launch-1.0 videotestsrc ! glupload ! glcolorbalance saturation=0.0 ! glcolorconvert ! gldownload ! ximagesink
* ]| This pipeline converts the image to black and white by setting the * ]| This pipeline converts the image to black and white by setting the
* saturation to 0.0. * saturation to 0.0.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,17 +20,15 @@
/** /**
* SECTION:element-glcolorscale * SECTION:element-glcolorscale
* @title: glcolorscale
* *
* video frame scaling and colorspace conversion. * video frame scaling and colorspace conversion.
* *
* <refsect2> * ## Scaling and Color space conversion
* <title>Scaling and Color space conversion</title> *
* <para>
* Equivalent to glupload ! gldownload. * Equivalent to glupload ! gldownload.
* </para> *
* </refsect2> * ## Examples
* <refsect2>
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 -v videotestsrc ! video/x-raw ! glcolorscale ! ximagesink * gst-launch-1.0 -v videotestsrc ! video/x-raw ! glcolorscale ! ximagesink
* ]| A pipeline to test colorspace conversion. * ]| A pipeline to test colorspace conversion.
@ -40,7 +38,7 @@
* video/x-raw, width=320, height=240, format=YV12 ! videoconvert ! autovideosink * video/x-raw, width=320, height=240, format=YV12 ! videoconvert ! autovideosink
* ]| A pipeline to test hardware scaling and colorspace conversion. * ]| A pipeline to test hardware scaling and colorspace conversion.
* FBO and GLSL are required. * FBO and GLSL are required.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,16 +20,16 @@
/** /**
* SECTION:element-deinterlace * SECTION:element-deinterlace
* @title: deinterlace
* *
* Deinterlacing based on fragment shaders. * Deinterlacing based on fragment shaders.
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 videotestsrc ! glupload ! gldeinterlace ! glimagesink * gst-launch-1.0 videotestsrc ! glupload ! gldeinterlace ! glimagesink
* ]| * ]|
* FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,16 +20,16 @@
/** /**
* SECTION:element-gldifferencematte. * SECTION:element-gldifferencematte.
* @title: gldifferencematte
* *
* Saves a background frame and replaces it with a pixbuf. * Saves a background frame and replaces it with a pixbuf.
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 videotestsrc ! glupload ! gldifferencemate location=backgroundimagefile ! glimagesink * gst-launch-1.0 videotestsrc ! glupload ! gldifferencemate location=backgroundimagefile ! glimagesink
* ]| * ]|
* FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,16 +20,16 @@
/** /**
* SECTION:element-gleffects. * SECTION:element-gleffects.
* @title: gleffects
* *
* GL Shading Language effects. * GL Shading Language effects.
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 videotestsrc ! glupload ! gleffects effect=5 ! glimagesink * gst-launch-1.0 videotestsrc ! glupload ! gleffects effect=5 ! glimagesink
* ]| * ]|
* FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,20 +20,18 @@
/** /**
* SECTION:element-glfilterapp * SECTION:element-glfilterapp
* @title: glfilterapp
* *
* The resize and redraw callbacks can be set from a client code. * The resize and redraw callbacks can be set from a client code.
* *
* <refsect2> * ## Client callbacks
* <title>CLient callbacks</title> *
* <para> * The graphic scene can be written from a client code through the
* The graphic scene can be written from a client code through the
* two glfilterapp properties. * two glfilterapp properties.
* </para> *
* </refsect2> * ## Examples
* <refsect2>
* <title>Examples</title>
* see gst-plugins-gl/tests/examples/generic/recordgraphic * see gst-plugins-gl/tests/examples/generic/recordgraphic
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,11 +20,11 @@
/** /**
* SECTION:element-glfiltercube * SECTION:element-glfiltercube
* @title: glfiltercube
* *
* The resize and redraw callbacks can be set from a client code. * The resize and redraw callbacks can be set from a client code.
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 -v videotestsrc ! glfiltercube ! glimagesink * gst-launch-1.0 -v videotestsrc ! glfiltercube ! glimagesink
* ]| A pipeline to map textures on the 6 cube faces. * ]| A pipeline to map textures on the 6 cube faces.
@ -37,7 +37,7 @@
* gst-launch-1.0 -v videotestsrc ! video/x-raw, width=640, height=480 ! glfiltercube ! glimagesink * gst-launch-1.0 -v videotestsrc ! video/x-raw, width=640, height=480 ! glfiltercube ! glimagesink
* ]| Resize scene before drawing the cube. * ]| Resize scene before drawing the cube.
* The scene size is greater than the input video size. * The scene size is greater than the input video size.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -21,11 +21,11 @@
/** /**
* SECTION:element-glfilterglass * SECTION:element-glfilterglass
* @title: glfilterglass
* *
* Map textures on moving glass. * Map textures on moving glass.
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 -v videotestsrc ! glfilterglass ! glimagesink * gst-launch-1.0 -v videotestsrc ! glfilterglass ! glimagesink
* ]| A pipeline inspired from http://www.mdk.org.pl/2007/11/17/gl-colorspace-conversions * ]| A pipeline inspired from http://www.mdk.org.pl/2007/11/17/gl-colorspace-conversions
@ -33,7 +33,7 @@
* |[ * |[
* gst-launch-1.0 -v videotestsrc ! glfilterglass ! video/x-raw, width=640, height=480 ! glimagesink * gst-launch-1.0 -v videotestsrc ! glfilterglass ! video/x-raw, width=640, height=480 ! glimagesink
* ]| The scene is greater than the input size. * ]| The scene is greater than the input size.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,15 +20,15 @@
/** /**
* SECTION:element-glfilterreflectedscreen * SECTION:element-glfilterreflectedscreen
* @title: glfilterreflectedscreen
* *
* Map Video Texture upon a screen, on a reflecting surface * Map Video Texture upon a screen, on a reflecting surface
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 videotestsrc ! glupload ! glfilterreflectedscreen ! glimagesink * gst-launch-1.0 videotestsrc ! glupload ! glfilterreflectedscreen ! glimagesink
* ]| * ]|
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -21,11 +21,11 @@
/** /**
* SECTION:element-glshader * SECTION:element-glshader
* @title: glshader
* *
* OpenGL fragment shader filter * OpenGL fragment shader filter
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 videotestsrc ! glupload ! glshader fragment="\"`cat myshader.frag`\"" ! glimagesink * gst-launch-1.0 videotestsrc ! glupload ! glshader fragment="\"`cat myshader.frag`\"" ! glimagesink
* ]| * ]|
@ -45,13 +45,12 @@
* uniform float time; * uniform float time;
* uniform float width; * uniform float width;
* uniform float height; * uniform float height;
* *
* void main () { * void main () {
* gl_FragColor = texture2D( tex, v_texcoord ); * gl_FragColor = texture2D( tex, v_texcoord );
* } * }
* ]| * ]|
* *
* </refsect2>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
#include "config.h" #include "config.h"

View file

@ -23,6 +23,7 @@
/** /**
* SECTION:element-glimagesink * SECTION:element-glimagesink
* @title: glimagesink
* *
* glimagesink renders video frames to a drawable on a local or remote * glimagesink renders video frames to a drawable on a local or remote
* display using OpenGL. This element can receive a Window ID from the * display using OpenGL. This element can receive a Window ID from the
@ -34,28 +35,23 @@
* See the #GstGLDisplay documentation for a list of environment variables that * See the #GstGLDisplay documentation for a list of environment variables that
* can override window/platform detection. * can override window/platform detection.
* *
* <refsect2> * ## Scaling
* <title>Scaling</title> *
* <para>
* Depending on the driver, OpenGL handles hardware accelerated * Depending on the driver, OpenGL handles hardware accelerated
* scaling of video frames. This means that the element will just accept * scaling of video frames. This means that the element will just accept
* incoming video frames no matter their geometry and will then put them to the * incoming video frames no matter their geometry and will then put them to the
* drawable scaling them on the fly. Using the #GstGLImageSink:force-aspect-ratio * drawable scaling them on the fly. Using the #GstGLImageSink:force-aspect-ratio
* property it is possible to enforce scaling with a constant aspect ratio, * property it is possible to enforce scaling with a constant aspect ratio,
* which means drawing black borders around the video frame. * which means drawing black borders around the video frame.
* </para> *
* </refsect2> * ## Events
* <refsect2> *
* <title>Events</title>
* <para>
* Through the gl thread, glimagesink handles some events coming from the drawable * Through the gl thread, glimagesink handles some events coming from the drawable
* to manage its appearance even when the data is not flowing (GST_STATE_PAUSED). * to manage its appearance even when the data is not flowing (GST_STATE_PAUSED).
* That means that even when the element is paused, it will receive expose events * That means that even when the element is paused, it will receive expose events
* from the drawable and draw the latest frame with correct borders/aspect-ratio. * from the drawable and draw the latest frame with correct borders/aspect-ratio.
* </para> *
* </refsect2> * ## Examples
* <refsect2>
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 -v videotestsrc ! video/x-raw ! glimagesink * gst-launch-1.0 -v videotestsrc ! video/x-raw ! glimagesink
* ]| A pipeline to test hardware scaling. * ]| A pipeline to test hardware scaling.
@ -80,7 +76,7 @@
* ]| The graphic FPS scene can be greater than the input video FPS. * ]| The graphic FPS scene can be greater than the input video FPS.
* The graphic scene can be written from a client code through the * The graphic scene can be written from a client code through the
* two glfilterapp properties. * two glfilterapp properties.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,14 +20,14 @@
/** /**
* SECTION:element-glmosaic * SECTION:element-glmosaic
* @title: glmosaic
* *
* glmixer sub element. N gl sink pads to 1 source pad. * glmixer sub element. N gl sink pads to 1 source pad.
* N + 1 OpenGL contexts shared together. * N + 1 OpenGL contexts shared together.
* N <= 6 because the rendering is more like a cube than a mosaic * N <= 6 because the rendering is more like a cube than a mosaic
* Each opengl input stream is rendered on a cube face * Each opengl input stream is rendered on a cube face
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 videotestsrc ! video/x-raw, format=YUY2 ! queue ! glmosaic name=m ! glimagesink \ * gst-launch-1.0 videotestsrc ! video/x-raw, format=YUY2 ! queue ! glmosaic name=m ! glimagesink \
* videotestsrc pattern=12 ! video/x-raw, format=I420, framerate=5/1, width=100, height=200 ! queue ! m. \ * videotestsrc pattern=12 ! video/x-raw, format=I420, framerate=5/1, width=100, height=200 ! queue ! m. \
@ -37,7 +37,7 @@
* videotestsrc ! gleffects effect=6 ! queue ! m. * videotestsrc ! gleffects effect=6 ! queue ! m.
* ]| * ]|
* FBO (Frame Buffer Object) is required. * FBO (Frame Buffer Object) is required.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,16 +20,16 @@
/** /**
* SECTION:element-gloverlay * SECTION:element-gloverlay
* @title: gloverlay
* *
* Overlay GL video texture with a PNG image * Overlay GL video texture with a PNG image
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 videotestsrc ! gloverlay location=image.jpg ! glimagesink * gst-launch-1.0 videotestsrc ! gloverlay location=image.jpg ! glimagesink
* ]| * ]|
* FBO (Frame Buffer Object) is required. * FBO (Frame Buffer Object) is required.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -23,6 +23,7 @@
/** /**
* SECTION:element-glstereomix * SECTION:element-glstereomix
* @title: glstereomix
* *
* Combine 2 input streams to produce a stereoscopic output * Combine 2 input streams to produce a stereoscopic output
* stream. Input views are taken from the left pad and right pad * stream. Input views are taken from the left pad and right pad
@ -34,8 +35,7 @@
* The multiview representation on the output is chosen according to * The multiview representation on the output is chosen according to
* the downstream caps. * the downstream caps.
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 -v videotestsrc pattern=ball name=left \ * gst-launch-1.0 -v videotestsrc pattern=ball name=left \
* videotestsrc name=right glstereomix name=mix \ * videotestsrc name=right glstereomix name=mix \
@ -52,10 +52,10 @@
* right. ! video/x-raw,width=640,height=480 ! glupload ! mix. \ * right. ! video/x-raw,width=640,height=480 ! glupload ! mix. \
* mix. ! video/x-raw'(memory:GLMemory)',multiview-mode=top-bottom ! \ * mix. ! video/x-raw'(memory:GLMemory)',multiview-mode=top-bottom ! \
* glcolorconvert ! gldownload ! queue ! x264enc ! h264parse ! \ * glcolorconvert ! gldownload ! queue ! x264enc ! h264parse ! \
* mp4mux ! progressreport ! filesink location=output.mp4 * mp4mux ! progressreport ! filesink location=output.mp4
* ]| Mix the input from a camera to the left view, and videotestsrc to the right view, * ]| Mix the input from a camera to the left view, and videotestsrc to the right view,
* and encode as a top-bottom frame packed H.264 video. * and encode as a top-bottom frame packed H.264 video.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
#include "config.h" #include "config.h"

View file

@ -20,16 +20,16 @@
/** /**
* SECTION:element-glstereosplit * SECTION:element-glstereosplit
* @title: glstereosplit
* *
* Receive a stereoscopic video stream and split into left/right * Receive a stereoscopic video stream and split into left/right
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 videotestsrc ! glstereosplit name=s ! queue ! glimagesink s. ! queue ! glimagesink * gst-launch-1.0 videotestsrc ! glstereosplit name=s ! queue ! glimagesink s. ! queue ! glimagesink
* ]| * ]|
* FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -23,21 +23,19 @@
/** /**
* SECTION:element-gltestsrc * SECTION:element-gltestsrc
* @title: gltestsrc
* *
* <refsect2>
* <para>
* The gltestsrc element is used to produce test video texture. * The gltestsrc element is used to produce test video texture.
* The video test produced can be controlled with the "pattern" * The video test produced can be controlled with the "pattern"
* property. * property.
* </para> *
* <title>Example launch line</title> * ## Example launch line
* <para> *
* <programlisting> * |[
* gst-launch-1.0 -v gltestsrc pattern=smpte ! glimagesink * gst-launch-1.0 -v gltestsrc pattern=smpte ! glimagesink
* </programlisting> * ]|
* Shows original SMPTE color bars in a window. * Shows original SMPTE color bars in a window.
* </para> *
* </refsect2>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -21,11 +21,11 @@
/** /**
* SECTION:element-gltransformation * SECTION:element-gltransformation
* @title: gltransformation
* *
* Transforms video on the GPU. * Transforms video on the GPU.
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 gltestsrc ! gltransformation rotation-z=45 ! glimagesink * gst-launch-1.0 gltestsrc ! gltransformation rotation-z=45 ! glimagesink
* ]| A pipeline to rotate by 45 degrees * ]| A pipeline to rotate by 45 degrees
@ -38,7 +38,7 @@
* |[ * |[
* gst-launch-1.0 gltestsrc ! gltransformation rotation-x=-45 ortho=True ! glimagesink * gst-launch-1.0 gltestsrc ! gltransformation rotation-x=-45 ortho=True ! glimagesink
* ]| Rotate the video around the X-Axis by -45° with an orthographic projection * ]| Rotate the video around the X-Axis by -45° with an orthographic projection
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,15 +20,15 @@
/** /**
* SECTION:element-glvideo_flip * SECTION:element-glvideo_flip
* @title: glvideo_flip
* *
* Transforms video on the GPU. * Transforms video on the GPU.
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 videotestsrc ! glupload ! glvideoflip method=clockwise ! glimagesinkelement * gst-launch-1.0 videotestsrc ! glupload ! glvideoflip method=clockwise ! glimagesinkelement
* ]| This pipeline flips the test image 90 degrees clockwise. * ]| This pipeline flips the test image 90 degrees clockwise.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,13 +20,13 @@
/** /**
* SECTION:element-glvideomixer * SECTION:element-glvideomixer
* @title: glvideomixer
* *
* Composites a number of streams into a single output scene using OpenGL in * Composites a number of streams into a single output scene using OpenGL in
* a similar fashion to compositor and videomixer. See the compositor plugin * a similar fashion to compositor and videomixer. See the compositor plugin
* for documentation about the #GstGLVideoMixerPad properties. * for documentation about the #GstGLVideoMixerPad properties.
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 glvideomixer name=m ! glimagesink \ * gst-launch-1.0 glvideomixer name=m ! glimagesink \
* videotestsrc ! video/x-raw, format=YUY2 ! glupload ! glcolorconvert ! m. \ * videotestsrc ! video/x-raw, format=YUY2 ! glupload ! glcolorconvert ! m. \
@ -36,7 +36,7 @@
* videotestsrc ! glupload ! glfiltercube ! queue ! m. \ * videotestsrc ! glupload ! glfiltercube ! queue ! m. \
* videotestsrc ! glupload ! gleffects effect=6 ! queue ! m.gst-launch-1.0 glvideomixer name=m ! glimagesink \ * videotestsrc ! glupload ! gleffects effect=6 ! queue ! m.gst-launch-1.0 glvideomixer name=m ! glimagesink \
* ]| * ]|
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -22,14 +22,14 @@
/** /**
* SECTION:element-glviewconvert * SECTION:element-glviewconvert
* @title: glviewconvert
* *
* Convert stereoscopic video between different representations using fragment shaders. * Convert stereoscopic video between different representations using fragment shaders.
* *
* The element can use either property settings or caps negotiation to choose the * The element can use either property settings or caps negotiation to choose the
* input and output formats to process. * input and output formats to process.
* *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 videotestsrc ! glupload ! glviewconvert ! glimagesink * gst-launch-1.0 videotestsrc ! glupload ! glviewconvert ! glimagesink
* ]| Simple placebo example demonstrating identity passthrough of mono video * ]| Simple placebo example demonstrating identity passthrough of mono video
@ -39,7 +39,7 @@
* ]| Force re-interpretation of the input checkers pattern as a side-by-side stereoscopic * ]| Force re-interpretation of the input checkers pattern as a side-by-side stereoscopic
* image and display in glimagesink. * image and display in glimagesink.
* FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -23,20 +23,20 @@
/** /**
* SECTION:plugin-opengl * SECTION:plugin-opengl
* @title: GstOpengl
* *
* Cross-platform OpenGL plugin. * Cross-platform OpenGL plugin.
* <refsect2> *
* <title>Debugging</title> * ## Debugging
* </refsect2> *
* <refsect2> * ## Examples
* <title>Examples</title>
* |[ * |[
* gst-launch-1.0 --gst-debug=gldisplay:3 videotestsrc ! glimagesink * gst-launch-1.0 --gst-debug=gldisplay:3 videotestsrc ! glimagesink
* ]| A debugging pipeline. * ]| A debugging pipeline.
|[ |[
* GST_DEBUG=gl*:6 gst-launch-1.0 videotestsrc ! glimagesink * GST_DEBUG=gl*:6 gst-launch-1.0 videotestsrc ! glimagesink
* ]| A debugging pipeline related to shaders. * ]| A debugging pipeline related to shaders.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,6 +20,7 @@
/** /**
* SECTION:gtkgstsink * SECTION:gtkgstsink
* @title: GstGtkBaseSink
* *
*/ */

View file

@ -19,8 +19,8 @@
*/ */
/** /**
* SECTION:gstgtkglsink * SECTION:element-gtkglsink
* * @title: gtkglsink
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -19,7 +19,8 @@
*/ */
/** /**
* SECTION:gtkgstsink * SECTION:element-gtkgstsink
* @title: gtkgstsink
* *
*/ */

View file

@ -41,6 +41,7 @@
/** /**
* SECTION:gtkgstglwidget * SECTION:gtkgstglwidget
* @title: GtkGstGlWidget
* @short_description: a #GtkGLArea that renders GStreamer video #GstBuffers * @short_description: a #GtkGLArea that renders GStreamer video #GstBuffers
* @see_also: #GtkGLArea, #GstBuffer * @see_also: #GtkGLArea, #GstBuffer
* *

View file

@ -29,6 +29,7 @@
/** /**
* SECTION:gtkgstwidget * SECTION:gtkgstwidget
* @title: GtkGstWidget
* @short_description: a #GtkWidget that renders GStreamer video #GstBuffers * @short_description: a #GtkWidget that renders GStreamer video #GstBuffers
* @see_also: #GtkDrawingArea, #GstBuffer * @see_also: #GtkDrawingArea, #GstBuffer
* *

View file

@ -26,15 +26,15 @@
*/ */
/** /**
* SECTION:element-hlsdemux * SECTION:element-hlsdemux
* @title: hlsdemux
* *
* HTTP Live Streaming demuxer element. * HTTP Live Streaming demuxer element.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch-1.0 souphttpsrc location=http://devimages.apple.com/iphone/samples/bipbop/gear4/prog_index.m3u8 ! hlsdemux ! decodebin ! videoconvert ! videoscale ! autovideosink * gst-launch-1.0 souphttpsrc location=http://devimages.apple.com/iphone/samples/bipbop/gear4/prog_index.m3u8 ! hlsdemux ! decodebin ! videoconvert ! videoscale ! autovideosink
* ]| * ]|
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -19,15 +19,15 @@
/** /**
* SECTION:element-hlssink * SECTION:element-hlssink
* @title: hlssink
* *
* HTTP Live Streaming sink/server * HTTP Live Streaming sink/server
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch-1.0 videotestsrc is-live=true ! x264enc ! mpegtsmux ! hlssink max-files=5 * gst-launch-1.0 videotestsrc is-live=true ! x264enc ! mpegtsmux ! hlssink max-files=5
* ]| * ]|
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H
#include "config.h" #include "config.h"

View file

@ -19,6 +19,7 @@
/** /**
* SECTION:element-iqa * SECTION:element-iqa
* @title: iqa
* @short_description: Image Quality Assessment plugin. * @short_description: Image Quality Assessment plugin.
* *
* IQA will perform full reference image quality assessment, with the * IQA will perform full reference image quality assessment, with the
@ -48,13 +49,12 @@
* sink_2\=\(double\)0.0082939683976297474\;", * sink_2\=\(double\)0.0082939683976297474\;",
* time=(guint64)0; * time=(guint64)0;
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch-1.0 -m uridecodebin uri=file:///test/file/1 ! iqa name=iqa do-dssim=true \ * gst-launch-1.0 -m uridecodebin uri=file:///test/file/1 ! iqa name=iqa do-dssim=true \
* ! videoconvert ! autovideosink uridecodebin uri=file:///test/file/2 ! iqa. * ! videoconvert ! autovideosink uridecodebin uri=file:///test/file/2 ! iqa.
* ]| This pipeline will output messages to the console for each set of compared frames. * ]| This pipeline will output messages to the console for each set of compared frames.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -45,33 +45,29 @@
/** /**
* SECTION:element-katedec * SECTION:element-katedec
* @title: katedec
* @see_also: oggdemux * @see_also: oggdemux
* *
* <refsect2>
* <para>
* This element decodes Kate streams * This element decodes Kate streams
* <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec * <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec
* for text based data, such as subtitles. Any number of kate streams can be * for text based data, such as subtitles. Any number of kate streams can be
* embedded in an Ogg stream. * embedded in an Ogg stream.
* </para> *
* <para>
* libkate (see above url) is needed to build this plugin. * libkate (see above url) is needed to build this plugin.
* </para> *
* <title>Example pipeline</title> * ## Example pipeline
* <para> *
* This explicitly decodes a Kate stream: * This explicitly decodes a Kate stream:
* <programlisting> * |[
* gst-launch-1.0 filesrc location=test.ogg ! oggdemux ! katedec ! fakesink silent=TRUE * gst-launch-1.0 filesrc location=test.ogg ! oggdemux ! katedec ! fakesink silent=TRUE
* </programlisting> * ]|
* </para> *
* <para>
* This will automatically detect and use any Kate streams multiplexed * This will automatically detect and use any Kate streams multiplexed
* in an Ogg stream: * in an Ogg stream:
* <programlisting> * |[
* gst-launch-1.0 playbin uri=file:///tmp/test.ogg * gst-launch-1.0 playbin uri=file:///tmp/test.ogg
* </programlisting> * ]|
* </para> *
* </refsect2>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -46,26 +46,23 @@
/** /**
* SECTION:element-kateenc * SECTION:element-kateenc
* @title: kateenc
* @see_also: oggmux * @see_also: oggmux
* *
* <refsect2>
* <para>
* This element encodes Kate streams * This element encodes Kate streams
* <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec * <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec
* for text based data, such as subtitles. Any number of kate streams can be * for text based data, such as subtitles. Any number of kate streams can be
* embedded in an Ogg stream. * embedded in an Ogg stream.
* </para> *
* <para>
* libkate (see above url) is needed to build this plugin. * libkate (see above url) is needed to build this plugin.
* </para> *
* <title>Example pipeline</title> * ## Example pipeline
* <para> *
* This encodes a DVD SPU track to a Kate stream: * This encodes a DVD SPU track to a Kate stream:
* <programlisting> * |[
* gst-launch-1.0 dvdreadsrc ! dvddemux ! dvdsubparse ! kateenc category=spu-subtitles ! oggmux ! filesink location=test.ogg * gst-launch-1.0 dvdreadsrc ! dvddemux ! dvdsubparse ! kateenc category=spu-subtitles ! oggmux ! filesink location=test.ogg
* </programlisting> * ]|
* </para> *
* </refsect2>
*/ */
/* FIXME: /* FIXME:

View file

@ -21,40 +21,35 @@
/** /**
* SECTION:element-kateparse * SECTION:element-kateparse
* @title: kateparse
* @short_description: parses kate streams * @short_description: parses kate streams
* @see_also: katedec, vorbisparse, oggdemux, theoraparse * @see_also: katedec, vorbisparse, oggdemux, theoraparse
* *
* <refsect2>
* <para>
* The kateparse element will parse the header packets of the Kate * The kateparse element will parse the header packets of the Kate
* stream and put them as the streamheader in the caps. This is used in the * stream and put them as the streamheader in the caps. This is used in the
* multifdsink case where you want to stream live kate streams to multiple * multifdsink case where you want to stream live kate streams to multiple
* clients, each client has to receive the streamheaders first before they can * clients, each client has to receive the streamheaders first before they can
* consume the kate packets. * consume the kate packets.
* </para> *
* <para>
* This element also makes sure that the buffers that it pushes out are properly * This element also makes sure that the buffers that it pushes out are properly
* timestamped and that their offset and offset_end are set. The buffers that * timestamped and that their offset and offset_end are set. The buffers that
* kateparse outputs have all of the metadata that oggmux expects to receive, * kateparse outputs have all of the metadata that oggmux expects to receive,
* which allows you to (for example) remux an ogg/kate file. * which allows you to (for example) remux an ogg/kate file.
* </para> *
* <title>Example pipelines</title> * ## Example pipelines
* <para> *
* <programlisting> * |[
* gst-launch-1.0 -v filesrc location=kate.ogg ! oggdemux ! kateparse ! fakesink * gst-launch-1.0 -v filesrc location=kate.ogg ! oggdemux ! kateparse ! fakesink
* </programlisting> * ]|
* This pipeline shows that the streamheader is set in the caps, and that each * This pipeline shows that the streamheader is set in the caps, and that each
* buffer has the timestamp, duration, offset, and offset_end set. * buffer has the timestamp, duration, offset, and offset_end set.
* </para> *
* <para> * |[
* <programlisting>
* gst-launch-1.0 filesrc location=kate.ogg ! oggdemux ! kateparse \ * gst-launch-1.0 filesrc location=kate.ogg ! oggdemux ! kateparse \
* ! oggmux ! filesink location=kate-remuxed.ogg * ! oggmux ! filesink location=kate-remuxed.ogg
* </programlisting> * ]|
* This pipeline shows remuxing. kate-remuxed.ogg might not be exactly the same * This pipeline shows remuxing. kate-remuxed.ogg might not be exactly the same
* as kate.ogg, but they should produce exactly the same decoded data. * as kate.ogg, but they should produce exactly the same decoded data.
* </para>
* </refsect2>
* *
*/ */

View file

@ -21,46 +21,41 @@
/** /**
* SECTION:element-katetag * SECTION:element-katetag
* @title: katetag
* @see_also: #oggdemux, #oggmux, #kateparse, #GstTagSetter * @see_also: #oggdemux, #oggmux, #kateparse, #GstTagSetter
* @short_description: retags kate streams * @short_description: retags kate streams
* *
* <refsect2>
* <para>
* The katetag element can change the tag contained within a raw * The katetag element can change the tag contained within a raw
* kate stream. Specifically, it modifies the comments header packet * kate stream. Specifically, it modifies the comments header packet
* of the kate stream, as well as the language and category of the * of the kate stream, as well as the language and category of the
* kate stream. * kate stream.
* </para> *
* <para>
* The element will also process the stream as the #kateparse element does * The element will also process the stream as the #kateparse element does
* so it can be used when remuxing an Ogg Kate stream, without additional * so it can be used when remuxing an Ogg Kate stream, without additional
* elements. * elements.
* </para> *
* <para>
* Applications can set the tags to write using the #GstTagSetter interface. * Applications can set the tags to write using the #GstTagSetter interface.
* Tags contained within the kate stream will be picked up * Tags contained within the kate stream will be picked up
* automatically (and merged according to the merge mode set via the tag * automatically (and merged according to the merge mode set via the tag
* setter interface). * setter interface).
* </para> *
* <title>Example pipelines</title> * ## Example pipelines
* <para> *
* This element is only useful with gst-launch-1.0 for modifying the language * This element is only useful with gst-launch-1.0 for modifying the language
* and/or category (which are properties of the stream located in the kate * and/or category (which are properties of the stream located in the kate
* beginning of stream header), because it does not support setting the tags * beginning of stream header), because it does not support setting the tags
* on a #GstTagSetter interface. Conceptually, the element will usually be * on a #GstTagSetter interface. Conceptually, the element will usually be
* used like: * used like:
* <programlisting> * |[
* gst-launch-1.0 -v filesrc location=foo.ogg ! oggdemux ! katetag ! oggmux ! filesink location=bar.ogg * gst-launch-1.0 -v filesrc location=foo.ogg ! oggdemux ! katetag ! oggmux ! filesink location=bar.ogg
* </programlisting> * ]|
* </para> *
* <para>
* This pipeline will set the language and category of the stream to the * This pipeline will set the language and category of the stream to the
* given values: * given values:
* <programlisting> * |[
* gst-launch-1.0 -v filesrc location=foo.ogg ! oggdemux ! katetag language=pt_BR category=subtitles ! oggmux ! filesink location=bar.ogg * gst-launch-1.0 -v filesrc location=foo.ogg ! oggdemux ! katetag language=pt_BR category=subtitles ! oggmux ! filesink location=bar.ogg
* </programlisting> * ]|
* </para> *
* </refsect2>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -45,32 +45,29 @@
/** /**
* SECTION:element-tiger * SECTION:element-tiger
* @title: tiger
* @see_also: katedec * @see_also: katedec
* *
* <refsect2>
* <para>
* This element decodes and renders Kate streams * This element decodes and renders Kate streams
* <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec * <ulink url="http://libkate.googlecode.com/">Kate</ulink> is a free codec
* for text based data, such as subtitles. Any number of kate streams can be * for text based data, such as subtitles. Any number of kate streams can be
* embedded in an Ogg stream. * embedded in an Ogg stream.
* </para> *
* <para>
* libkate (see above url) and <ulink url="http://libtiger.googlecode.com/">libtiger</ulink> * libkate (see above url) and <ulink url="http://libtiger.googlecode.com/">libtiger</ulink>
* are needed to build this element. * are needed to build this element.
* </para> *
* <title>Example pipeline</title> * ## Example pipeline
* <para> *
* This pipeline renders a Kate stream on top of a Theora video multiplexed * This pipeline renders a Kate stream on top of a Theora video multiplexed
* in the same stream: * in the same stream:
* <programlisting> * |[
* gst-launch-1.0 \ * gst-launch-1.0 \
* filesrc location=video.ogg ! oggdemux name=demux \ * filesrc location=video.ogg ! oggdemux name=demux \
* demux. ! queue ! theoradec ! videoconvert ! tiger name=tiger \ * demux. ! queue ! theoradec ! videoconvert ! tiger name=tiger \
* demux. ! queue ! kateparse ! tiger. \ * demux. ! queue ! kateparse ! tiger. \
* tiger. ! videoconvert ! autovideosink * tiger. ! videoconvert ! autovideosink
* </programlisting> * ]|
* </para> *
* </refsect2>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -22,6 +22,7 @@
/** /**
* SECTION:element-ladspa * SECTION:element-ladspa
* @title: ladspa
* @short_description: bridge for LADSPA (Linux Audio Developer's Simple Plugin API) * @short_description: bridge for LADSPA (Linux Audio Developer's Simple Plugin API)
* @see_also: #GstAudioConvert #GstAudioResample, #GstAudioTestSrc, #GstAutoAudioSink * @see_also: #GstAudioConvert #GstAudioResample, #GstAudioTestSrc, #GstAutoAudioSink
* *
@ -32,8 +33,7 @@
* element classification. The functionality you get depends on the LADSPA plugins * element classification. The functionality you get depends on the LADSPA plugins
* you have installed. * you have installed.
* *
* <refsect2> * ## Example LADSPA line without this plugins
* <title>Example LADSPA line without this plugins</title>
* |[ * |[
* (padsp) listplugins * (padsp) listplugins
* (padsp) analyseplugin cmt.so amp_mono * (padsp) analyseplugin cmt.so amp_mono
@ -41,16 +41,13 @@
* (padsp) applyplugin testin.wav testout.wav cmt.so amp_mono 2 * (padsp) applyplugin testin.wav testout.wav cmt.so amp_mono 2
* gst-launch-1.0 playbin uri=file://"$PWD"/testout.wav * gst-launch-1.0 playbin uri=file://"$PWD"/testout.wav
* ]| Decode any audio file into wav with the format expected for the specific ladspa plugin to be applied, apply the ladspa filter and play it. * ]| Decode any audio file into wav with the format expected for the specific ladspa plugin to be applied, apply the ladspa filter and play it.
* </refsect2>
* *
* Now with this plugin: * Now with this plugin:
* *
* <refsect2> * ## Example LADSPA line with this plugins
* <title>Example LADSPA line with this plugins</title>
* |[ * |[
* gst-launch-1.0 autoaudiosrc ! ladspa-cmt-so-amp-mono gain=2 ! ladspa-caps-so-plate ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! tee name=myT myT. ! queue ! autoaudiosink myT. ! queue ! audioconvert ! goom ! videoconvert ! xvimagesink pixel-aspect-ratio=3/4 * gst-launch-1.0 autoaudiosrc ! ladspa-cmt-so-amp-mono gain=2 ! ladspa-caps-so-plate ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! tee name=myT myT. ! queue ! autoaudiosink myT. ! queue ! audioconvert ! goom ! videoconvert ! xvimagesink pixel-aspect-ratio=3/4
* ]| Get audio input, filter it through CAPS Plate and TAP Stereo Echo, play it and show a visualization (recommended headphones). * ]| Get audio input, filter it through CAPS Plate and TAP Stereo Echo, play it and show a visualization (recommended headphones).
* </refsect2>
* *
* In case you wonder the plugin naming scheme, quoting ladspa.h: * In case you wonder the plugin naming scheme, quoting ladspa.h:
* "Plugin types should be identified by file and label rather than by * "Plugin types should be identified by file and label rather than by
@ -61,60 +58,52 @@
* on top of the audio in and out one, so some parameters are readable too. * on top of the audio in and out one, so some parameters are readable too.
* *
* You can see the listing of plugins available with: * You can see the listing of plugins available with:
* <refsect2> *
* <title>Inspecting the plugins list</title> * ## Inspecting the plugins list
* |[ * |[
* gst-inspect ladspa * gst-inspect ladspa
* ]| List available LADSPA plugins on gstreamer. * ]| List available LADSPA plugins on gstreamer.
* </refsect2>
* *
* You can see the parameters of any plugin with: * You can see the parameters of any plugin with:
* <refsect2> *
* <title>Inspecting the plugins</title> * ## Inspecting the plugins
* |[ * |[
* gst-inspect ladspa-retro-flange-1208-so-retroflange * gst-inspect ladspa-retro-flange-1208-so-retroflange
* ]| List details of the plugin, parameters, range and defaults included. * ]| List details of the plugin, parameters, range and defaults included.
* </refsect2>
* *
* The elements categorize in: * The elements categorize in:
* <itemizedlist> *
* <listitem><para>Filter/Effect/Audio/LADSPA:</para> * * Filter/Effect/Audio/LADSPA:
* <refsect2> *
* <title>Example Filter/Effect/Audio/LADSPA line with this plugins</title> * ## Example Filter/Effect/Audio/LADSPA line with this plugins
* |[ * |[
* gst-launch-1.0 filesrc location="$myfile" ! decodebin ! audioconvert ! audioresample ! ladspa-calf-so-reverb decay-time=15 high-frq-damp=20000 room-size=5 diffusion=1 wet-amount=2 dry-amount=2 pre-delay=50 bass-cut=20000 treble-cut=20000 ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! autoaudiosink * gst-launch-1.0 filesrc location="$myfile" ! decodebin ! audioconvert ! audioresample ! ladspa-calf-so-reverb decay-time=15 high-frq-damp=20000 room-size=5 diffusion=1 wet-amount=2 dry-amount=2 pre-delay=50 bass-cut=20000 treble-cut=20000 ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! autoaudiosink
* ]| Decode any audio file, filter it through Calf Reverb LADSPA then TAP Stereo Echo, and play it. * ]| Decode any audio file, filter it through Calf Reverb LADSPA then TAP Stereo Echo, and play it.
* </refsect2> *
* </listitem> * * Source/Audio/LADSPA:
* <listitem><para>Source/Audio/LADSPA:</para> *
* <refsect2> * ## Example Source/Audio/LADSPA line with this plugins
* <title>Example Source/Audio/LADSPA line with this plugins</title>
* |[ * |[
* gst-launch-1.0 ladspasrc-sine-so-sine-fcac frequency=220 amplitude=100 ! audioconvert ! autoaudiosink * gst-launch-1.0 ladspasrc-sine-so-sine-fcac frequency=220 amplitude=100 ! audioconvert ! autoaudiosink
* ]| Generate a sine wave with Sine Oscillator (Freq:control, Amp:control) and play it. * ]| Generate a sine wave with Sine Oscillator (Freq:control, Amp:control) and play it.
* </refsect2> *
* <refsect2> * ## Example Source/Audio/LADSPA line with this plugins
* <title>Example Source/Audio/LADSPA line with this plugins</title>
* |[ * |[
* gst-launch-1.0 ladspasrc-caps-so-click bpm=240 volume=1 ! autoaudiosink * gst-launch-1.0 ladspasrc-caps-so-click bpm=240 volume=1 ! autoaudiosink
* ]| Generate clicks with CAPS Click - Metronome at 240 beats per minute and play it. * ]| Generate clicks with CAPS Click - Metronome at 240 beats per minute and play it.
* </refsect2> *
* <refsect2> * ## Example Source/Audio/LADSPA line with this plugins
* <title>Example Source/Audio/LADSPA line with this plugins</title>
* |[ * |[
* gst-launch-1.0 ladspasrc-random-1661-so-random-fcsc-oa ! ladspa-cmt-so-amp-mono gain=1.5 ! ladspa-caps-so-plate ! tee name=myT myT. ! queue ! autoaudiosink myT. ! queue ! audioconvert ! wavescope ! videoconvert ! autovideosink * gst-launch-1.0 ladspasrc-random-1661-so-random-fcsc-oa ! ladspa-cmt-so-amp-mono gain=1.5 ! ladspa-caps-so-plate ! tee name=myT myT. ! queue ! autoaudiosink myT. ! queue ! audioconvert ! wavescope ! videoconvert ! autovideosink
* ]| Generate random wave, filter it through Mono Amplifier and Versatile Plate Reverb, and play, while showing, it. * ]| Generate random wave, filter it through Mono Amplifier and Versatile Plate Reverb, and play, while showing, it.
* </refsect2> *
* </listitem> * * Sink/Audio/LADSPA:
* <listitem><para>Sink/Audio/LADSPA:</para> *
* <refsect2> * ## Example Sink/Audio/LADSPA line with this plugins
* <title>Example Sink/Audio/LADSPA line with this plugins</title>
* |[ * |[
* gst-launch-1.0 autoaudiosrc ! ladspa-cmt-so-amp-mono gain=2 ! ladspa-caps-so-plate ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! tee name=myT myT. ! audioconvert ! audioresample ! queue ! ladspasink-cmt-so-null-ai myT. ! audioconvert ! audioresample ! queue ! goom ! videoconvert ! xvimagesink pixel-aspect-ratio=3/4 * gst-launch-1.0 autoaudiosrc ! ladspa-cmt-so-amp-mono gain=2 ! ladspa-caps-so-plate ! ladspa-tap-echo-so-tap-stereo-echo l-delay=500 r-haas-delay=500 ! tee name=myT myT. ! audioconvert ! audioresample ! queue ! ladspasink-cmt-so-null-ai myT. ! audioconvert ! audioresample ! queue ! goom ! videoconvert ! xvimagesink pixel-aspect-ratio=3/4
* ]| Get audio input, filter it through Mono Amplifier, CAPS Plate LADSPA and TAP Stereo Echo, explicitly annul audio with Null (Audio Output), and play a visualization (recommended headphones). * ]| Get audio input, filter it through Mono Amplifier, CAPS Plate LADSPA and TAP Stereo Echo, explicitly annul audio with Null (Audio Output), and play a visualization (recommended headphones).
* </refsect2> *
* </listitem>
* </itemizedlist>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -21,15 +21,15 @@
/** /**
* SECTION:element-libde265dec * SECTION:element-libde265dec
* @title: libde265dec
* *
* Decodes HEVC/H.265 video. * Decodes HEVC/H.265 video.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch-1.0 filesrc location=bitstream.hevc ! 'video/x-hevc,stream-format=byte-stream,framerate=25/1' ! libde265dec ! autovideosink * gst-launch-1.0 filesrc location=bitstream.hevc ! 'video/x-hevc,stream-format=byte-stream,framerate=25/1' ! libde265dec ! autovideosink
* ]| The above pipeline decodes the HEVC/H.265 bitstream and renders it to the screen. * ]| The above pipeline decodes the HEVC/H.265 bitstream and renders it to the screen.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -22,6 +22,7 @@
/** /**
* SECTION:element-lv2 * SECTION:element-lv2
* @title: lv2
* @short_description: bridge for LV2. * @short_description: bridge for LV2.
* *
* LV2 is a standard for plugins and matching host applications, * LV2 is a standard for plugins and matching host applications,

View file

@ -24,6 +24,7 @@
/** /**
* SECTION:element-openalsink * SECTION:element-openalsink
* @title: openalsink
* @see_also: openalsrc * @see_also: openalsrc
* @short_description: capture raw audio samples through OpenAL * @short_description: capture raw audio samples through OpenAL
* *
@ -31,8 +32,7 @@
* *
* Unfortunately the capture API doesn't have a format enumeration/check. all you can do is try opening it and see if it works. * Unfortunately the capture API doesn't have a format enumeration/check. all you can do is try opening it and see if it works.
* *
* <refsect2> * ## Example pipelines
* <title>Example pipelines</title>
* |[ * |[
* gst-launch-1.0 audiotestsrc ! audioconvert ! volume volume=0.5 ! openalsink * gst-launch-1.0 audiotestsrc ! audioconvert ! volume volume=0.5 ! openalsink
* ]| will play a sine wave (continuous beep sound) through OpenAL. * ]| will play a sine wave (continuous beep sound) through OpenAL.
@ -42,7 +42,7 @@
* |[ * |[
* gst-launch-1.0 openalsrc ! "audio/x-raw,format=S16LE,rate=44100" ! audioconvert ! volume volume=0.25 ! openalsink * gst-launch-1.0 openalsrc ! "audio/x-raw,format=S16LE,rate=44100" ! audioconvert ! volume volume=0.25 ! openalsink
* ]| will capture and play audio through OpenAL. * ]| will capture and play audio through OpenAL.
* </refsect2> *
*/ */
/* /*

View file

@ -49,20 +49,20 @@
/** /**
* SECTION:element-openalsrc * SECTION:element-openalsrc
* @title: openalsrc
* @see_also: openalsink * @see_also: openalsink
* @short_description: capture raw audio samples through OpenAL * @short_description: capture raw audio samples through OpenAL
* *
* This element captures raw audio samples through OpenAL. * This element captures raw audio samples through OpenAL.
* *
* <refsect2> * ## Example pipelines
* <title>Example pipelines</title>
* |[ * |[
* gst-launch-1.0 -v openalsrc ! audioconvert ! wavenc ! filesink location=stream.wav * gst-launch-1.0 -v openalsrc ! audioconvert ! wavenc ! filesink location=stream.wav
* ]| * will capture sound through OpenAL and encode it to a wav file. * ]| * will capture sound through OpenAL and encode it to a wav file.
* |[ * |[
* gst-launch-1.0 openalsrc ! "audio/x-raw,format=S16LE,rate=44100" ! audioconvert ! volume volume=0.25 ! openalsink * gst-launch-1.0 openalsrc ! "audio/x-raw,format=S16LE,rate=44100" ! audioconvert ! volume volume=0.25 ! openalsink
* ]| will capture and play audio through OpenAL. * ]| will capture and play audio through OpenAL.
* </refsect2> *
*/ */
/* /*

View file

@ -22,16 +22,16 @@
/** /**
* SECTION:element-opusparse * SECTION:element-opusparse
* @title: opusparse
* @see_also: opusenc, opusdec * @see_also: opusenc, opusdec
* *
* This element parses OPUS packets. * This element parses OPUS packets.
* *
* <refsect2> * ## Example pipelines
* <title>Example pipelines</title>
* |[ * |[
* gst-launch-1.0 -v filesrc location=opusdata ! opusparse ! opusdec ! audioconvert ! audioresample ! alsasink * gst-launch-1.0 -v filesrc location=opusdata ! opusparse ! opusdec ! audioconvert ! audioresample ! alsasink
* ]| Decode and plays an unmuxed Opus file. * ]| Decode and plays an unmuxed Opus file.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -369,7 +369,7 @@ rsn_dec_get_type (void)
return type; return type;
} }
/** Audio decoder subclass */ /* Audio decoder subclass */
static GstStaticPadTemplate audio_sink_template = static GstStaticPadTemplate audio_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink", GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK, GST_PAD_SINK,
@ -422,7 +422,7 @@ rsn_audiodec_init (RsnAudioDec * self)
{ {
} }
/** Video decoder subclass */ /* Video decoder subclass */
static GstStaticPadTemplate video_sink_template = static GstStaticPadTemplate video_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink", GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK, GST_PAD_SINK,

View file

@ -25,6 +25,7 @@
/** /**
* SECTION:element-input-selector * SECTION:element-input-selector
* @title: input-selector
* @see_also: #GstOutputSelector * @see_also: #GstOutputSelector
* *
* Direct one out of N input streams to the output pad. * Direct one out of N input streams to the output pad.
@ -32,21 +33,11 @@
* The input pads are from a GstPad subclass and have additional * The input pads are from a GstPad subclass and have additional
* properties, which users may find useful, namely: * properties, which users may find useful, namely:
* *
* <itemizedlist> * * "running-time": Running time of stream on pad (#gint64)
* <listitem> * * "tags": The currently active tags on the pad (#GstTagList, boxed type)
* "running-time": Running time of stream on pad (#gint64) * * "active": If the pad is currently active (#gboolean)
* </listitem> * * "always-ok" : Make an inactive pads return #GST_FLOW_OK instead of #GST_FLOW_NOT_LINKED
* <listitem> *
* "tags": The currently active tags on the pad (#GstTagList, boxed type)
* </listitem>
* <listitem>
* "active": If the pad is currently active (#gboolean)
* </listitem>
* <listitem>
* "always-ok" : Make an inactive pads return #GST_FLOW_OK instead of
* #GST_FLOW_NOT_LINKED
* </listitem>
* </itemizedlist>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -19,15 +19,15 @@
*/ */
/** /**
* SECTION:element-rsvgdec * SECTION:element-rsvgdec
* @title: rsvgdec
* *
* This element renders SVG graphics. * This element renders SVG graphics.
* *
* <refsect2> * ## Example launch lines
* <title>Example launch lines</title>
* |[ * |[
* gst-launch-1.0 filesrc location=image.svg ! rsvgdec ! imagefreeze ! videoconvert ! autovideosink * gst-launch-1.0 filesrc location=image.svg ! rsvgdec ! imagefreeze ! videoconvert ! autovideosink
* ]| render and show a svg image. * ]| render and show a svg image.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -21,6 +21,7 @@
/** /**
* SECTION:element-rsvgoverlay * SECTION:element-rsvgoverlay
* @title: rsvgoverlay
* *
* This element overlays SVG graphics over the video. SVG data can * This element overlays SVG graphics over the video. SVG data can
* either be specified through properties, or fed through the * either be specified through properties, or fed through the
@ -44,8 +45,7 @@
* the values of the x/y/width/height attributes, by setting * the values of the x/y/width/height attributes, by setting
* height-/width-relative to 1.0. and all other attributes to 0. * height-/width-relative to 1.0. and all other attributes to 0.
* *
* <refsect2> * ## Example launch lines
* <title>Example launch lines</title>
* |[ * |[
* gst-launch-1.0 -v videotestsrc ! videoconvert ! rsvgoverlay location=foo.svg ! videoconvert ! autovideosink * gst-launch-1.0 -v videotestsrc ! videoconvert ! rsvgoverlay location=foo.svg ! videoconvert ! autovideosink
* ]| specifies the SVG location through the filename property. * ]| specifies the SVG location through the filename property.
@ -55,7 +55,7 @@
* |[ * |[
* gst-launch-1.0 -v videotestsrc ! videoconvert ! rsvgoverlay data='&lt;svg viewBox="0 0 800 600"&gt;&lt;image x="80%" y="80%" width="10%" height="10%" xlink:href="foo.jpg" /&gt;&lt;/svg&gt;' ! videoconvert ! autovideosink * gst-launch-1.0 -v videotestsrc ! videoconvert ! rsvgoverlay data='&lt;svg viewBox="0 0 800 600"&gt;&lt;image x="80%" y="80%" width="10%" height="10%" xlink:href="foo.jpg" /&gt;&lt;/svg&gt;' ! videoconvert ! autovideosink
* ]| * ]|
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,6 +20,7 @@
/** /**
* SECTION:element-rtmpsink * SECTION:element-rtmpsink
* @title: rtmpsink
* *
* This element delivers data to a streaming server via RTMP. It uses * This element delivers data to a streaming server via RTMP. It uses
* librtmp, and supports any protocols/urls that librtmp supports. * librtmp, and supports any protocols/urls that librtmp supports.
@ -27,12 +28,11 @@
* for librtmp, such as 'flashver=version'. See the librtmp documentation * for librtmp, such as 'flashver=version'. See the librtmp documentation
* for more detail * for more detail
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch-1.0 -v videotestsrc ! ffenc_flv ! flvmux ! rtmpsink location='rtmp://localhost/path/to/stream live=1' * gst-launch-1.0 -v videotestsrc ! ffenc_flv ! flvmux ! rtmpsink location='rtmp://localhost/path/to/stream live=1'
* ]| Encode a test video stream to FLV video format and stream it via RTMP. * ]| Encode a test video stream to FLV video format and stream it via RTMP.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -26,17 +26,17 @@
/** /**
* SECTION:element-rtmpsrc * SECTION:element-rtmpsrc
* @title: rtmpsrc
* *
* This plugin reads data from a local or remote location specified * This plugin reads data from a local or remote location specified
* by an URI. This location can be specified using any protocol supported by * by an URI. This location can be specified using any protocol supported by
* the RTMP library, i.e. rtmp, rtmpt, rtmps, rtmpe, rtmfp, rtmpte and rtmpts. * the RTMP library, i.e. rtmp, rtmpt, rtmps, rtmpe, rtmfp, rtmpte and rtmpts.
* *
* <refsect2> * ## Example launch lines
* <title>Example launch lines</title>
* |[ * |[
* gst-launch-1.0 -v rtmpsrc location=rtmp://somehost/someurl ! fakesink * gst-launch-1.0 -v rtmpsrc location=rtmp://somehost/someurl ! fakesink
* ]| Open an RTMP location and pass its content to fakesink. * ]| Open an RTMP location and pass its content to fakesink.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -21,15 +21,15 @@
/** /**
* SECTION:element-sbdec * SECTION:element-sbdec
* @title: sbdec
* *
* This element decodes a Bluetooth SBC audio streams to raw integer PCM audio * This element decodes a Bluetooth SBC audio streams to raw integer PCM audio
* *
* <refsect2> * ## Example pipelines
* <title>Example pipelines</title>
* |[ * |[
* gst-launch-1.0 -v filesrc location=audio.sbc ! sbcparse ! sbcdec ! audioconvert ! audioresample ! autoaudiosink * gst-launch-1.0 -v filesrc location=audio.sbc ! sbcparse ! sbcdec ! audioconvert ! audioresample ! autoaudiosink
* ]| Decode a raw SBC file. * ]| Decode a raw SBC file.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,6 +20,7 @@
/** /**
* SECTION:element-sbenc * SECTION:element-sbenc
* @title: sbenc
* *
* This element encodes raw integer PCM audio into a Bluetooth SBC audio. * This element encodes raw integer PCM audio into a Bluetooth SBC audio.
* *
@ -27,12 +28,11 @@
* allocation-mode can be set by adding a capsfilter element with appropriate * allocation-mode can be set by adding a capsfilter element with appropriate
* filtercaps after the sbcenc encoder element. * filtercaps after the sbcenc encoder element.
* *
* <refsect2> * ## Example pipelines
* <title>Example pipelines</title>
* |[ * |[
* gst-launch-1.0 -v audiotestsrc ! sbcenc ! rtpsbcpay ! udpsink * gst-launch-1.0 -v audiotestsrc ! sbcenc ! rtpsbcpay ! udpsink
* ]| Encode a sine wave into SBC, RTP payload it and send over the network using UDP * ]| Encode a sine wave into SBC, RTP payload it and send over the network using UDP
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -22,10 +22,10 @@
/** /**
* SECTION:element-mssdemux * SECTION:element-mssdemux
* @title: mssdemux
* *
* Demuxes a Microsoft's Smooth Streaming manifest into its audio and/or video streams. * Demuxes a Microsoft's Smooth Streaming manifest into its audio and/or video streams.
* *
*
*/ */
/* /*

View file

@ -24,36 +24,21 @@
/** /**
* SECTION:element-dtmfdetect * SECTION:element-dtmfdetect
* @title: dtmfdetect
* @short_description: Detects DTMF tones * @short_description: Detects DTMF tones
* *
* This element will detect DTMF tones and emit messages. * This element will detect DTMF tones and emit messages.
* *
* The message is called <classname>&quot;dtmf-event&quot;</classname> and has * The message is called `dtmf-event` and has the following fields:
* the following fields: *
* <itemizedlist> * * gint `type` (0-1): The application uses this field to specify which of the two methods
* <listitem>
* <para>
* gint <classname>type</classname> (0-1):
* The application uses this field to specify which of the two methods
* specified in RFC 2833 to use. The value should be 0 for tones and 1 for * specified in RFC 2833 to use. The value should be 0 for tones and 1 for
* named events. Tones are specified by their frequencies and events are * named events. Tones are specified by their frequencies and events are
* specified by their number. This element can only take events as input. * specified by their number. This element can only take events as input.
* Do not confuse with "method" which specified the output. * Do not confuse with "method" which specified the output.
* </para> * * gint `number` (0-16): The event number.
* </listitem> * * gint `method` (2): This field will always be 2 (i.e. sound) from this element.
* <listitem> *
* <para>
* gint <classname>number</classname> (0-16):
* The event number.
* </para>
* </listitem>
* <listitem>
* <para>
* gint <classname>method</classname> (2):
* This field will always be 2 (i.e. sound) from this element.
* </para>
* </listitem>
* </itemizedlist>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,6 +20,7 @@
/** /**
* SECTION:element-spanplc * SECTION:element-spanplc
* @title: spanplc
* *
* The spanplc (Packet Loss Concealment) element provides a synthetic * The spanplc (Packet Loss Concealment) element provides a synthetic
* fill-in signal, to minimise the audible effect of lost packets in * fill-in signal, to minimise the audible effect of lost packets in

View file

@ -46,6 +46,7 @@
/** /**
* SECTION:element-srtpdec * SECTION:element-srtpdec
* @title: srtpdec
* @see_also: srtpenc * @see_also: srtpenc
* *
* gstrtpdec acts as a decoder that removes security from SRTP and SRTCP * gstrtpdec acts as a decoder that removes security from SRTP and SRTCP
@ -95,8 +96,7 @@
* other means. If no rollover counter is provided by the user, 0 is * other means. If no rollover counter is provided by the user, 0 is
* used by default. * used by default.
* *
* <refsect2> * ## Example pipelines
* <title>Example pipelines</title>
* |[ * |[
* gst-launch-1.0 udpsrc port=5004 caps='application/x-srtp, payload=(int)8, ssrc=(uint)1356955624, srtp-key=(buffer)012345678901234567890123456789012345678901234567890123456789, srtp-cipher=(string)aes-128-icm, srtp-auth=(string)hmac-sha1-80, srtcp-cipher=(string)aes-128-icm, srtcp-auth=(string)hmac-sha1-80' ! srtpdec ! rtppcmadepay ! alawdec ! pulsesink * gst-launch-1.0 udpsrc port=5004 caps='application/x-srtp, payload=(int)8, ssrc=(uint)1356955624, srtp-key=(buffer)012345678901234567890123456789012345678901234567890123456789, srtp-cipher=(string)aes-128-icm, srtp-auth=(string)hmac-sha1-80, srtcp-cipher=(string)aes-128-icm, srtcp-auth=(string)hmac-sha1-80' ! srtpdec ! rtppcmadepay ! alawdec ! pulsesink
* ]| Receive PCMA SRTP packets through UDP using caps to specify * ]| Receive PCMA SRTP packets through UDP using caps to specify
@ -105,7 +105,7 @@
* gst-launch-1.0 audiotestsrc ! alawenc ! rtppcmapay ! 'application/x-rtp, payload=(int)8, ssrc=(uint)1356955624' ! srtpenc key="012345678901234567890123456789012345678901234567890123456789" ! udpsink port=5004 * gst-launch-1.0 audiotestsrc ! alawenc ! rtppcmapay ! 'application/x-rtp, payload=(int)8, ssrc=(uint)1356955624' ! srtpenc key="012345678901234567890123456789012345678901234567890123456789" ! udpsink port=5004
* ]| Send PCMA SRTP packets through UDP, noting how the SSRC is forced so * ]| Send PCMA SRTP packets through UDP, noting how the SSRC is forced so
* that the receiver will recognize it. * that the receiver will recognize it.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -45,7 +45,8 @@
*/ */
/** /**
* SECTION:gst-plugin-bad-plugins-srtpenc * SECTION:element-srtpenc
* @title: srtpenc
* @see_also: srtpdec * @see_also: srtpdec
* *
* gstrtpenc acts as an encoder that adds security to RTP and RTCP * gstrtpenc acts as an encoder that adds security to RTP and RTCP

View file

@ -21,16 +21,16 @@
/** /**
* SECTION:element-teletextdec * SECTION:element-teletextdec
* @title: teletextdec
* *
* Decode a stream of raw VBI packets containing teletext information to a RGBA * Decode a stream of raw VBI packets containing teletext information to a RGBA
* stream. * stream.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch-1.0 -v -m filesrc location=recording.mpeg ! tsdemux ! teletextdec ! videoconvert ! ximagesink * gst-launch-1.0 -v -m filesrc location=recording.mpeg ! tsdemux ! teletextdec ! videoconvert ! ximagesink
* ]| * ]|
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -22,6 +22,7 @@
/** /**
* SECTION:element-ttmlparse * SECTION:element-ttmlparse
* @title: ttmlparse
* *
* Parses timed text subtitle files described using Timed Text Markup Language * Parses timed text subtitle files described using Timed Text Markup Language
* (TTML). Currently, only the EBU-TT-D profile of TTML, designed for * (TTML). Currently, only the EBU-TT-D profile of TTML, designed for
@ -35,13 +36,12 @@
* elements. A downstream renderer element uses this information to correctly * elements. A downstream renderer element uses this information to correctly
* render the text on top of video frames. * render the text on top of video frames.
* *
* <refsect2> * ## Example launch lines
* <title>Example launch lines</title>
* |[ * |[
* gst-launch-1.0 filesrc location=<media file location> ! video/quicktime ! qtdemux name=q ttmlrender name=r q. ! queue ! h264parse ! avdec_h264 ! autovideoconvert ! r.video_sink filesrc location=<subtitle file location> blocksize=16777216 ! queue ! ttmlparse ! r.text_sink r. ! ximagesink q. ! queue ! aacparse ! avdec_aac ! audioconvert ! alsasink * gst-launch-1.0 filesrc location=<media file location> ! video/quicktime ! qtdemux name=q ttmlrender name=r q. ! queue ! h264parse ! avdec_h264 ! autovideoconvert ! r.video_sink filesrc location=<subtitle file location> blocksize=16777216 ! queue ! ttmlparse ! r.text_sink r. ! ximagesink q. ! queue ! aacparse ! avdec_aac ! audioconvert ! alsasink
* ]| Parse and render TTML subtitles contained in a single XML file over an * ]| Parse and render TTML subtitles contained in a single XML file over an
* MP4 stream containing H.264 video and AAC audio. * MP4 stream containing H.264 video and AAC audio.
* </refsect2> *
*/ */
#include <stdio.h> #include <stdio.h>

View file

@ -25,19 +25,19 @@
/** /**
* SECTION:element-ttmlrender * SECTION:element-ttmlrender
* @title: ttmlrender
* *
* Renders timed text on top of a video stream. It receives text in buffers * Renders timed text on top of a video stream. It receives text in buffers
* from a ttmlparse element; each text string is in its own #GstMemory within * from a ttmlparse element; each text string is in its own #GstMemory within
* the GstBuffer, and the styling and layout associated with each text string * the GstBuffer, and the styling and layout associated with each text string
* is in metadata attached to the #GstBuffer. * is in metadata attached to the #GstBuffer.
* *
* <refsect2> * ## Example launch lines
* <title>Example launch lines</title>
* |[ * |[
* gst-launch-1.0 filesrc location=<media file location> ! video/quicktime ! qtdemux name=q ttmlrender name=r q. ! queue ! h264parse ! avdec_h264 ! autovideoconvert ! r.video_sink filesrc location=<subtitle file location> blocksize=16777216 ! queue ! ttmlparse ! r.text_sink r. ! ximagesink q. ! queue ! aacparse ! avdec_aac ! audioconvert ! alsasink * gst-launch-1.0 filesrc location=<media file location> ! video/quicktime ! qtdemux name=q ttmlrender name=r q. ! queue ! h264parse ! avdec_h264 ! autovideoconvert ! r.video_sink filesrc location=<subtitle file location> blocksize=16777216 ! queue ! ttmlparse ! r.text_sink r. ! ximagesink q. ! queue ! aacparse ! avdec_aac ! audioconvert ! alsasink
* ]| Parse and render TTML subtitles contained in a single XML file over an * ]| Parse and render TTML subtitles contained in a single XML file over an
* MP4 stream containing H.264 video and AAC audio: * MP4 stream containing H.264 video and AAC audio:
* </refsect2> *
*/ */
#include <gst/video/video.h> #include <gst/video/video.h>

View file

@ -20,6 +20,7 @@
/** /**
* SECTION:gstsubtitle * SECTION:gstsubtitle
* @title: GstSubtitle
* @short_description: Library for describing sets of static subtitles. * @short_description: Library for describing sets of static subtitles.
* *
* This library enables the description of static text scenes made up of a * This library enables the description of static text scenes made up of a

View file

@ -20,6 +20,7 @@
/** /**
* SECTION:gstsubtitlemeta * SECTION:gstsubtitlemeta
* @title: GstSubtitleMeta
* @short_description: Metadata class for timed-text subtitles. * @short_description: Metadata class for timed-text subtitles.
* *
* The GstSubtitleMeta class enables the layout and styling information needed * The GstSubtitleMeta class enables the layout and styling information needed

View file

@ -19,16 +19,16 @@
/** /**
* SECTION:element-voaacenc * SECTION:element-voaacenc
* @title: voaacenc
* *
* AAC audio encoder based on vo-aacenc library * AAC audio encoder based on vo-aacenc library
* <ulink url="http://sourceforge.net/projects/opencore-amr/files/vo-aacenc/">vo-aacenc library source file</ulink>. * <ulink url="http://sourceforge.net/projects/opencore-amr/files/vo-aacenc/">vo-aacenc library source file</ulink>.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch-1.0 filesrc location=abc.wav ! wavparse ! audioresample ! audioconvert ! voaacenc ! filesink location=abc.aac * gst-launch-1.0 filesrc location=abc.wav ! wavparse ! audioresample ! audioconvert ! voaacenc ! filesink location=abc.aac
* ]| * ]|
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -19,19 +19,19 @@
/** /**
* SECTION:element-voamrwbenc * SECTION:element-voamrwbenc
* @title: voamrwbenc
* @see_also: #GstAmrWbDec, #GstAmrWbParse * @see_also: #GstAmrWbDec, #GstAmrWbParse
* *
* AMR wideband encoder based on the * AMR wideband encoder based on the
* <ulink url="http://www.penguin.cz/~utx/amr">reference codec implementation</ulink>. * <ulink url="http://www.penguin.cz/~utx/amr">reference codec implementation</ulink>.
* *
* <refsect2> * ## Example launch line
* <title>Example launch line</title>
* |[ * |[
* gst-launch filesrc location=abc.wav ! wavparse ! audioresample ! audioconvert ! voamrwbenc ! filesink location=abc.amr * gst-launch filesrc location=abc.wav ! wavparse ! audioresample ! audioconvert ! voamrwbenc ! filesink location=abc.amr
* ]| * ]|
* Please note that the above stream is missing the header, which is needed to play * Please note that the above stream is missing the header, which is needed to play
* the stream. * the stream.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,6 +20,7 @@
/** /**
* SECTION:plugin-vulkan * SECTION:plugin-vulkan
* @title: vulkan
* *
* Cross-platform Vulkan plugin. * Cross-platform Vulkan plugin.
*/ */

View file

@ -26,6 +26,7 @@
/** /**
* SECTION:vkbuffermemory * SECTION:vkbuffermemory
* @title: vkbuffermemory
* @short_description: memory subclass for Vulkan buffer memory * @short_description: memory subclass for Vulkan buffer memory
* @see_also: #GstMemory, #GstAllocator * @see_also: #GstMemory, #GstAllocator
* *
@ -420,7 +421,7 @@ gst_vulkan_buffer_memory_init_once (void)
/** /**
* gst_is_vulkan_buffer_memory: * gst_is_vulkan_buffer_memory:
* @mem:a #GstMemory * @mem:a #GstMemory
* *
* Returns: whether the memory at @mem is a #GstVulkanBufferMemory * Returns: whether the memory at @mem is a #GstVulkanBufferMemory
*/ */
gboolean gboolean

View file

@ -26,6 +26,7 @@
/** /**
* SECTION:vkbufferpool * SECTION:vkbufferpool
* @title: GstVulkanBufferPool
* @short_description: buffer pool for #GstVulkanBufferMemory objects * @short_description: buffer pool for #GstVulkanBufferMemory objects
* @see_also: #GstBufferPool, #GstVulkanBufferMemory * @see_also: #GstBufferPool, #GstVulkanBufferMemory
* *
@ -33,7 +34,7 @@
* *
* A #GstVulkanBufferPool is created with gst_vulkan_buffer_pool_new() * A #GstVulkanBufferPool is created with gst_vulkan_buffer_pool_new()
* *
* #GstVulkanBufferPool implements the VideoMeta buffer pool option * #GstVulkanBufferPool implements the VideoMeta buffer pool option
* #GST_BUFFER_POOL_OPTION_VIDEO_META * #GST_BUFFER_POOL_OPTION_VIDEO_META
*/ */

View file

@ -26,6 +26,7 @@
/** /**
* SECTION:vkimagememory * SECTION:vkimagememory
* @title: GstVkImageMemory
* @short_description: memory subclass for Vulkan image memory * @short_description: memory subclass for Vulkan image memory
* @see_also: #GstMemory, #GstAllocator * @see_also: #GstMemory, #GstAllocator
* *
@ -559,7 +560,7 @@ gst_vulkan_image_memory_init_once (void)
/** /**
* gst_is_vulkan_image_memory: * gst_is_vulkan_image_memory:
* @mem:a #GstMemory * @mem:a #GstMemory
* *
* Returns: whether the memory at @mem is a #GstVulkanImageMemory * Returns: whether the memory at @mem is a #GstVulkanImageMemory
*/ */
gboolean gboolean

View file

@ -28,11 +28,12 @@
/** /**
* SECTION:vkmemory * SECTION:vkmemory
* @title: GstVkMemory
* @short_description: memory subclass for Vulkan device memory * @short_description: memory subclass for Vulkan device memory
* @see_also: #GstMemory, #GstAllocator * @see_also: #GstMemory, #GstAllocator
* *
* GstVulkanMemory is a #GstMemory subclass providing support for the mapping of * GstVulkanMemory is a #GstMemory subclass providing support for the mapping of
* Vulkan device memory. * Vulkan device memory.
*/ */
/* WARNING: while suballocation is allowed, nothing prevents aliasing which /* WARNING: while suballocation is allowed, nothing prevents aliasing which
@ -347,7 +348,7 @@ gst_vulkan_memory_init_once (void)
/** /**
* gst_is_vulkan_memory: * gst_is_vulkan_memory:
* @mem:a #GstMemory * @mem:a #GstMemory
* *
* Returns: whether the memory at @mem is a #GstVulkanMemory * Returns: whether the memory at @mem is a #GstVulkanMemory
*/ */
gboolean gboolean

View file

@ -20,6 +20,7 @@
/** /**
* SECTION:element-vulkansink * SECTION:element-vulkansink
* @title: vulkansink
* *
* vulkansink renders video frames to a drawable on a local or remote * vulkansink renders video frames to a drawable on a local or remote
* display using Vulkan. * display using Vulkan.

View file

@ -20,6 +20,7 @@
/** /**
* SECTION:element-vulkanupload * SECTION:element-vulkanupload
* @title: vulkanupload
* *
* vulkanupload uploads data into Vulkan memory objects. * vulkanupload uploads data into Vulkan memory objects.
*/ */

View file

@ -19,7 +19,7 @@
*/ */
/** /**
* SECTION:gstglwindow * SECTION:vkwindow
* @short_description: window/surface abstraction * @short_description: window/surface abstraction
* @title: GstVulkanWindow * @title: GstVulkanWindow
* @see_also: #GstGLContext, #GstGLDisplay * @see_also: #GstGLContext, #GstGLDisplay

View file

@ -23,18 +23,18 @@
/** /**
* SECTION:element-waylandsink * SECTION:element-waylandsink
* @title: waylandsink
* *
* The waylandsink is creating its own window and render the decoded video frames to that. * The waylandsink is creating its own window and render the decoded video frames to that.
* Setup the Wayland environment as described in * Setup the Wayland environment as described in
* <ulink url="http://wayland.freedesktop.org/building.html">Wayland</ulink> home page. * <ulink url="http://wayland.freedesktop.org/building.html">Wayland</ulink> home page.
* The current implementaion is based on weston compositor. * The current implementaion is based on weston compositor.
* *
* <refsect2> * ## Example pipelines
* <title>Example pipelines</title>
* |[ * |[
* gst-launch-1.0 -v videotestsrc ! waylandsink * gst-launch-1.0 -v videotestsrc ! waylandsink
* ]| test the video rendering in wayland * ]| test the video rendering in wayland
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -21,6 +21,7 @@
/** /**
* SECTION:element-wildmidi * SECTION:element-wildmidi
* @title: wildmidi
* @see_also: timidity * @see_also: timidity
* *
* This element renders midi-files as audio streams using * This element renders midi-files as audio streams using
@ -29,13 +30,12 @@
* uses the same sound-patches as timidity (it tries the path in $WILDMIDI_CFG, * uses the same sound-patches as timidity (it tries the path in $WILDMIDI_CFG,
* $HOME/.wildmidirc and /etc/wildmidi.cfg) * $HOME/.wildmidirc and /etc/wildmidi.cfg)
* *
* <refsect2> * ## Example pipeline
* <title>Example pipeline</title>
* |[ * |[
* gst-launch-1.0 filesrc location=song.mid ! wildmidi ! alsasink * gst-launch-1.0 filesrc location=song.mid ! wildmidi ! alsasink
* ]| This example pipeline will parse the midi and render to raw audio which is * ]| This example pipeline will parse the midi and render to raw audio which is
* played via alsa. * played via alsa.
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -22,6 +22,7 @@
/** /**
* SECTION:element-x265enc * SECTION:element-x265enc
* @title: x265enc
* *
* This element encodes raw video into H265 compressed data. * This element encodes raw video into H265 compressed data.
* *

View file

@ -19,64 +19,31 @@
/** /**
* SECTION:element-zbar * SECTION:element-zbar
* @title: zbar
* *
* Detect bar codes in the video streams and send them as element messages to * Detect bar codes in the video streams and send them as element messages to
* the #GstBus if .#GstZBar:message property is %TRUE. * the #GstBus if .#GstZBar:message property is %TRUE.
* If the .#GstZBar:attach-frame property is %TRUE, the posted barcode message * If the .#GstZBar:attach-frame property is %TRUE, the posted barcode message
* includes a sample of the frame where the barcode was detected (Since 1.6). * includes a sample of the frame where the barcode was detected (Since 1.6).
* *
* The element generate messages named * The element generate messages named`barcode`. The structure containes these fields:
* <classname>&quot;barcode&quot;</classname>. The structure containes these
* fields:
* <itemizedlist>
* <listitem>
* <para>
* #GstClockTime
* <classname>&quot;timestamp&quot;</classname>:
* the timestamp of the buffer that triggered the message.
* </para>
* </listitem>
* <listitem>
* <para>
* gchar*
* <classname>&quot;type&quot;</classname>:
* the symbol type.
* </para>
* </listitem>
* <listitem>
* <para>
* gchar*
* <classname>&quot;symbol&quot;</classname>:
* the deteted bar code data.
* </para>
* </listitem>
* <listitem>
* <para>
* gint
* <classname>&quot;quality&quot;</classname>:
* an unscaled, relative quantity: larger values are better than smaller
* values.
* </para>
* </listitem>
* <listitem>
* <para>
* GstSample
* <classname>&quot;frame&quot;</classname>:
* the frame in which the barcode message was detected, if
* the .#GstZBar:attach-frame property was set to %TRUE (Since 1.6)
* </para>
* </listitem>
* </itemizedlist>
* *
* <refsect2> * * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message.
* <title>Example launch lines</title> * * gchar * `type`: the symbol type.
* * gchar * `symbol`: the deteted bar code data.
* * gint `quality`: an unscaled, relative quantity: larger values are better than smaller
* values.
* * GstSample `frame`: the frame in which the barcode message was detected, if
* the .#GstZBar:attach-frame property was set to %TRUE (Since 1.6)
*
* ## Example launch lines
* |[ * |[
* gst-launch-1.0 -m v4l2src ! videoconvert ! zbar ! videoconvert ! xvimagesink * gst-launch-1.0 -m v4l2src ! videoconvert ! zbar ! videoconvert ! xvimagesink
* ]| This pipeline will detect barcodes and send them as messages. * ]| This pipeline will detect barcodes and send them as messages.
* |[ * |[
* gst-launch-1.0 -m v4l2src ! tee name=t ! queue ! videoconvert ! zbar ! fakesink t. ! queue ! xvimagesink * gst-launch-1.0 -m v4l2src ! tee name=t ! queue ! videoconvert ! zbar ! fakesink t. ! queue ! xvimagesink
* ]| Same as above, but running the filter on a branch to keep the display in color * ]| Same as above, but running the filter on a branch to keep the display in color
* </refsect2> *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -21,46 +21,40 @@
*/ */
/** /**
* SECTION: gstaggregator * SECTION: gstaggregator
* @title: GstAggregator
* @short_description: manages a set of pads with the purpose of * @short_description: manages a set of pads with the purpose of
* aggregating their buffers. * aggregating their buffers.
* @see_also: gstcollectpads for historical reasons. * @see_also: gstcollectpads for historical reasons.
* *
* Manages a set of pads with the purpose of aggregating their buffers. * Manages a set of pads with the purpose of aggregating their buffers.
* Control is given to the subclass when all pads have data. * Control is given to the subclass when all pads have data.
* <itemizedlist> *
* <listitem><para> * * Base class for mixers and muxers. Subclasses should at least implement
* Base class for mixers and muxers. Subclasses should at least implement
* the #GstAggregatorClass.aggregate() virtual method. * the #GstAggregatorClass.aggregate() virtual method.
* </para></listitem> *
* <listitem><para> * * When data is queued on all pads, tha aggregate vmethod is called.
* When data is queued on all pads, tha aggregate vmethod is called. *
* </para></listitem> * * One can peek at the data on any given GstAggregatorPad with the
* <listitem><para>
* One can peek at the data on any given GstAggregatorPad with the
* gst_aggregator_pad_get_buffer () method, and take ownership of it * gst_aggregator_pad_get_buffer () method, and take ownership of it
* with the gst_aggregator_pad_steal_buffer () method. When a buffer * with the gst_aggregator_pad_steal_buffer () method. When a buffer
* has been taken with steal_buffer (), a new buffer can be queued * has been taken with steal_buffer (), a new buffer can be queued
* on that pad. * on that pad.
* </para></listitem> *
* <listitem><para> * * If the subclass wishes to push a buffer downstream in its aggregate
* If the subclass wishes to push a buffer downstream in its aggregate
* implementation, it should do so through the * implementation, it should do so through the
* gst_aggregator_finish_buffer () method. This method will take care * gst_aggregator_finish_buffer () method. This method will take care
* of sending and ordering mandatory events such as stream start, caps * of sending and ordering mandatory events such as stream start, caps
* and segment. * and segment.
* </para></listitem> *
* <listitem><para> * * Same goes for EOS events, which should not be pushed directly by the
* Same goes for EOS events, which should not be pushed directly by the
* subclass, it should instead return GST_FLOW_EOS in its aggregate * subclass, it should instead return GST_FLOW_EOS in its aggregate
* implementation. * implementation.
* </para></listitem> *
* <listitem><para> * * Note that the aggregator logic regarding gap event handling is to turn
* Note that the aggregator logic regarding gap event handling is to turn
* these into gap buffers with matching PTS and duration. It will also * these into gap buffers with matching PTS and duration. It will also
* flag these buffers with GST_BUFFER_FLAG_GAP and GST_BUFFER_FLAG_DROPPABLE * flag these buffers with GST_BUFFER_FLAG_GAP and GST_BUFFER_FLAG_DROPPABLE
* to ease their identification and subsequent processing. * to ease their identification and subsequent processing.
* </para></listitem> *
* </itemizedlist>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -29,40 +29,30 @@
/** /**
* SECTION:gsth264parser * SECTION:gsth264parser
* @title: GstH264Parser
* @short_description: Convenience library for h264 video * @short_description: Convenience library for h264 video
* bitstream parsing. * bitstream parsing.
* *
* It offers bitstream parsing in both AVC (length-prefixed) and Annex B * It offers bitstream parsing in both AVC (length-prefixed) and Annex B
* (0x000001 start code prefix) format. To identify a NAL unit in a bitstream * (0x000001 start code prefix) format. To identify a NAL unit in a bitstream
* and parse its headers, first call: * and parse its headers, first call:
* <itemizedlist> *
* <listitem> * * #gst_h264_parser_identify_nalu to identify a NAL unit in an Annex B type bitstream
* #gst_h264_parser_identify_nalu to identify a NAL unit in an Annex B type bitstream *
* </listitem> * * #gst_h264_parser_identify_nalu_avc to identify a NAL unit in an AVC type bitstream
* <listitem>
* #gst_h264_parser_identify_nalu_avc to identify a NAL unit in an AVC type bitstream
* </listitem>
* </itemizedlist>
* *
* The following functions are then available for parsing the structure of the * The following functions are then available for parsing the structure of the
* #GstH264NalUnit, depending on the #GstH264NalUnitType: * #GstH264NalUnit, depending on the #GstH264NalUnitType:
* <itemizedlist> *
* <listitem> * * From #GST_H264_NAL_SLICE to #GST_H264_NAL_SLICE_IDR: #gst_h264_parser_parse_slice_hdr
* From #GST_H264_NAL_SLICE to #GST_H264_NAL_SLICE_IDR: #gst_h264_parser_parse_slice_hdr *
* </listitem> * * #GST_H264_NAL_SEI: #gst_h264_parser_parse_sei
* <listitem> *
* #GST_H264_NAL_SEI: #gst_h264_parser_parse_sei * * #GST_H264_NAL_SPS: #gst_h264_parser_parse_sps
* </listitem> *
* <listitem> * * #GST_H264_NAL_PPS: #gst_h264_parser_parse_pps
* #GST_H264_NAL_SPS: #gst_h264_parser_parse_sps *
* </listitem> * * Any other: #gst_h264_parser_parse_nal
* <listitem>
* #GST_H264_NAL_PPS: #gst_h264_parser_parse_pps
* </listitem>
* <listitem>
* Any other: #gst_h264_parser_parse_nal
* </listitem>
* </itemizedlist>
* *
* One of these functions *must* be called on every NAL unit in the bitstream, * One of these functions *must* be called on every NAL unit in the bitstream,
* in order to keep the internal structures of the #GstH264NalParser up to * in order to keep the internal structures of the #GstH264NalParser up to
@ -70,17 +60,13 @@
* type, if no special parsing of the current NAL unit is required by the * type, if no special parsing of the current NAL unit is required by the
* application. * application.
* *
* For more details about the structures, look at the ITU-T H.264 and ISO/IEC 14496-10 MPEG-4 * For more details about the structures, look at the ITU-T H.264 and ISO/IEC 14496-10 â MPEG-4
* Part 10 specifications, available at: * Part 10 specifications, available at:
* *
* <itemizedlist> * * ITU-T H.264: http://www.itu.int/rec/T-REC-H.264
* <listitem> *
* ITU-T H.264: http://www.itu.int/rec/T-REC-H.264 * * ISO/IEC 14496-10: http://www.iso.org/iso/iso_catalogue/catalogue_tc/catalogue_detail.htm?csnumber=56538
* </listitem> *
* <listitem>
* ISO/IEC 14496-10: http://www.iso.org/iso/iso_catalogue/catalogue_tc/catalogue_detail.htm?csnumber=56538
* </listitem>
* </itemizedlist>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -22,43 +22,32 @@
/** /**
* SECTION:gsth265parser * SECTION:gsth265parser
* @title: GstH265Parser
* @short_description: Convenience library for h265 video bitstream parsing. * @short_description: Convenience library for h265 video bitstream parsing.
* *
* It offers you bitstream parsing in HEVC mode and non-HEVC mode. To identify * It offers you bitstream parsing in HEVC mode and non-HEVC mode. To identify
* Nals in a bitstream and parse its headers, you should call: * Nals in a bitstream and parse its headers, you should call:
* <itemizedlist> *
* <listitem> * * gst_h265_parser_identify_nalu() to identify the following nalu in
* gst_h265_parser_identify_nalu() to identify the following nalu in
* non-HEVC bitstreams * non-HEVC bitstreams
* </listitem> *
* <listitem> * * gst_h265_parser_identify_nalu_hevc() to identify the nalu in
* gst_h265_parser_identify_nalu_hevc() to identify the nalu in
* HEVC bitstreams * HEVC bitstreams
* </listitem>
* </itemizedlist>
* *
* Then, depending on the #GstH265NalUnitType of the newly parsed #GstH265NalUnit, * Then, depending on the #GstH265NalUnitType of the newly parsed #GstH265NalUnit,
* you should call the differents functions to parse the structure: * you should call the differents functions to parse the structure:
* <itemizedlist> *
* <listitem> * * From #GST_H265_NAL_SLICE_TRAIL_N to #GST_H265_NAL_SLICE_CRA_NUT: gst_h265_parser_parse_slice_hdr()
* From #GST_H265_NAL_SLICE_TRAIL_N to #GST_H265_NAL_SLICE_CRA_NUT: gst_h265_parser_parse_slice_hdr() *
* </listitem> * * #GST_H265_NAL_SEI: gst_h265_parser_parse_sei()
* <listitem> *
* #GST_H265_NAL_SEI: gst_h265_parser_parse_sei() * * #GST_H265_NAL_VPS: gst_h265_parser_parse_vps()
* </listitem> *
* <listitem> * * #GST_H265_NAL_SPS: gst_h265_parser_parse_sps()
* #GST_H265_NAL_VPS: gst_h265_parser_parse_vps() *
* </listitem> * * #GST_H265_NAL_PPS: #gst_h265_parser_parse_pps()
* <listitem> *
* #GST_H265_NAL_SPS: gst_h265_parser_parse_sps() * * Any other: gst_h265_parser_parse_nal()
* </listitem>
* <listitem>
* #GST_H265_NAL_PPS: #gst_h265_parser_parse_pps()
* </listitem>
* <listitem>
* Any other: gst_h265_parser_parse_nal()
* </listitem>
* </itemizedlist>
* *
* Note: You should always call gst_h265_parser_parse_nal() if you don't * Note: You should always call gst_h265_parser_parse_nal() if you don't
* actually need #GstH265NalUnitType to be parsed for your personal use, in * actually need #GstH265NalUnitType to be parsed for your personal use, in
@ -67,11 +56,8 @@
* For more details about the structures, look at the ITU-T H.265 * For more details about the structures, look at the ITU-T H.265
* specifications, you can download them from: * specifications, you can download them from:
* *
* <itemizedlist> * * ITU-T H.265: http://www.itu.int/rec/T-REC-H.265
* <listitem> *
* ITU-T H.265: http://www.itu.int/rec/T-REC-H.265
* </listitem>
* </itemizedlist>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -20,6 +20,7 @@
/** /**
* SECTION:gstjpeg2000sampling * SECTION:gstjpeg2000sampling
* @title: GstJpeg2000Sampling
* @short_description: Manage JPEG 2000 sampling and colorspace fields * @short_description: Manage JPEG 2000 sampling and colorspace fields
* *
*/ */

View file

@ -20,13 +20,11 @@
/** /**
* SECTION:gstjpegparser * SECTION:gstjpegparser
* @title: GstJpegParser
* @short_description: Convenience library for JPEG bitstream parsing. * @short_description: Convenience library for JPEG bitstream parsing.
* *
* <refsect2>
* <para>
* Provides useful functions for parsing JPEG images * Provides useful functions for parsing JPEG images
* </para> *
* </refsect2>
*/ */
#include <string.h> #include <string.h>

View file

@ -20,6 +20,7 @@
*/ */
/** /**
* SECTION:gstmpeg4parser * SECTION:gstmpeg4parser
* @title: GstMpeg4Parser
* @short_description: Convenience library for parsing mpeg4 part 2 video * @short_description: Convenience library for parsing mpeg4 part 2 video
* bitstream. * bitstream.
* *

View file

@ -25,14 +25,12 @@
/** /**
* SECTION:gstmpegvideoparser * SECTION:gstmpegvideoparser
* @title: GstMpegvideoParser
* @short_description: Convenience library for mpeg1 and 2 video * @short_description: Convenience library for mpeg1 and 2 video
* bitstream parsing. * bitstream parsing.
* *
* <refsect2>
* <para>
* Provides useful functions for mpeg videos bitstream parsing. * Provides useful functions for mpeg videos bitstream parsing.
* </para> *
* </refsect2>
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

Some files were not shown because too many files have changed in this diff Show more