diff --git a/configure.ac b/configure.ac index 544e584159..17f0d59192 100644 --- a/configure.ac +++ b/configure.ac @@ -1057,7 +1057,6 @@ tests/examples/adapter/Makefile tests/examples/controller/Makefile tests/examples/stepping/Makefile tests/examples/helloworld/Makefile -tests/examples/manual/Makefile tests/examples/memory/Makefile tests/examples/netclock/Makefile tests/examples/ptp/Makefile @@ -1073,8 +1072,6 @@ docs/gst/Makefile docs/gst/gstreamer.types docs/libs/Makefile docs/plugins/Makefile -docs/manual/Makefile -docs/pwg/Makefile docs/slides/Makefile docs/xsl/Makefile docs/version.entities diff --git a/docs/Makefile.am b/docs/Makefile.am index c39b7891b6..ac6432cc84 100644 --- a/docs/Makefile.am +++ b/docs/Makefile.am @@ -1,5 +1,5 @@ if ENABLE_DOCBOOK -SUBDIRS_DOCBOOK = faq manual pwg +SUBDIRS_DOCBOOK = faq else SUBDIRS_DOCBOOK = endif @@ -17,7 +17,7 @@ endif BUILT_SOURCES = version.entities SUBDIRS = design gst libs $(PLUGIN_DOCS_DIRS) $(SUBDIRS_DOCBOOK) -DIST_SUBDIRS = design gst libs plugins faq manual pwg slides xsl +DIST_SUBDIRS = design gst libs plugins faq slides xsl EXTRA_DIST = \ manuals.mak htmlinstall.mak \ diff --git a/docs/manual/.gitignore b/docs/manual/.gitignore deleted file mode 100644 index db6eb2ffb1..0000000000 --- a/docs/manual/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -Makefile -Makefile.in -.deps -build -html -*.pdf -*.ps diff --git a/docs/manual/Makefile.am b/docs/manual/Makefile.am deleted file mode 100644 index 65b6034441..0000000000 --- a/docs/manual/Makefile.am +++ /dev/null @@ -1,41 +0,0 @@ -### this is the part you can customize if you need to - -# parallel builds don't work, probably due to temporary files -MAKEFLAGS = -j1 - -# base name of doc -DOC = manual -# formats defined for upload-doc.mak -FORMATS=html ps pdf - -# main xml file -MAIN = $(DOC).xml -# all xml sources -XML = $(notdir $(wildcard $(srcdir)/*.xml)) -# base style sheet -CSS = base.css - -# image sources -PNG_SRC = $(notdir $(wildcard $(srcdir)/*.png)) -FIG_SRC = 
$(notdir $(wildcard $(srcdir)/*.fig)) - -# extra sources to copy in build directory -EXTRA_SRC = - -### this is the generic bit and you shouldn't need to change this - -# get the generic docbuilding Makefile stuff -include $(srcdir)/../manuals.mak -# get the generic upload target -include $(top_srcdir)/common/upload-doc.mak - -### this is standard automake stuff - -# package up all the source -EXTRA_DIST = $(SRC) README - -# install documentation -manualdir = $(docdir)/$(DOC) -manual_DATA = $(PDF_DAT) $(PS_DAT) - -include $(srcdir)/../htmlinstall.mak diff --git a/docs/manual/README b/docs/manual/README deleted file mode 100644 index e2c4ea6af9..0000000000 --- a/docs/manual/README +++ /dev/null @@ -1,61 +0,0 @@ -Current requirements for building the docs : --------------------------------------------- - -libxslt >= 1.0.6 -libxml2 >= 2.4.12 - - -These are not included with RH72. They are in debian. GDE has good rpms. - -To build pdf's from xslt stuff, you need xmltex and (on redhat) -passivetex. They are not known to have been built on either redhat or -debian yet though. - -Wingo's new comments on the doc building : ------------------------------------------- -* Well he should add them soon here since he overhauled it. And did a good - job on it too ;) - -Thomas's new comments on the doc building : -------------------------------------------- -* originally the manual was written with DocBook 3.0 in mind, which - supported the graphic tag. That is now deprecated, so I changed it to - the new mediaobject tag set. - -* eps files in images/ should be generated from the makefile. You need to - have fig2dev installed for that. - -Ensonic's comments on the doc build system : --------------------------------------------- -In case you like to share files between the manual and the pwg - it's -not trivial. -Before anything is done, the build-system copies all xml files into the build -subdir and this breaks including shared docs via entities. 
- -The examples should be updated in the xml. We run a perlscript in -tests/examples/manual that extracts them. - -Wtay's original comments : --------------------------- - -For now use: - - db2html gstreamer-manual - -(On debian, db2html is in the cygnus-stylesheets package) - -You will need the png support for docbook (see GNOME documentation project) - -convert the fig images to png with: - - fig2dev -L png -s 16 fig/.fig images/.png - -Put a link in the gstreamer-manual directory with - - ln -s ../images gstreamer-manual/images - -point your browser to gstreamer-manual/gstreamer.html - -Fix typing errors and correct bad english. -Let me know about the stuff that needs some more explanation. -Let me know about the structure of the document. diff --git a/docs/manual/advanced-autoplugging.xml b/docs/manual/advanced-autoplugging.xml deleted file mode 100644 index 5f7a0e0e67..0000000000 --- a/docs/manual/advanced-autoplugging.xml +++ /dev/null @@ -1,242 +0,0 @@ - - Autoplugging - - In , you've learned to build a - simple media player for Ogg/Vorbis files. By using alternative elements, - you are able to build media players for other media types, such as - Ogg/Speex, MP3 or even video formats. However, you would rather want - to build an application that can automatically detect the media type - of a stream and automatically generate the best possible pipeline - by looking at all available elements in a system. This process is called - autoplugging, and &GStreamer; contains high-quality autopluggers. If - you're looking for an autoplugger, don't read any further and go to - . This chapter will explain the - concept of autoplugging and typefinding. It will - explain what systems &GStreamer; includes to dynamically detect the - type of a media stream, and how to generate a pipeline of decoder - elements to playback this media. The same principles can also be used - for transcoding. 
Because of the full dynamicity of this concept, - &GStreamer; can be automatically extended to support new media types - without needing any adaptations to its autopluggers. - - - We will first introduce the concept of Media types as a dynamic and - extendible way of identifying media streams. After that, we will introduce - the concept of typefinding to find the type of a media stream. Lastly, - we will explain how autoplugging and the &GStreamer; registry can be - used to setup a pipeline that will convert media from one mediatype to - another, for example for media decoding. - - - - Media types as a way to identify streams - - We have previously introduced the concept of capabilities as a way - for elements (or, rather, pads) to agree on a media type when - streaming data from one element to the next (see ). We have explained that a capability is - a combination of a media type and a set of properties. For most - container formats (those are the files that you will find on your - hard disk; Ogg, for example, is a container format), no properties - are needed to describe the stream. Only a media type is needed. A - full list of media types and accompanying properties can be found - in the - Plugin Writer's Guide. - - - An element must associate a media type to its source and sink pads - when it is loaded into the system. &GStreamer; knows about the - different elements and what type of data they expect and emit through - the &GStreamer; registry. This allows for very dynamic and extensible - element creation as we will see. - - - - In , we've learned to build a - music player for Ogg/Vorbis files. Let's look at the media types - associated with each pad in this pipeline. shows what media type belongs to each - pad in this pipeline. - - -
- The Hello world pipeline with media types - - - - - -
- - - Now that we have an idea how &GStreamer; identifies known media - streams, we can look at methods &GStreamer; uses to setup pipelines - for media handling and for media type detection. - -
- - - Media stream type detection - - Usually, when loading a media stream, the type of the stream is not - known. This means that before we can choose a pipeline to decode the - stream, we first need to detect the stream type. &GStreamer; uses the - concept of typefinding for this. Typefinding is a normal part of a - pipeline, it will read data for as long as the type of a stream is - unknown. During this period, it will provide data to all plugins - that implement a typefinder. When one of the typefinders recognizes - the stream, the typefind element will emit a signal and act as a - passthrough module from that point on. If no type was found, it will - emit an error and further media processing will stop. - - - Once the typefind element has found a type, the application can - use this to plug together a pipeline to decode the media stream. - This will be discussed in the next section. - - - Plugins in &GStreamer; can, as mentioned before, implement typefinder - functionality. A plugin implementing this functionality will submit - a media type, optionally a set of file extensions commonly used for this - media type, and a typefind function. Once this typefind function inside - the plugin is called, the plugin will see if the data in this media - stream matches a specific pattern that marks the media type identified - by that media type. If it does, it will notify the typefind element of - this fact, telling which mediatype was recognized and how certain we - are that this stream is indeed that mediatype. Once this run has been - completed for all plugins implementing a typefind functionality, the - typefind element will tell the application what kind of media stream - it thinks to have recognized. - - - The following code should explain how to use the typefind element. - It will print the detected media type, or tell that the media type - was not found. The next section will introduce more useful behaviours, - such as plugging together a decoding pipeline. 
- - -#include <gst/gst.h> - -[.. my_bus_callback goes here ..] - -static gboolean -idle_exit_loop (gpointer data) -{ - g_main_loop_quit ((GMainLoop *) data); - - /* once */ - return FALSE; -} - -static void -cb_typefound (GstElement *typefind, - guint probability, - GstCaps *caps, - gpointer data) -{ - GMainLoop *loop = data; - gchar *type; - - type = gst_caps_to_string (caps); - g_print ("Media type %s found, probability %d%%\n", type, probability); - g_free (type); - - /* since we connect to a signal in the pipeline thread context, we need - * to set an idle handler to exit the main loop in the mainloop context. - * Normally, your app should not need to worry about such things. */ - g_idle_add (idle_exit_loop, loop); -} - -gint -main (gint argc, - gchar *argv[]) -{ - GMainLoop *loop; - GstElement *pipeline, *filesrc, *typefind, *fakesink; - GstBus *bus; - - /* init GStreamer */ - gst_init (&argc, &argv); - loop = g_main_loop_new (NULL, FALSE); - - /* check args */ - if (argc != 2) { - g_print ("Usage: %s <filename>\n", argv[0]); - return -1; - } - - /* create a new pipeline to hold the elements */ - pipeline = gst_pipeline_new ("pipe"); - - bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); - gst_bus_add_watch (bus, my_bus_callback, NULL); - gst_object_unref (bus); - - /* create file source and typefind element */ - filesrc = gst_element_factory_make ("filesrc", "source"); - g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL); - typefind = gst_element_factory_make ("typefind", "typefinder"); - g_signal_connect (typefind, "have-type", G_CALLBACK (cb_typefound), loop); - fakesink = gst_element_factory_make ("fakesink", "sink"); - - /* setup */ - gst_bin_add_many (GST_BIN (pipeline), filesrc, typefind, fakesink, NULL); - gst_element_link_many (filesrc, typefind, fakesink, NULL); - gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); - g_main_loop_run (loop); - - /* unset */ - gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL); - 
gst_object_unref (GST_OBJECT (pipeline)); - - return 0; -} - - - Once a media type has been detected, you can plug an element (e.g. a - demuxer or decoder) to the source pad of the typefind element, and - decoding of the media stream will start right after. - - - - - Dynamically autoplugging a pipeline - - See for using the high - level object that you can use to dynamically construct pipelines. - - -
diff --git a/docs/manual/advanced-buffering.xml b/docs/manual/advanced-buffering.xml deleted file mode 100644 index 7facd85433..0000000000 --- a/docs/manual/advanced-buffering.xml +++ /dev/null @@ -1,450 +0,0 @@ - - Buffering - - The purpose of buffering is to accumulate enough data in a pipeline so that - playback can occur smoothly and without interruptions. It is typically done - when reading from a (slow) and non-live network source but can also be - used for live sources. - - - &GStreamer; provides support for the following use cases: - - - - Buffering up to a specific amount of data, in memory, before starting - playback so that network fluctuations are minimized. - See . - - - - - Download of the network file to a local disk with fast seeking in the - downloaded data. This is similar to the quicktime/youtube players. - See . - - - - - Caching of (semi)-live streams to a local, on disk, ringbuffer with - seeking in the cached area. This is similar to tivo-like timeshifting. - See . - - - - - - &GStreamer; can provide the application with progress reports about the - current buffering state as well as let the application decide on how - to buffer and when the buffering stops. - - - In the most simple case, the application has to listen for BUFFERING - messages on the bus. If the percent indicator inside the BUFFERING message - is smaller than 100, the pipeline is buffering. When a message is - received with 100 percent, buffering is complete. In the buffering state, - the application should keep the pipeline in the PAUSED state. When buffering - completes, it can put the pipeline (back) in the PLAYING state. - - - What follows is an example of how the message handler could deal with - the BUFFERING messages. We will see more advanced methods in - . - - - - - - - Stream buffering - - +---------+ +---------+ +-------+ - | httpsrc | | buffer | | demux | - | src - sink src - sink .... 
- +---------+ +---------+ +-------+ - - - In this case we are reading from a slow network source into a buffer - element (such as queue2). - - - The buffer element has a low and high watermark expressed in bytes. The - buffer uses the watermarks as follows: - - - - - The buffer element will post BUFFERING messages until the high - watermark is hit. This instructs the application to keep the pipeline - PAUSED, which will eventually block the srcpad from pushing while - data is prerolled in the sinks. - - - - - When the high watermark is hit, a BUFFERING message with 100% will be - posted, which instructs the application to continue playback. - - - - - When during playback, the low watermark is hit, the queue will start - posting BUFFERING messages again, making the application PAUSE the - pipeline again until the high watermark is hit again. This is called - the rebuffering stage. - - - - - During playback, the queue level will fluctuate between the high and - the low watermark as a way to compensate for network irregularities. - - - - - This buffering method is usable when the demuxer operates in push mode. - Seeking in the stream requires the seek to happen in the network source. - It is mostly desirable when the total duration of the file is not known, - such as in live streaming or when efficient seeking is not - possible/required. - - - The problem is configuring a good low and high watermark. Here are some - ideas: - - - - - It is possible to measure the network bandwidth and configure the - low/high watermarks in such a way that buffering takes a fixed - amount of time. - - - The queue2 element in &GStreamer; core has the max-size-time property - that, together with the use-rate-estimate property, does exactly - that. Also the playbin buffer-duration property uses the rate estimate - to scale the amount of data that is buffered. 
- - - - - Based on the codec bitrate, it is also possible to set the watermarks - in such a way that a fixed amount of data is buffered before playback - starts. Normally, the buffering element doesn't know about the - bitrate of the stream but it can get this with a query. - - - - - Start with a fixed amount of bytes, measure the time between - rebuffering and increase the queue size until the time between - rebuffering is within the application's chosen limits. - - - - - The buffering element can be inserted anywhere in the pipeline. You could, - for example, insert the buffering element before a decoder. This would - make it possible to set the low/high watermarks based on time. - - - The buffering flag on playbin, performs buffering on the parsed data. - Another advantage of doing the buffering at a later stage is that you can - let the demuxer operate in pull mode. When reading data from a slow - network drive (with filesrc) this can be an interesting way to buffer. - - - - - Download buffering - - +---------+ +---------+ +-------+ - | httpsrc | | buffer | | demux | - | src - sink src - sink .... - +---------+ +----|----+ +-------+ - V - file - - - If we know the server is streaming a fixed length file to the client, - the application can choose to download the entire file on disk. The - buffer element will provide a push or pull based srcpad to the demuxer - to navigate in the downloaded file. - - - This mode is only suitable when the client can determine the length of - the file on the server. - - - In this case, buffering messages will be emitted as usual when the - requested range is not within the downloaded area + buffersize. The - buffering message will also contain an indication that incremental - download is being performed. This flag can be used to let the application - control the buffering in a more intelligent way, using the BUFFERING - query, for example. See . 
- - - - - Timeshift buffering - - +---------+ +---------+ +-------+ - | httpsrc | | buffer | | demux | - | src - sink src - sink .... - +---------+ +----|----+ +-------+ - V - file-ringbuffer - - - In this mode, a fixed size ringbuffer is kept to download the server - content. This allows for seeking in the buffered data. Depending on the - size of the ringbuffer one can seek further back in time. - - - This mode is suitable for all live streams. As with the incremental - download mode, buffering messages are emitted along with an indication - that timeshifting download is in progress. - - - - - Live buffering - - In live pipelines we usually introduce some fixed latency between the - capture and the playback elements. This latency can be introduced by - a queue (such as a jitterbuffer) or by other means (in the audiosink). - - - Buffering messages can be emitted in those live pipelines as well and - serve as an indication to the user of the latency buffering. The - application usually does not react to these buffering messages with a - state change. - - - - - Buffering strategies - - What follows are some ideas for implementing different buffering - strategies based on the buffering messages and buffering query. - - - - No-rebuffer strategy - - We would like to buffer enough data in the pipeline so that playback - continues without interruptions. What we need to know to implement - this is know the total remaining playback time in the file and the - total remaining download time. If the buffering time is less than the - playback time, we can start playback without interruptions. - - - We have all this information available with the DURATION, POSITION and - BUFFERING queries. We need to periodically execute the buffering query - to get the current buffering status. We also need to have a large - enough buffer to hold the complete file, worst case. It is best to - use this buffering strategy with download buffering (see - ). 
- - - This is what the code would look like: - - - - - -GstState target_state; -static gboolean is_live; -static gboolean is_buffering; - -static gboolean -buffer_timeout (gpointer data) -{ - GstElement *pipeline = data; - GstQuery *query; - gboolean busy; - gint percent; - gint64 estimated_total; - gint64 position, duration; - guint64 play_left; - - query = gst_query_new_buffering (GST_FORMAT_TIME); - - if (!gst_element_query (pipeline, query)) - return TRUE; - - gst_query_parse_buffering_percent (query, &busy, &percent); - gst_query_parse_buffering_range (query, NULL, NULL, NULL, &estimated_total); - - if (estimated_total == -1) - estimated_total = 0; - - /* calculate the remaining playback time */ - if (!gst_element_query_position (pipeline, GST_FORMAT_TIME, &position)) - position = -1; - if (!gst_element_query_duration (pipeline, GST_FORMAT_TIME, &duration)) - duration = -1; - - if (duration != -1 && position != -1) - play_left = GST_TIME_AS_MSECONDS (duration - position); - else - play_left = 0; - - g_message ("play_left %" G_GUINT64_FORMAT", estimated_total %" G_GUINT64_FORMAT - ", percent %d", play_left, estimated_total, percent); - - /* we are buffering or the estimated download time is bigger than the - * remaining playback time. We keep buffering. */ - is_buffering = (busy || estimated_total * 1.1 > play_left); - - if (!is_buffering) - gst_element_set_state (pipeline, target_state); - - return is_buffering; -} - -static void -on_message_buffering (GstBus *bus, GstMessage *message, gpointer user_data) -{ - GstElement *pipeline = user_data; - gint percent; - - /* no state management needed for live pipelines */ - if (is_live) - return; - - gst_message_parse_buffering (message, &percent); - - if (percent < 100) { - /* buffering busy */ - if (!is_buffering) { - is_buffering = TRUE; - if (target_state == GST_STATE_PLAYING) { - /* we were not buffering but PLAYING, PAUSE the pipeline. 
*/ - gst_element_set_state (pipeline, GST_STATE_PAUSED); - } - } - } -} - -static void -on_message_async_done (GstBus *bus, GstMessage *message, gpointer user_data) -{ - GstElement *pipeline = user_data; - - if (!is_buffering) - gst_element_set_state (pipeline, target_state); - else - g_timeout_add (500, buffer_timeout, pipeline); -} - -gint -main (gint argc, - gchar *argv[]) -{ - GstElement *pipeline; - GMainLoop *loop; - GstBus *bus; - GstStateChangeReturn ret; - - /* init GStreamer */ - gst_init (&argc, &argv); - loop = g_main_loop_new (NULL, FALSE); - - /* make sure we have a URI */ - if (argc != 2) { - g_print ("Usage: %s <URI>\n", argv[0]); - return -1; - } - - /* set up */ - pipeline = gst_element_factory_make ("playbin", "pipeline"); - g_object_set (G_OBJECT (pipeline), "uri", argv[1], NULL); - g_object_set (G_OBJECT (pipeline), "flags", 0x697 , NULL); - - bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); - gst_bus_add_signal_watch (bus); - - g_signal_connect (bus, "message::buffering", - (GCallback) on_message_buffering, pipeline); - g_signal_connect (bus, "message::async-done", - (GCallback) on_message_async_done, pipeline); - gst_object_unref (bus); - - is_buffering = FALSE; - target_state = GST_STATE_PLAYING; - ret = gst_element_set_state (pipeline, GST_STATE_PAUSED); - - switch (ret) { - case GST_STATE_CHANGE_SUCCESS: - is_live = FALSE; - break; - - case GST_STATE_CHANGE_FAILURE: - g_warning ("failed to PAUSE"); - return -1; - - case GST_STATE_CHANGE_NO_PREROLL: - is_live = TRUE; - break; - - default: - break; - } - - /* now run */ - g_main_loop_run (loop); - - /* also clean up */ - gst_element_set_state (pipeline, GST_STATE_NULL); - gst_object_unref (GST_OBJECT (pipeline)); - g_main_loop_unref (loop); - - return 0; -} -]]> - - - - See how we set the pipeline to the PAUSED state first. We will receive - buffering messages during the preroll state when buffering is needed. 
- When we are prerolled (on_message_async_done) we see if buffering is - going on, if not, we start playback. If buffering was going on, we start - a timeout to poll the buffering state. If the estimated time to download - is less than the remaining playback time, we start playback. - - - - diff --git a/docs/manual/advanced-clocks.xml b/docs/manual/advanced-clocks.xml deleted file mode 100644 index 41306272e0..0000000000 --- a/docs/manual/advanced-clocks.xml +++ /dev/null @@ -1,295 +0,0 @@ - - Clocks and synchronization in &GStreamer; - - - When playing complex media, each sound and video sample must be played in a - specific order at a specific time. For this purpose, GStreamer provides a - synchronization mechanism. - - - &GStreamer; provides support for the following use cases: - - - - Non-live sources with access faster than playback rate. This is - the case where one is reading media from a file and playing it - back in a synchronized fashion. In this case, multiple streams need - to be synchronized, like audio, video and subtitles. - - - - - Capture and synchronized muxing/mixing of media from multiple live - sources. This is a typical use case where you record audio and - video from a microphone/camera and mux it into a file for - storage. - - - - - Streaming from (slow) network streams with buffering. This is the - typical web streaming case where you access content from a streaming - server with http. - - - - - Capture from live source and and playback to live source with - configurable latency. This is used when, for example, capture from - a camera, apply an effect and display the result. It is also used - when streaming low latency content over a network with UDP. - - - - - Simultaneous live capture and playback from prerecorded content. - This is used in audio recording cases where you play a previously - recorded audio and record new samples, the purpose is to have the - new audio perfectly in sync with the previously recorded data. 
- - - - - - &GStreamer; uses a GstClock object, buffer - timestamps and a SEGMENT event to synchronize streams in a pipeline - as we will see in the next sections. - - - - Clock running-time - - In a typical computer, there are many sources that can be used as a - time source, e.g., the system time, soundcards, CPU performance - counters, ... For this reason, there are many - GstClock implementations available in &GStreamer;. - The clock time doesn't always start from 0 or from some known value. - Some clocks start counting from some known start date, other clocks start - counting since last reboot, etc... - - - A GstClock returns the - absolute-time - according to that clock with gst_clock_get_time (). - The absolute-time (or clock time) of a clock is monotonically increasing. - From the absolute-time is a running-time - calculated, which is simply the difference between a previous snapshot - of the absolute-time called the base-time. - So: - - - running-time = absolute-time - base-time - - - A &GStreamer; GstPipeline object maintains a - GstClock object and a base-time when it goes - to the PLAYING state. The pipeline gives a handle to the selected - GstClock to each element in the pipeline along - with selected base-time. The pipeline will select a base-time in such - a way that the running-time reflects the total time spent in the - PLAYING state. As a result, when the pipeline is PAUSED, the - running-time stands still. - - - Because all objects in the pipeline have the same clock and base-time, - they can thus all calculate the running-time according to the pipeline - clock. - - - - - Buffer running-time - - To calculate a buffer running-time, we need a buffer timestamp and - the SEGMENT event that preceeded the buffer. First we can convert - the SEGMENT event into a GstSegment object - and then we can use the - gst_segment_to_running_time () function to - perform the calculation of the buffer running-time. 
- - - Synchronization is now a matter of making sure that a buffer with a - certain running-time is played when the clock reaches the same - running-time. Usually this task is done by sink elements. Sink also - have to take into account the latency configured in the pipeline and - add this to the buffer running-time before synchronizing to the - pipeline clock. - - - Non-live sources timestamp buffers with a running-time starting - from 0. After a flushing seek, they will produce buffers again - from a running-time of 0. - - - Live sources need to timestamp buffers with a running-time matching - the pipeline running-time when the first byte of the buffer was - captured. - - - - - Buffer stream-time - - The buffer stream-time, also known as the position in the stream, - is calculated from the buffer timestamps and the preceding SEGMENT - event. It represents the time inside the media as a value between - 0 and the total duration of the media. - - - The stream-time is used in: - - - - Report the current position in the stream with the POSITION - query. - - - - - The position used in the seek events and queries. - - - - - The position used to synchronize controlled values. - - - - - - The stream-time is never used to synchronize streams, this is only - done with the running-time. - - - - - Time overview - - Here is an overview of the various timelines used in &GStreamer;. - - - The image below represents the different times in the pipeline when - playing a 100ms sample and repeating the part between 50ms and - 100ms. - - -
- &GStreamer; clock and various times - - - - - -
- - - You can see how the running-time of a buffer always increments - monotonically along with the clock-time. Buffers are played when their - running-time is equal to the clock-time - base-time. The stream-time - represents the position in the stream and jumps backwards when - repeating. - -
- - - Clock providers - - A clock provider is an element in the pipeline that can provide - a GstClock object. The clock object needs to - report an absolute-time that is monotonically increasing when the - element is in the PLAYING state. It is allowed to pause the clock - while the element is PAUSED. - - - Clock providers exist because they play back media at some rate, and - this rate is not necessarily the same as the system clock rate. For - example, a soundcard may playback at 44,1 kHz, but that doesn't mean - that after exactly 1 second according - to the system clock, the soundcard has played back 44.100 - samples. This is only true by approximation. In fact, the audio - device has an internal clock based on the number of samples played - that we can expose. - - - If an element with an internal clock needs to synchronize, it needs - to estimate when a time according to the pipeline clock will take - place according to the internal clock. To estimate this, it needs - to slave its clock to the pipeline clock. - - - If the pipeline clock is exactly the internal clock of an element, - the element can skip the slaving step and directly use the pipeline - clock to schedule playback. This can be both faster and more - accurate. - Therefore, generally, elements with an internal clock like audio - input or output devices will be a clock provider for the pipeline. - - - When the pipeline goes to the PLAYING state, it will go over all - elements in the pipeline from sink to source and ask each element - if they can provide a clock. The last element that can provide a - clock will be used as the clock provider in the pipeline. - This algorithm prefers a clock from an audio sink in a typical - playback pipeline and a clock from source elements in a typical - capture pipeline. - - - There exist some bus messages to let you know about the clock and - clock providers in the pipeline. 
You can see what clock is selected - in the pipeline by looking at the NEW_CLOCK message on the bus. - When a clock provider is removed from the pipeline, a CLOCK_LOST - message is posted and the application should go to PAUSED and back - to PLAYING to select a new clock. - - - - - Latency - - The latency is the time it takes for a sample captured at timestamp X - to reach the sink. This time is measured against the clock in the - pipeline. For pipelines where the only elements that synchronize against - the clock are the sinks, the latency is always 0 since no other element - is delaying the buffer. - - - For pipelines with live sources, a latency is introduced, mostly because - of the way a live source works. Consider an audio source, it will start - capturing the first sample at time 0. If the source pushes buffers with - 44100 samples at a time at 44100Hz it will have collected the buffer at - second 1. Since the timestamp of the buffer is 0 and the time of the - clock is now >= 1 second, the sink will drop this buffer because it is - too late. Without any latency compensation in the sink, all buffers will - be dropped. - - - - Latency compensation - - Before the pipeline goes to the PLAYING state, it will, in addition to - selecting a clock and calculating a base-time, calculate the latency - in the pipeline. It does this by doing a LATENCY query on all the sinks - in the pipeline. The pipeline then selects the maximum latency in the - pipeline and configures this with a LATENCY event. - - - All sink elements will delay playback by the value in the LATENCY event. - Since all sinks delay with the same amount of time, they will be - relative in sync. - - - - - Dynamic Latency - - Adding/removing elements to/from a pipeline or changing element - properties can change the latency in a pipeline. An element can - request a latency change in the pipeline by posting a LATENCY - message on the bus. 
The application can then decide to query and - redistribute a new latency or not. Changing the latency in a - pipeline might cause visual or audible glitches and should - therefore only be done by the application when it is allowed. - - - -
diff --git a/docs/manual/advanced-dataaccess.xml b/docs/manual/advanced-dataaccess.xml deleted file mode 100644 index 10f5d6befa..0000000000 --- a/docs/manual/advanced-dataaccess.xml +++ /dev/null @@ -1,1614 +0,0 @@ - - Pipeline manipulation - - This chapter will discuss how you can manipulate your pipeline in several - ways from your application on. Parts of this chapter are very - lowlevel, so be assured that you'll need some programming knowledge - and a good understanding of &GStreamer; before you start reading this. - - - Topics that will be discussed here include how you can insert data into - a pipeline from your application, how to read data from a pipeline, - how to manipulate the pipeline's speed, length, starting point and how - to listen to a pipeline's data processing. - - - - Using probes - - Probing is best envisioned as a pad listener. Technically, a probe is - nothing more than a callback that can be attached to a pad. - You can attach a probe using gst_pad_add_probe (). - Similarly, one can use the - gst_pad_remove_probe () - to remove the callback again. The probe notifies you of any activity - that happens on the pad, like buffers, events and queries. You can - define what kind of notifications you are interested in when you - add the probe. - - - The probe can notify you of the following activity on pads: - - - - - A buffer is pushed or pulled. You want to specify the - GST_PAD_PROBE_TYPE_BUFFER when registering the probe. Because the - pad can be scheduled in different ways, it is possible to also - specify in what scheduling mode you are interested with the - optional GST_PAD_PROBE_TYPE_PUSH and GST_PAD_PROBE_TYPE_PULL - flags. - - - You can use this probe to inspect, modify or drop the buffer. - See . - - - - - A bufferlist is pushed. Use the GST_PAD_PROBE_TYPE_BUFFER_LIST - when registering the probe. - - - - - An event travels over a pad. 
Use the GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM - and GST_PAD_PROBE_TYPE_EVENT_UPSTREAM flags to select downstream - and upstream events. There is also a convenience - GST_PAD_PROBE_TYPE_EVENT_BOTH to be notified of events going both - upstream and downstream. By default, flush events do not cause - a notification. You need to explicitly enable GST_PAD_PROBE_TYPE_EVENT_FLUSH - to receive callbacks from flushing events. Events are always - only notified in push mode. - - - You can use this probe to inspect, modify or drop the event. - - - - - A query travels over a pad. Use the GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM - and GST_PAD_PROBE_TYPE_QUERY_UPSTREAM flags to select downstream - and upstream queries. The convenience GST_PAD_PROBE_TYPE_QUERY_BOTH - can also be used to select both directions. Query probes will be - notified twice, once when the query travels upstream/downstream and - once when the query result is returned. You can select in what stage - the callback will be called with the GST_PAD_PROBE_TYPE_PUSH and - GST_PAD_PROBE_TYPE_PULL, respectively when the query is performed - and when the query result is returned. - - - You can use this probe to inspect or modify the query. You can also - answer the query in the probe callback by placing the result value - in the query and by returning GST_PAD_PROBE_DROP from the - callback. - - - - - In addition to notifying you of dataflow, you can also ask the - probe to block the dataflow when the callback returns. This is - called a blocking probe and is activated by specifying the - GST_PAD_PROBE_TYPE_BLOCK flag. You can use this flag with the - other flags to only block dataflow on selected activity. A pad - becomes unblocked again if you remove the probe or when you return - GST_PAD_PROBE_REMOVE from the callback. You can let only the - currently blocked item pass by returning GST_PAD_PROBE_PASS - from the callback, it will block again on the next item. 
- - - Blocking probes are used to temporarily block pads because they - are unlinked or because you are going to unlink them. If the - dataflow is not blocked, the pipeline would go into an error - state if data is pushed on an unlinked pad. We will se how - to use blocking probes to partially preroll a pipeline. - See also . - - - - - Be notified when no activity is happening on a pad. You install - this probe with the GST_PAD_PROBE_TYPE_IDLE flag. You can specify - GST_PAD_PROBE_TYPE_PUSH and/or GST_PAD_PROBE_TYPE_PULL to - only be notified depending on the pad scheduling mode. - The IDLE probe is also a blocking probe in that it will not let - any data pass on the pad for as long as the IDLE probe is - installed. - - - You can use idle probes to dynamically relink a pad. We will see - how to use idle probes to replace an element in the pipeline. - See also . - - - - - - Data probes - - Data probes allow you to be notified when there is data passing - on a pad. When adding the probe, specify the GST_PAD_PROBE_TYPE_BUFFER - and/or GST_PAD_PROBE_TYPE_BUFFER_LIST. - - - Data probes run in pipeline streaming thread context, so callbacks - should try to not block and generally not do any weird stuff, since - this could have a negative impact on pipeline performance or, in case - of bugs, cause deadlocks or crashes. More precisely, one should usually - not call any GUI-related functions from within a probe callback, nor try - to change the state of the pipeline. An application may post custom - messages on the pipeline's bus though to communicate with the main - application thread and have it do things like stop the pipeline. - - - In any case, most common buffer operations - that elements can do in _chain () functions, can - be done in probe callbacks as well. The example below gives a short - impression on how to use them. 
- - - - - -static GstPadProbeReturn -cb_have_data (GstPad *pad, - GstPadProbeInfo *info, - gpointer user_data) -{ - gint x, y; - GstMapInfo map; - guint16 *ptr, t; - GstBuffer *buffer; - - buffer = GST_PAD_PROBE_INFO_BUFFER (info); - - buffer = gst_buffer_make_writable (buffer); - - /* Making a buffer writable can fail (for example if it - * cannot be copied and is used more than once) - */ - if (buffer == NULL) - return GST_PAD_PROBE_OK; - - /* Mapping a buffer can fail (non-writable) */ - if (gst_buffer_map (buffer, &map, GST_MAP_WRITE)) { - ptr = (guint16 *) map.data; - /* invert data */ - for (y = 0; y < 288; y++) { - for (x = 0; x < 384 / 2; x++) { - t = ptr[384 - 1 - x]; - ptr[384 - 1 - x] = ptr[x]; - ptr[x] = t; - } - ptr += 384; - } - gst_buffer_unmap (buffer, &map); - } - - GST_PAD_PROBE_INFO_DATA (info) = buffer; - - return GST_PAD_PROBE_OK; -} - -gint -main (gint argc, - gchar *argv[]) -{ - GMainLoop *loop; - GstElement *pipeline, *src, *sink, *filter, *csp; - GstCaps *filtercaps; - GstPad *pad; - - /* init GStreamer */ - gst_init (&argc, &argv); - loop = g_main_loop_new (NULL, FALSE); - - /* build */ - pipeline = gst_pipeline_new ("my-pipeline"); - src = gst_element_factory_make ("videotestsrc", "src"); - if (src == NULL) - g_error ("Could not create 'videotestsrc' element"); - - filter = gst_element_factory_make ("capsfilter", "filter"); - g_assert (filter != NULL); /* should always exist */ - - csp = gst_element_factory_make ("videoconvert", "csp"); - if (csp == NULL) - g_error ("Could not create 'videoconvert' element"); - - sink = gst_element_factory_make ("xvimagesink", "sink"); - if (sink == NULL) { - sink = gst_element_factory_make ("ximagesink", "sink"); - if (sink == NULL) - g_error ("Could not create neither 'xvimagesink' nor 'ximagesink' element"); - } - - gst_bin_add_many (GST_BIN (pipeline), src, filter, csp, sink, NULL); - gst_element_link_many (src, filter, csp, sink, NULL); - filtercaps = gst_caps_new_simple ("video/x-raw", - "format", 
G_TYPE_STRING, "RGB16", - "width", G_TYPE_INT, 384, - "height", G_TYPE_INT, 288, - "framerate", GST_TYPE_FRACTION, 25, 1, - NULL); - g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL); - gst_caps_unref (filtercaps); - - pad = gst_element_get_static_pad (src, "src"); - gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER, - (GstPadProbeCallback) cb_have_data, NULL, NULL); - gst_object_unref (pad); - - /* run */ - gst_element_set_state (pipeline, GST_STATE_PLAYING); - - /* wait until it's up and running or failed */ - if (gst_element_get_state (pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) { - g_error ("Failed to go into PLAYING state"); - } - - g_print ("Running ...\n"); - g_main_loop_run (loop); - - /* exit */ - gst_element_set_state (pipeline, GST_STATE_NULL); - gst_object_unref (pipeline); - - return 0; -} -]]> - - - - Compare that output with the output of gst-launch-1.0 - videotestsrc ! xvimagesink, just so you know what you're - looking for. - - - Strictly speaking, a pad probe callback is only allowed to modify the - buffer content if the buffer is writable. Whether this is the case or - not depends a lot on the pipeline and the elements involved. Often - enough, this is the case, but sometimes it is not, and if it is not - then unexpected modification of the data or metadata can introduce - bugs that are very hard to debug and track down. You can check if a - buffer is writable with gst_buffer_is_writable (). - Since you can pass back a different buffer than the one passed in, - it is a good idea to make the buffer writable in the callback function - with gst_buffer_make_writable (). - - - Pad probes are suited best for looking at data as it passes through - the pipeline. If you need to modify data, you should better write your - own GStreamer element. Base classes like GstAudioFilter, GstVideoFilter or - GstBaseTransform make this fairly easy. 
- - - If you just want to inspect buffers as they pass through the pipeline, - you don't even need to set up pad probes. You could also just insert - an identity element into the pipeline and connect to its "handoff" - signal. The identity element also provides a few useful debugging tools - like the "dump" property or the "last-message" property (the latter is - enabled by passing the '-v' switch to gst-launch and by setting the - silent property on the identity to FALSE). - - - - - Play a region of a media file - - In this example we will show you how to play back a region of - a media file. The goal is to only play the part of a file - from 2 seconds to 5 seconds and then EOS. - - - In a first step we will set a uridecodebin element to the PAUSED - state and make sure that we block all the source pads that are - created. When all the source pads are blocked, we have data on - all source pads and we say that the uridecodebin is prerolled. - - - In a prerolled pipeline we can ask for the duration of the media - and we can also perform seeks. We are interested in performing a - seek operation on the pipeline to select the range of media - that we are interested in. - - - After we configure the region we are interested in, we can link - the sink element, unblock the source pads and set the pipeline to - the playing state. You will see that exactly the requested - region is played by the sink before it goes to EOS. - - - What follows is an example application that loosly follows this - algorithm. - - - - - -static GMainLoop *loop; -static volatile gint counter; -static GstBus *bus; -static gboolean prerolled = FALSE; -static GstPad *sinkpad; - -static void -dec_counter (GstElement * pipeline) -{ - if (prerolled) - return; - - if (g_atomic_int_dec_and_test (&counter)) { - /* all probes blocked and no-more-pads signaled, post - * message on the bus. 
*/ - prerolled = TRUE; - - gst_bus_post (bus, gst_message_new_application ( - GST_OBJECT_CAST (pipeline), - gst_structure_new_empty ("ExPrerolled"))); - } -} - -/* called when a source pad of uridecodebin is blocked */ -static GstPadProbeReturn -cb_blocked (GstPad *pad, - GstPadProbeInfo *info, - gpointer user_data) -{ - GstElement *pipeline = GST_ELEMENT (user_data); - - if (prerolled) - return GST_PAD_PROBE_REMOVE; - - dec_counter (pipeline); - - return GST_PAD_PROBE_OK; -} - -/* called when uridecodebin has a new pad */ -static void -cb_pad_added (GstElement *element, - GstPad *pad, - gpointer user_data) -{ - GstElement *pipeline = GST_ELEMENT (user_data); - - if (prerolled) - return; - - g_atomic_int_inc (&counter); - - gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, - (GstPadProbeCallback) cb_blocked, pipeline, NULL); - - /* try to link to the video pad */ - gst_pad_link (pad, sinkpad); -} - -/* called when uridecodebin has created all pads */ -static void -cb_no_more_pads (GstElement *element, - gpointer user_data) -{ - GstElement *pipeline = GST_ELEMENT (user_data); - - if (prerolled) - return; - - dec_counter (pipeline); -} - -/* called when a new message is posted on the bus */ -static void -cb_message (GstBus *bus, - GstMessage *message, - gpointer user_data) -{ - GstElement *pipeline = GST_ELEMENT (user_data); - - switch (GST_MESSAGE_TYPE (message)) { - case GST_MESSAGE_ERROR: - g_print ("we received an error!\n"); - g_main_loop_quit (loop); - break; - case GST_MESSAGE_EOS: - g_print ("we reached EOS\n"); - g_main_loop_quit (loop); - break; - case GST_MESSAGE_APPLICATION: - { - if (gst_message_has_name (message, "ExPrerolled")) { - /* it's our message */ - g_print ("we are all prerolled, do seek\n"); - gst_element_seek (pipeline, - 1.0, GST_FORMAT_TIME, - GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, - GST_SEEK_TYPE_SET, 2 * GST_SECOND, - GST_SEEK_TYPE_SET, 5 * GST_SECOND); - - gst_element_set_state (pipeline, GST_STATE_PLAYING); - } - 
break; - } - default: - break; - } -} - -gint -main (gint argc, - gchar *argv[]) -{ - GstElement *pipeline, *src, *csp, *vs, *sink; - - /* init GStreamer */ - gst_init (&argc, &argv); - loop = g_main_loop_new (NULL, FALSE); - - if (argc < 2) { - g_print ("usage: %s ", argv[0]); - return -1; - } - - /* build */ - pipeline = gst_pipeline_new ("my-pipeline"); - - bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); - gst_bus_add_signal_watch (bus); - g_signal_connect (bus, "message", (GCallback) cb_message, - pipeline); - - src = gst_element_factory_make ("uridecodebin", "src"); - if (src == NULL) - g_error ("Could not create 'uridecodebin' element"); - - g_object_set (src, "uri", argv[1], NULL); - - csp = gst_element_factory_make ("videoconvert", "csp"); - if (csp == NULL) - g_error ("Could not create 'videoconvert' element"); - - vs = gst_element_factory_make ("videoscale", "vs"); - if (csp == NULL) - g_error ("Could not create 'videoscale' element"); - - sink = gst_element_factory_make ("autovideosink", "sink"); - if (sink == NULL) - g_error ("Could not create 'autovideosink' element"); - - gst_bin_add_many (GST_BIN (pipeline), src, csp, vs, sink, NULL); - - /* can't link src yet, it has no pads */ - gst_element_link_many (csp, vs, sink, NULL); - - sinkpad = gst_element_get_static_pad (csp, "sink"); - - /* for each pad block that is installed, we will increment - * the counter. for each pad block that is signaled, we - * decrement the counter. When the counter is 0 we post - * an app message to tell the app that all pads are - * blocked. 
Start with 1 that is decremented when no-more-pads - * is signaled to make sure that we only post the message - * after no-more-pads */ - g_atomic_int_set (&counter, 1); - - g_signal_connect (src, "pad-added", - (GCallback) cb_pad_added, pipeline); - g_signal_connect (src, "no-more-pads", - (GCallback) cb_no_more_pads, pipeline); - - gst_element_set_state (pipeline, GST_STATE_PAUSED); - - g_main_loop_run (loop); - - gst_element_set_state (pipeline, GST_STATE_NULL); - - gst_object_unref (sinkpad); - gst_object_unref (bus); - gst_object_unref (pipeline); - g_main_loop_unref (loop); - - return 0; -} -]]> - - - - Note that we use a custom application message to signal the - main thread that the uridecidebin is prerolled. The main thread - will then issue a flushing seek to the requested region. The - flush will temporarily unblock the pad and reblock them when - new data arrives again. We detect this second block to remove - the probes. Then we set the pipeline to PLAYING and it should - play from 2 to 5 seconds, then EOS and exit the application. - - - - - - Manually adding or removing data from/to a pipeline - - Many people have expressed the wish to use their own sources to inject - data into a pipeline. Some people have also expressed the wish to grab - the output in a pipeline and take care of the actual output inside - their application. While either of these methods are strongly - discouraged, &GStreamer; offers support for this. - Beware! You need to know what you are doing. Since - you don't have any support from a base class you need to thoroughly - understand state changes and synchronization. If it doesn't work, - there are a million ways to shoot yourself in the foot. It's always - better to simply write a plugin and have the base class manage it. - See the Plugin Writer's Guide for more information on this topic. Also - see the next section, which will explain how to embed plugins statically - in your application. 
- - - There's two possible elements that you can use for the above-mentioned - purposes. Those are called appsrc (an imaginary source) - and appsink (an imaginary sink). The same method applies - to each of those elements. Here, we will discuss how to use those - elements to insert (using appsrc) or grab (using appsink) data from a - pipeline, and how to set negotiation. - - - Both appsrc and appsink provide 2 sets of API. One API uses standard - GObject (action) signals and properties. The same API is also - available as a regular C api. The C api is more performant but - requires you to link to the app library in order to use the elements. - - - - Inserting data with appsrc - - First we look at some examples for appsrc, which lets you insert data - into the pipeline from the application. Appsrc has some configuration - options that define how it will operate. You should decide about the - following configurations: - - - - - Will the appsrc operate in push or pull mode. The stream-type - property can be used to control this. stream-type of - random-access will activate pull mode scheduling - while the other stream-types activate push mode. - - - - - The caps of the buffers that appsrc will push out. This needs to - be configured with the caps property. The caps must be set to a - fixed caps and will be used to negotiate a format downstream. - - - - - If the appsrc operates in live mode or not. This can be configured - with the is-live property. When operating in live-mode it is - important to configure the min-latency and max-latency in appsrc. - The min-latency should be set to the amount of time it takes between - capturing a buffer and when it is pushed inside appsrc. - In live mode, you should timestamp the buffers with the pipeline - running-time when the first byte of the buffer was captured before - feeding them to appsrc. 
You can let appsrc do the timestaping with - the do-timestamp property (but then the min-latency must be set - to 0 because it timestamps based on the running-time when the buffer - entered appsrc). - - - - - The format of the SEGMENT event that appsrc will push. The format - has implications for how the running-time of the buffers will - be calculated so you must be sure you understand this. For - live sources you probably want to set the format property to - GST_FORMAT_TIME. For non-live source it depends on the media type - that you are handling. If you plan to timestamp the buffers, you - should probably put a GST_FORMAT_TIME format, otherwise - GST_FORMAT_BYTES might be appropriate. - - - - - If appsrc operates in random-access mode, it is important to configure - the size property of appsrc with the number of bytes in the stream. - This will allow downstream elements to know the size of the media and - alows them to seek to the end of the stream when needed. - - - - - The main way of handling data to appsrc is by using the function - gst_app_src_push_buffer () or by emiting the - push-buffer action signal. This will put the buffer onto a queue from - which appsrc will read from in its streaming thread. It is important - to note that data transport will not happen from the thread that - performed the push-buffer call. - - - The max-bytes property controls how much data can be - queued in appsrc before appsrc considers the queue full. A filled - internal queue will always signal the enough-data - signal, which signals the application that it should stop pushing - data into appsrc. The block property will cause appsrc to - block the push-buffer method until free data becomes available again. - - - When the internal queue is running out of data, the - need-data signal is emitted, which signals the application - that it should start pushing more data into appsrc. 
- - - In addition to the need-data and enough-data - signals, appsrc can emit the seek-data signal when the - stream-mode property is set to seekable - or random-access. The signal argument will contain the - new desired position in the stream expressed in the unit set with the - format property. After receiving the seek-data signal, - the application should push-buffers from the new position. - - - When the last byte is pushed into appsrc, you must call - gst_app_src_end_of_stream () to make it send - an EOS downstream. - - - These signals allow the application to operate appsrc in push and - pull mode as will be explained next. - - - - Using appsrc in push mode - - When appsrc is configured in push mode (stream-type is stream or - seekable), the application repeatedly calls the push-buffer method - with a new buffer. Optionally, the queue size in the appsrc can be - controlled with the enough-data and need-data signals by respectively - stopping/starting the push-buffer calls. The value of the - min-percent property defines how empty the internal appsrc queue - needs to be before the need-data signal will be fired. You can set - this to some value >0 to avoid completely draining the queue. - - - When the stream-type is set to seekable, don't forget to implement - a seek-data callback. - - - Use this model when implementing various network protocols or - hardware devices. - - - - - Using appsrc in pull mode - - In the pull model, data is fed to appsrc from the need-data signal - handler. You should push exactly the amount of bytes requested in the - need-data signal. You are only allowed to push less bytes when you are - at the end of the stream. - - - Use this model for file access or other randomly accessable sources. - - - - - Appsrc example - - This example application will generate black/white (it switches - every second) video to an Xv-window output by using appsrc as a - source with caps to force a format. 
We use a colorspace - conversion element to make sure that we feed the right format to - your X server. We configure a video stream with a variable framerate - (0/1) and we set the timestamps on the outgoing buffers in such - a way that we play 2 frames per second. - - - Note how we use the pull mode method of pushing new buffers into - appsrc although appsrc is running in push mode. - - - - - -static GMainLoop *loop; - -static void -cb_need_data (GstElement *appsrc, - guint unused_size, - gpointer user_data) -{ - static gboolean white = FALSE; - static GstClockTime timestamp = 0; - GstBuffer *buffer; - guint size; - GstFlowReturn ret; - - size = 385 * 288 * 2; - - buffer = gst_buffer_new_allocate (NULL, size, NULL); - - /* this makes the image black/white */ - gst_buffer_memset (buffer, 0, white ? 0xff : 0x0, size); - - white = !white; - - GST_BUFFER_PTS (buffer) = timestamp; - GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2); - - timestamp += GST_BUFFER_DURATION (buffer); - - g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret); - gst_buffer_unref (buffer); - - if (ret != GST_FLOW_OK) { - /* something wrong, stop pushing */ - g_main_loop_quit (loop); - } -} - -gint -main (gint argc, - gchar *argv[]) -{ - GstElement *pipeline, *appsrc, *conv, *videosink; - - /* init GStreamer */ - gst_init (&argc, &argv); - loop = g_main_loop_new (NULL, FALSE); - - /* setup pipeline */ - pipeline = gst_pipeline_new ("pipeline"); - appsrc = gst_element_factory_make ("appsrc", "source"); - conv = gst_element_factory_make ("videoconvert", "conv"); - videosink = gst_element_factory_make ("xvimagesink", "videosink"); - - /* setup */ - g_object_set (G_OBJECT (appsrc), "caps", - gst_caps_new_simple ("video/x-raw", - "format", G_TYPE_STRING, "RGB16", - "width", G_TYPE_INT, 384, - "height", G_TYPE_INT, 288, - "framerate", GST_TYPE_FRACTION, 0, 1, - NULL), NULL); - gst_bin_add_many (GST_BIN (pipeline), appsrc, conv, videosink, NULL); - gst_element_link_many 
(appsrc, conv, videosink, NULL); - - /* setup appsrc */ - g_object_set (G_OBJECT (appsrc), - "stream-type", 0, - "format", GST_FORMAT_TIME, NULL); - g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL); - - /* play */ - gst_element_set_state (pipeline, GST_STATE_PLAYING); - g_main_loop_run (loop); - - /* clean up */ - gst_element_set_state (pipeline, GST_STATE_NULL); - gst_object_unref (GST_OBJECT (pipeline)); - g_main_loop_unref (loop); - - return 0; - } -]]> - - - - - - - Grabbing data with appsink - - Unlike appsrc, appsink is a little easier to use. It also supports - a pull and push based model of getting data from the pipeline. - - - The normal way of retrieving samples from appsink is by using the - gst_app_sink_pull_sample() and - gst_app_sink_pull_preroll() methods or by using - the pull-sample and pull-preroll - signals. These methods block until a sample becomes available in the - sink or when the sink is shut down or reaches EOS. - - - Appsink will internally use a queue to collect buffers from the - streaming thread. If the application is not pulling samples fast - enough, this queue will consume a lot of memory over time. The - max-buffers property can be used to limit the queue - size. The drop property controls whether the - streaming thread blocks or if older buffers are dropped when the - maximum queue size is reached. Note that blocking the streaming thread - can negatively affect real-time performance and should be avoided. - - - If a blocking behaviour is not desirable, setting the - emit-signals property to TRUE will make appsink emit - the new-sample and new-preroll signals - when a sample can be pulled without blocking. - - - The caps property on appsink can be used to control - the formats that appsink can receive. This property can contain - non-fixed caps, the format of the pulled samples can be obtained by - getting the sample caps. 
- - - If one of the pull-preroll or pull-sample methods return NULL, the - appsink is stopped or in the EOS state. You can check for the EOS state - with the eos property or with the - gst_app_sink_is_eos() method. - - - The eos signal can also be used to be informed when the EOS state is - reached to avoid polling. - - - Consider configuring the following properties in the appsink: - - - - - The sync property if you want to have the sink - base class synchronize the buffer against the pipeline clock - before handing you the sample. - - - - - Enable Quality-of-Service with the qos property. - If you are dealing with raw video frames and let the base class - sycnhronize on the clock, it might be a good idea to also let - the base class send QOS events upstream. - - - - - The caps property that contains the accepted caps. Upstream elements - will try to convert the format so that it matches the configured - caps on appsink. You must still check the - GstSample to get the actual caps of the - buffer. - - - - - - Appsink example - - What follows is an example on how to capture a snapshot of a video - stream using appsink. - - - - -#ifdef HAVE_GTK -#include -#endif - -#include - -#define CAPS "video/x-raw,format=RGB,width=160,pixel-aspect-ratio=1/1" - -int -main (int argc, char *argv[]) -{ - GstElement *pipeline, *sink; - gint width, height; - GstSample *sample; - gchar *descr; - GError *error = NULL; - gint64 duration, position; - GstStateChangeReturn ret; - gboolean res; - GstMapInfo map; - - gst_init (&argc, &argv); - - if (argc != 2) { - g_print ("usage: %s \n Writes snapshot.png in the current directory\n", - argv[0]); - exit (-1); - } - - /* create a new pipeline */ - descr = - g_strdup_printf ("uridecodebin uri=%s ! videoconvert ! videoscale ! 
" - " appsink name=sink caps=\"" CAPS "\"", argv[1]); - pipeline = gst_parse_launch (descr, &error); - - if (error != NULL) { - g_print ("could not construct pipeline: %s\n", error->message); - g_clear_error (&error); - exit (-1); - } - - /* get sink */ - sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink"); - - /* set to PAUSED to make the first frame arrive in the sink */ - ret = gst_element_set_state (pipeline, GST_STATE_PAUSED); - switch (ret) { - case GST_STATE_CHANGE_FAILURE: - g_print ("failed to play the file\n"); - exit (-1); - case GST_STATE_CHANGE_NO_PREROLL: - /* for live sources, we need to set the pipeline to PLAYING before we can - * receive a buffer. We don't do that yet */ - g_print ("live sources not supported yet\n"); - exit (-1); - default: - break; - } - /* This can block for up to 5 seconds. If your machine is really overloaded, - * it might time out before the pipeline prerolled and we generate an error. A - * better way is to run a mainloop and catch errors there. */ - ret = gst_element_get_state (pipeline, NULL, NULL, 5 * GST_SECOND); - if (ret == GST_STATE_CHANGE_FAILURE) { - g_print ("failed to play the file\n"); - exit (-1); - } - - /* get the duration */ - gst_element_query_duration (pipeline, GST_FORMAT_TIME, &duration); - - if (duration != -1) - /* we have a duration, seek to 5% */ - position = duration * 5 / 100; - else - /* no duration, seek to 1 second, this could EOS */ - position = 1 * GST_SECOND; - - /* seek to the a position in the file. Most files have a black first frame so - * by seeking to somewhere else we have a bigger chance of getting something - * more interesting. 
An optimisation would be to detect black images and then - * seek a little more */ - gst_element_seek_simple (pipeline, GST_FORMAT_TIME, - GST_SEEK_FLAG_KEY_UNIT | GST_SEEK_FLAG_FLUSH, position); - - /* get the preroll buffer from appsink, this block untils appsink really - * prerolls */ - g_signal_emit_by_name (sink, "pull-preroll", &sample, NULL); - - /* if we have a buffer now, convert it to a pixbuf. It's possible that we - * don't have a buffer because we went EOS right away or had an error. */ - if (sample) { - GstBuffer *buffer; - GstCaps *caps; - GstStructure *s; - - /* get the snapshot buffer format now. We set the caps on the appsink so - * that it can only be an rgb buffer. The only thing we have not specified - * on the caps is the height, which is dependant on the pixel-aspect-ratio - * of the source material */ - caps = gst_sample_get_caps (sample); - if (!caps) { - g_print ("could not get snapshot format\n"); - exit (-1); - } - s = gst_caps_get_structure (caps, 0); - - /* we need to get the final caps on the buffer to get the size */ - res = gst_structure_get_int (s, "width", &width); - res |= gst_structure_get_int (s, "height", &height); - if (!res) { - g_print ("could not get snapshot dimension\n"); - exit (-1); - } - - /* create pixmap from buffer and save, gstreamer video buffers have a stride - * that is rounded up to the nearest multiple of 4 */ - buffer = gst_sample_get_buffer (sample); - /* Mapping a buffer can fail (non-readable) */ - if (gst_buffer_map (buffer, &map, GST_MAP_READ)) { -#ifdef HAVE_GTK - pixbuf = gdk_pixbuf_new_from_data (map.data, - GDK_COLORSPACE_RGB, FALSE, 8, width, height, - GST_ROUND_UP_4 (width * 3), NULL, NULL); - - /* save the pixbuf */ - gdk_pixbuf_save (pixbuf, "snapshot.png", "png", &error, NULL); -#endif - gst_buffer_unmap (buffer, &map); - } - gst_sample_unref (sample); - } else { - g_print ("could not make snapshot\n"); - } - - /* cleanup and exit */ - gst_element_set_state (pipeline, GST_STATE_NULL); - 
gst_object_unref (pipeline); - - exit (0); -} -]]> - - - - - - - - Forcing a format - - Sometimes you'll want to set a specific format, for example a video - size and format or an audio bitsize and number of channels. You can - do this by forcing a specific GstCaps on - the pipeline, which is possible by using - filtered caps. You can set a filtered caps on - a link by using the capsfilter element in between the - two elements, and specifying a GstCaps as - caps property on this element. It will then - only allow types matching that specified capability set for - negotiation. See also . - - - - Changing format in a PLAYING pipeline - - It is also possible to dynamically change the format in a pipeline - while PLAYING. This can simply be done by changing the caps - property on a capsfilter. The capsfilter will send a RECONFIGURE - event upstream that will make the upstream element attempt to - renegotiate a new format and allocator. This only works if - the upstream element is not using fixed caps on the source pad. - - - Below is an example of how you can change the caps of a pipeline - while in the PLAYING state: - - - - - -#include - -#define MAX_ROUND 100 - -int -main (int argc, char **argv) -{ - GstElement *pipe, *filter; - GstCaps *caps; - gint width, height; - gint xdir, ydir; - gint round; - GstMessage *message; - - gst_init (&argc, &argv); - - pipe = gst_parse_launch_full ("videotestsrc ! capsfilter name=filter ! 
" - "ximagesink", NULL, GST_PARSE_FLAG_NONE, NULL); - g_assert (pipe != NULL); - - filter = gst_bin_get_by_name (GST_BIN (pipe), "filter"); - g_assert (filter); - - width = 320; - height = 240; - xdir = ydir = -10; - - for (round = 0; round < MAX_ROUND; round++) { - gchar *capsstr; - g_print ("resize to %dx%d (%d/%d) \r", width, height, round, MAX_ROUND); - - /* we prefer our fixed width and height but allow other dimensions to pass - * as well */ - capsstr = g_strdup_printf ("video/x-raw, width=(int)%d, height=(int)%d", - width, height); - - caps = gst_caps_from_string (capsstr); - g_free (capsstr); - g_object_set (filter, "caps", caps, NULL); - gst_caps_unref (caps); - - if (round == 0) - gst_element_set_state (pipe, GST_STATE_PLAYING); - - width += xdir; - if (width >= 320) - xdir = -10; - else if (width < 200) - xdir = 10; - - height += ydir; - if (height >= 240) - ydir = -10; - else if (height < 150) - ydir = 10; - - message = - gst_bus_poll (GST_ELEMENT_BUS (pipe), GST_MESSAGE_ERROR, - 50 * GST_MSECOND); - if (message) { - g_print ("got error \n"); - - gst_message_unref (message); - } - } - g_print ("done \n"); - - gst_object_unref (filter); - gst_element_set_state (pipe, GST_STATE_NULL); - gst_object_unref (pipe); - - return 0; -} -]]> - - - - Note how we use gst_bus_poll() with a - small timeout to get messages and also introduce a short - sleep. - - - It is possible to set multiple caps for the capsfilter separated - with a ;. The capsfilter will try to renegotiate to the first - possible format from the list. - - - - - - Dynamically changing the pipeline - - In this section we talk about some techniques for dynamically - modifying the pipeline. We are talking specifically about changing - the pipeline while it is in the PLAYING state without interrupting - the flow. 
- - - There are some important things to consider when building dynamic - pipelines: - - - - - When removing elements from the pipeline, make sure that there - is no dataflow on unlinked pads because that will cause a fatal - pipeline error. Always block source pads (in push mode) or - sink pads (in pull mode) before unlinking pads. - See also . - - - - - When adding elements to a pipeline, make sure to put the element - into the right state, usually the same state as the parent, before - allowing dataflow the element. When an element is newly created, - it is in the NULL state and will return an error when it - receives data. - See also . - - - - - When adding elements to a pipeline, &GStreamer; will by default - set the clock and base-time on the element to the current values - of the pipeline. This means that the element will be able to - construct the same pipeline running-time as the other elements - in the pipeline. This means that sinks will synchronize buffers - like the other sinks in the pipeline and that sources produce - buffers with a running-time that matches the other sources. - - - - - When unlinking elements from an upstream chain, always make sure - to flush any queued data in the element by sending an EOS event - down the element sink pad(s) and by waiting that the EOS leaves - the elements (with an event probe). - - - If you do not do this, you will lose the data which is buffered - by the unlinked element. This can result in a simple frame loss - (one or more video frames, several milliseconds of audio). However - if you remove a muxer (and in some cases an encoder or similar elements) - from the pipeline, you risk getting a corrupted file which could not be - played properly, as some relevant metadata (header, seek/index tables, internal - sync tags) will not be stored or updated properly. - - - See also . - - - - - A live source will produce buffers with a running-time of the - current running-time in the pipeline. 
- - - A pipeline without a live source produces buffers with a - running-time starting from 0. Likewise, after a flushing seek, - those pipelines reset the running-time back to 0. - - - The running-time can be changed with - gst_pad_set_offset (). It is important to - know the running-time of the elements in the pipeline in order - to maintain synchronization. - - - - - Adding elements might change the state of the pipeline. Adding a - non-prerolled sink, for example, brings the pipeline back to the - prerolling state. Removing a non-prerolled sink, for example, might - change the pipeline to PAUSED and PLAYING state. - - - Adding a live source cancels the preroll stage and put the pipeline - to the playing state. Adding a live source or other live elements - might also change the latency of a pipeline. - - - Adding or removing elements to the pipeline might change the clock - selection of the pipeline. If the newly added element provides a clock, - it might be worth changing the clock in the pipeline to the new - clock. If, on the other hand, the element that provides the clock - for the pipeline is removed, a new clock has to be selected. - - - - - Adding and removing elements might cause upstream or downstream - elements to renegotiate caps and or allocators. You don't really - need to do anything from the application, plugins largely - adapt themself to the new pipeline topology in order to optimize - their formats and allocation strategy. - - - What is important is that when you add, remove or change elements - in the pipeline, it is possible that the pipeline needs to - negotiate a new format and this can fail. Usually you can fix this - by inserting the right converter elements where needed. - See also . - - - - - - &GStreamer; offers support for doing about any dynamic pipeline - modification but it requires you to know a bit of details before - you can do this without causing pipeline errors. 
In the following - sections we will demonstrate a couple of typical use-cases. - - - - Changing elements in a pipeline - - In the next example we look at the following chain of elements: - - - - ----. .----------. .---- - - element1 | | element2 | | element3 - src -> sink src -> sink - - ----' '----------' '---- - - - - We want to change element2 by element4 while the pipeline is in - the PLAYING state. Let's say that element2 is a visualization and - that you want to switch the visualization in the pipeline. - - - We can't just unlink element2's sinkpad from element1's source - pad because that would leave element1's source pad - unlinked and would cause a streaming error in the pipeline when - data is pushed on the source pad. - The technique is to block the dataflow from element1's source pad - before we change element2 by element4 and then resume dataflow - as shown in the following steps: - - - - - Block element1's source pad with a blocking pad probe. When the - pad is blocked, the probe callback will be called. - - - - - Inside the block callback nothing is flowing between element1 - and element2 and nothing will flow until unblocked. - - - - - Unlink element1 and element2. - - - - - Make sure data is flushed out of element2. Some elements might - internally keep some data, you need to make sure not to lose data - by forcing it out of element2. You can do this by pushing EOS into - element2, like this: - - - - - Put an event probe on element2's source pad. - - - - - Send EOS to element2's sinkpad. This makes sure the all the - data inside element2 is forced out. - - - - - Wait for the EOS event to appear on element2's source pad. - When the EOS is received, drop it and remove the event - probe. - - - - - - - Unlink element2 and element3. You can now also remove element2 - from the pipeline and set the state to NULL. - - - - - Add element4 to the pipeline, if not already added. Link element4 - and element3. Link element1 and element4. 
- - - - - Make sure element4 is in the same state as the rest of the elements - in the pipeline. It should be at least in the PAUSED state before - it can receive buffers and events. - - - - - Unblock element1's source pad probe. This will let new data into - element4 and continue streaming. - - - - - The above algorithm works when the source pad is blocked, i.e. when - there is dataflow in the pipeline. If there is no dataflow, there is - also no point in changing the element (just yet) so this algorithm can - be used in the PAUSED state as well. - - - Let show you how this works with an example. This example changes the - video effect on a simple pipeline every second. - - - - - -static gchar *opt_effects = NULL; - -#define DEFAULT_EFFECTS "identity,exclusion,navigationtest," \ - "agingtv,videoflip,vertigotv,gaussianblur,shagadelictv,edgetv" - -static GstPad *blockpad; -static GstElement *conv_before; -static GstElement *conv_after; -static GstElement *cur_effect; -static GstElement *pipeline; - -static GQueue effects = G_QUEUE_INIT; - -static GstPadProbeReturn -event_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) -{ - GMainLoop *loop = user_data; - GstElement *next; - - if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_DATA (info)) != GST_EVENT_EOS) - return GST_PAD_PROBE_PASS; - - gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info)); - - /* push current effect back into the queue */ - g_queue_push_tail (&effects, gst_object_ref (cur_effect)); - /* take next effect from the queue */ - next = g_queue_pop_head (&effects); - if (next == NULL) { - GST_DEBUG_OBJECT (pad, "no more effects"); - g_main_loop_quit (loop); - return GST_PAD_PROBE_DROP; - } - - g_print ("Switching from '%s' to '%s'..\n", GST_OBJECT_NAME (cur_effect), - GST_OBJECT_NAME (next)); - - gst_element_set_state (cur_effect, GST_STATE_NULL); - - /* remove unlinks automatically */ - GST_DEBUG_OBJECT (pipeline, "removing %" GST_PTR_FORMAT, cur_effect); - gst_bin_remove (GST_BIN (pipeline), 
cur_effect); - - GST_DEBUG_OBJECT (pipeline, "adding %" GST_PTR_FORMAT, next); - gst_bin_add (GST_BIN (pipeline), next); - - GST_DEBUG_OBJECT (pipeline, "linking.."); - gst_element_link_many (conv_before, next, conv_after, NULL); - - gst_element_set_state (next, GST_STATE_PLAYING); - - cur_effect = next; - GST_DEBUG_OBJECT (pipeline, "done"); - - return GST_PAD_PROBE_DROP; -} - -static GstPadProbeReturn -pad_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data) -{ - GstPad *srcpad, *sinkpad; - - GST_DEBUG_OBJECT (pad, "pad is blocked now"); - - /* remove the probe first */ - gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info)); - - /* install new probe for EOS */ - srcpad = gst_element_get_static_pad (cur_effect, "src"); - gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_BLOCK | - GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, event_probe_cb, user_data, NULL); - gst_object_unref (srcpad); - - /* push EOS into the element, the probe will be fired when the - * EOS leaves the effect and it has thus drained all of its data */ - sinkpad = gst_element_get_static_pad (cur_effect, "sink"); - gst_pad_send_event (sinkpad, gst_event_new_eos ()); - gst_object_unref (sinkpad); - - return GST_PAD_PROBE_OK; -} - -static gboolean -timeout_cb (gpointer user_data) -{ - gst_pad_add_probe (blockpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, - pad_probe_cb, user_data, NULL); - - return TRUE; -} - -static gboolean -bus_cb (GstBus * bus, GstMessage * msg, gpointer user_data) -{ - GMainLoop *loop = user_data; - - switch (GST_MESSAGE_TYPE (msg)) { - case GST_MESSAGE_ERROR:{ - GError *err = NULL; - gchar *dbg; - - gst_message_parse_error (msg, &err, &dbg); - gst_object_default_error (msg->src, err, dbg); - g_clear_error (&err); - g_free (dbg); - g_main_loop_quit (loop); - break; - } - default: - break; - } - return TRUE; -} - -int -main (int argc, char **argv) -{ - GOptionEntry options[] = { - {"effects", 'e', 0, G_OPTION_ARG_STRING, &opt_effects, - "Effects to use (comma-separated list of 
element names)", NULL}, - {NULL} - }; - GOptionContext *ctx; - GError *err = NULL; - GMainLoop *loop; - GstElement *src, *q1, *q2, *effect, *filter1, *filter2, *sink; - gchar **effect_names, **e; - - ctx = g_option_context_new (""); - g_option_context_add_main_entries (ctx, options, NULL); - g_option_context_add_group (ctx, gst_init_get_option_group ()); - if (!g_option_context_parse (ctx, &argc, &argv, &err)) { - g_print ("Error initializing: %s\n", err->message); - g_clear_error (&err); - g_option_context_free (ctx); - return 1; - } - g_option_context_free (ctx); - - if (opt_effects != NULL) - effect_names = g_strsplit (opt_effects, ",", -1); - else - effect_names = g_strsplit (DEFAULT_EFFECTS, ",", -1); - - for (e = effect_names; e != NULL && *e != NULL; ++e) { - GstElement *el; - - el = gst_element_factory_make (*e, NULL); - if (el) { - g_print ("Adding effect '%s'\n", *e); - g_queue_push_tail (&effects, el); - } - } - - pipeline = gst_pipeline_new ("pipeline"); - - src = gst_element_factory_make ("videotestsrc", NULL); - g_object_set (src, "is-live", TRUE, NULL); - - filter1 = gst_element_factory_make ("capsfilter", NULL); - gst_util_set_object_arg (G_OBJECT (filter1), "caps", - "video/x-raw, width=320, height=240, " - "format={ I420, YV12, YUY2, UYVY, AYUV, Y41B, Y42B, " - "YVYU, Y444, v210, v216, NV12, NV21, UYVP, A420, YUV9, YVU9, IYU1 }"); - - q1 = gst_element_factory_make ("queue", NULL); - - blockpad = gst_element_get_static_pad (q1, "src"); - - conv_before = gst_element_factory_make ("videoconvert", NULL); - - effect = g_queue_pop_head (&effects); - cur_effect = effect; - - conv_after = gst_element_factory_make ("videoconvert", NULL); - - q2 = gst_element_factory_make ("queue", NULL); - - filter2 = gst_element_factory_make ("capsfilter", NULL); - gst_util_set_object_arg (G_OBJECT (filter2), "caps", - "video/x-raw, width=320, height=240, " - "format={ RGBx, BGRx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR }"); - - sink = gst_element_factory_make 
("ximagesink", NULL); - - gst_bin_add_many (GST_BIN (pipeline), src, filter1, q1, conv_before, effect, - conv_after, q2, sink, NULL); - - gst_element_link_many (src, filter1, q1, conv_before, effect, conv_after, - q2, sink, NULL); - - gst_element_set_state (pipeline, GST_STATE_PLAYING); - - loop = g_main_loop_new (NULL, FALSE); - - gst_bus_add_watch (GST_ELEMENT_BUS (pipeline), bus_cb, loop); - - g_timeout_add_seconds (1, timeout_cb, loop); - - g_main_loop_run (loop); - - gst_element_set_state (pipeline, GST_STATE_NULL); - gst_object_unref (pipeline); - - return 0; -} -]]> - - - - Note how we added videoconvert elements before and after the effect. - This is needed because some elements might operate in different - colorspaces than other elements. By inserting the conversion elements - you ensure that the right format can be negotiated at any time. - - - - - diff --git a/docs/manual/advanced-dparams.xml b/docs/manual/advanced-dparams.xml deleted file mode 100644 index 4459e01f98..0000000000 --- a/docs/manual/advanced-dparams.xml +++ /dev/null @@ -1,102 +0,0 @@ - - Dynamic Controllable Parameters - - - Getting Started - - The controller subsystem offers a lightweight way to adjust gobject - properties over stream-time. Normally these properties are changed using - g_object_set(). Timing those calls reliably so that - the changes affect certain stream times is close to impossible. The - controller takes time into account. It works by attaching control-sources - to properties using control-bindings. Control-sources provide values for a - given time-stamp that are usually in the range of 0.0 to 1.0. - Control-bindings map the control-value to a gobject property they are bound to - - converting the type and scaling to the target property value range. - At run-time the elements continuously pull values changes for the current - stream-time to update the gobject properties. 
GStreamer includes a few - different control-sources and control-bindings already, but applications can - define their own by sub-classing from the respective base classes. - - - Most parts of the controller mechanism is implemented in GstObject. Also the - base classes for control-sources and control-bindings are included in the core - library. The existing implementations are contained within the - gstcontroller library. - You need to include the header in your application's source file: - - -... -#include <gst/gst.h> -#include <gst/controller/gstinterpolationcontrolsource.h> -#include <gst/controller/gstdirectcontrolbinding.h> -... - - - Your application should link to the shared library - gstreamer-controller. One can get the required flag for - compiler and linker by using pkg-config for gstreamer-controller-1.0. - - - - - Setting up parameter control - - If we have our pipeline set up and want to control some parameters, we first - need to create a control-source. Lets use an interpolation control-source: - - - csource = gst_interpolation_control_source_new (); - g_object_set (csource, "mode", GST_INTERPOLATION_MODE_LINEAR, NULL); - - - Now we need to attach the control-source to the gobject property. This is done - with a control-binding. One control source can be attached to several object - properties (even in different objects) using separate control-bindings. - - - gst_object_add_control_binding (object, gst_direct_control_binding_new (object, "prop1", csource)); - - - This type control-source takes new property values from a list of time-stamped - parameter changes. The source can e.g. fill gaps by smoothing parameter changes - This behavior can be configured by setting the mode property of the - control-source. Other control sources e.g. produce a stream of values by - calling sin() function. They have parameters to control - e.g. the frequency. As control-sources are GstObjects too, one can attach - control-sources to these properties too. 
- - - Now we can set some control points. These are time-stamped gdouble values and - are usually in the range of 0.0 to 1.0. A value of 1.0 is later mapped to the - maximum value in the target properties value range. - The values become active when the timestamp is reached. They still stay - in the list. If e.g. the pipeline runs a loop (using a segmented seek), - the control-curve gets repeated as well. - - - GstTimedValueControlSource *tv_csource = (GstTimedValueControlSource *)csource; - gst_timed_value_control_source_set (tv_csource, 0 * GST_SECOND, 0.0); - gst_timed_value_control_source_set (tv_csource, 1 * GST_SECOND, 1.0); - - - Now everything is ready to play. If the control-source is e.g. bound to a - volume property, we will head a fade-in over 1 second. One word of caution, - the volume element that comes with gstreamer has a value range of 0.0 to 4.0 - on its volume property. If the above control-source is attached to the property - the volume will ramp up to 400%! - - - One final note - the controller subsystem has a built-in live-mode. Even though - a property has a control-source assigned one can change the GObject property - through the g_object_set(). - This is highly useful when binding the GObject properties to GUI widgets. - When the user adjusts the value with the widget, one can set the GObject - property and this remains active until the next programmed control-source - value overrides it. This also works with smoothed parameters. It does not - work for control-sources that constantly update the property (e.g. the - lfo_control_source). - - - - diff --git a/docs/manual/advanced-interfaces.xml b/docs/manual/advanced-interfaces.xml deleted file mode 100644 index ef41f4caee..0000000000 --- a/docs/manual/advanced-interfaces.xml +++ /dev/null @@ -1,82 +0,0 @@ - - Interfaces - - - In , you have learned how - to use GObject properties as a simple way to do - interaction between applications and elements. 
This method suffices for - the simple'n'straight settings, but fails for anything more complicated - than a getter and setter. For the more complicated use cases, &GStreamer; - uses interfaces based on the GObject GTypeInterface - type. - - - - Most of the interfaces handled here will not contain any example code. - See the API references for details. Here, we will just describe the - scope and purpose of each interface. - - - - The URI interface - - - In all examples so far, we have only supported local files through the - filesrc element. &GStreamer;, obviously, supports many - more location sources. However, we don't want applications to need to - know any particular element implementation details, such as element - names for particular network source types and so on. Therefore, there - is a URI interface, which can be used to get the source element that - supports a particular URI type. There is no strict rule for URI naming, - but in general we follow naming conventions that others use, too. For - example, assuming you have the correct plugins installed, &GStreamer; - supports file:///<path>/<file>, - http://<host>/<path>/<file>, - mms://<host>/<path>/<file>, and so on. - - - In order to get the source or sink element supporting a particular URI, - use gst_element_make_from_uri (), with the URI - type being either GST_URI_SRC for a source - element, or GST_URI_SINK for a sink element. - - - You can convert filenames to and from URIs using GLib's - g_filename_to_uri () and - g_uri_to_filename (). - - - - - The Color Balance interface - - - The colorbalance interface is a way to control video-related properties - on an element, such as brightness, contrast and so on. It's sole - reason for existence is that, as far as its authors know, there's no - way to dynamically register properties using - GObject. - - - The colorbalance interface is implemented by several plugins, including - xvimagesink and the Video4linux2 elements. 
- - - - - The Video Overlay interface - - - The Video Overlay interface was created to solve the problem of embedding - video streams in an application window. The application provides an - window handle to the element implementing this interface to draw on, and - the element will then use this window handle to draw on rather than creating - a new toplevel window. This is useful to embed video in video players. - - - This interface is implemented by, amongst others, the Video4linux2 - elements and by ximagesink, xvimagesink and sdlvideosink. - - - diff --git a/docs/manual/advanced-metadata.xml b/docs/manual/advanced-metadata.xml deleted file mode 100644 index f975409e6a..0000000000 --- a/docs/manual/advanced-metadata.xml +++ /dev/null @@ -1,196 +0,0 @@ - - Metadata - - - &GStreamer; makes a clear distinction between two types of metadata, and - has support for both types. The first is stream tags, which describe the - content of a stream in a non-technical way. Examples include the author - of a song, the title of that very same song or the album it is a part of. - The other type of metadata is stream-info, which is a somewhat technical - description of the properties of a stream. This can include video size, - audio samplerate, codecs used and so on. Tags are handled using the - &GStreamer; tagging system. Stream-info can be retrieved from a - GstPad by getting the current (negotiated) - GstCaps for that pad. - - - - Metadata reading - - - Stream information can most easily be read by reading it from a - GstPad. This has already been discussed before - in . Therefore, we will skip - it here. Note that this requires access to all pads of which you - want stream information. - - - - Tag reading is done through a bus in &GStreamer;, which has been - discussed previously in . You can - listen for GST_MESSAGE_TAG messages and handle - them as you wish. - - - Note, however, that the GST_MESSAGE_TAG - message may be fired multiple times in the pipeline. 
It is the - application's responsibility to put all those tags together and - display them to the user in a nice, coherent way. Usually, using - gst_tag_list_merge () is a good enough way - of doing this; make sure to empty the cache when loading a new song, - or after every few minutes when listening to internet radio. Also, - make sure you use GST_TAG_MERGE_PREPEND as - merging mode, so that a new title (which came in later) has a - preference over the old one for display. - - - The following example will extract tags from a file and print them: - - -/* compile with: - * gcc -o tags tags.c `pkg-config --cflags --libs gstreamer-1.0` */ -#include <gst/gst.h> - -static void -print_one_tag (const GstTagList * list, const gchar * tag, gpointer user_data) -{ - int i, num; - - num = gst_tag_list_get_tag_size (list, tag); - for (i = 0; i < num; ++i) { - const GValue *val; - - /* Note: when looking for specific tags, use the gst_tag_list_get_xyz() API, - * we only use the GValue approach here because it is more generic */ - val = gst_tag_list_get_value_index (list, tag, i); - if (G_VALUE_HOLDS_STRING (val)) { - g_print ("\t%20s : %s\n", tag, g_value_get_string (val)); - } else if (G_VALUE_HOLDS_UINT (val)) { - g_print ("\t%20s : %u\n", tag, g_value_get_uint (val)); - } else if (G_VALUE_HOLDS_DOUBLE (val)) { - g_print ("\t%20s : %g\n", tag, g_value_get_double (val)); - } else if (G_VALUE_HOLDS_BOOLEAN (val)) { - g_print ("\t%20s : %s\n", tag, - (g_value_get_boolean (val)) ? 
"true" : "false"); - } else if (GST_VALUE_HOLDS_BUFFER (val)) { - GstBuffer *buf = gst_value_get_buffer (val); - guint buffer_size = gst_buffer_get_size (buf); - - g_print ("\t%20s : buffer of size %u\n", tag, buffer_size); - } else if (GST_VALUE_HOLDS_DATE_TIME (val)) { - GstDateTime *dt = g_value_get_boxed (val); - gchar *dt_str = gst_date_time_to_iso8601_string (dt); - - g_print ("\t%20s : %s\n", tag, dt_str); - g_free (dt_str); - } else { - g_print ("\t%20s : tag of type '%s'\n", tag, G_VALUE_TYPE_NAME (val)); - } - } -} - -static void -on_new_pad (GstElement * dec, GstPad * pad, GstElement * fakesink) -{ - GstPad *sinkpad; - - sinkpad = gst_element_get_static_pad (fakesink, "sink"); - if (!gst_pad_is_linked (sinkpad)) { - if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK) - g_error ("Failed to link pads!"); - } - gst_object_unref (sinkpad); -} - -int -main (int argc, char ** argv) -{ - GstElement *pipe, *dec, *sink; - GstMessage *msg; - gchar *uri; - - gst_init (&argc, &argv); - - if (argc < 2) - g_error ("Usage: %s FILE or URI", argv[0]); - - if (gst_uri_is_valid (argv[1])) { - uri = g_strdup (argv[1]); - } else { - uri = gst_filename_to_uri (argv[1], NULL); - } - - pipe = gst_pipeline_new ("pipeline"); - - dec = gst_element_factory_make ("uridecodebin", NULL); - g_object_set (dec, "uri", uri, NULL); - gst_bin_add (GST_BIN (pipe), dec); - - sink = gst_element_factory_make ("fakesink", NULL); - gst_bin_add (GST_BIN (pipe), sink); - - g_signal_connect (dec, "pad-added", G_CALLBACK (on_new_pad), sink); - - gst_element_set_state (pipe, GST_STATE_PAUSED); - - while (TRUE) { - GstTagList *tags = NULL; - - msg = gst_bus_timed_pop_filtered (GST_ELEMENT_BUS (pipe), - GST_CLOCK_TIME_NONE, - GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_TAG | GST_MESSAGE_ERROR); - - if (GST_MESSAGE_TYPE (msg) != GST_MESSAGE_TAG) /* error or async_done */ - break; - - gst_message_parse_tag (msg, &tags); - - g_print ("Got tags from element %s:\n", GST_OBJECT_NAME (msg->src)); - 
gst_tag_list_foreach (tags, print_one_tag, NULL); - g_print ("\n"); - gst_tag_list_unref (tags); - - gst_message_unref (msg); - } - - if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) { - GError *err = NULL; - - gst_message_parse_error (msg, &err, NULL); - g_printerr ("Got error: %s\n", err->message); - g_error_free (err); - } - - gst_message_unref (msg); - gst_element_set_state (pipe, GST_STATE_NULL); - gst_object_unref (pipe); - g_free (uri); - return 0; -} - - - - - Tag writing - - - Tag writing is done using the GstTagSetter - interface. All that's required is a tag-set-supporting element in - your pipeline. In order to see if any of the elements in your - pipeline supports tag writing, you can use the function - gst_bin_iterate_all_by_interface (pipeline, - GST_TYPE_TAG_SETTER). On the resulting element, usually - an encoder or muxer, you can use gst_tag_setter_merge - () (with a taglist) or gst_tag_setter_add - () (with individual tags) to set tags on it. - - - A nice extra feature in &GStreamer; tag support is that tags are - preserved in pipelines. This means that if you transcode one file - containing tags into another media type, and that new media type - supports tags too, then the tags will be handled as part of the - data stream and be merged into the newly written media file, too. - - - diff --git a/docs/manual/advanced-position.xml b/docs/manual/advanced-position.xml deleted file mode 100644 index 1786339a49..0000000000 --- a/docs/manual/advanced-position.xml +++ /dev/null @@ -1,235 +0,0 @@ - - Position tracking and seeking - - - So far, we've looked at how to create a pipeline to do media processing - and how to make it run. Most application developers will be interested - in providing feedback to the user on media progress. Media players, for - example, will want to show a slider showing the progress in the song, - and usually also a label indicating stream length. 
Transcoding - applications will want to show a progress bar on how much percent of - the task is done. &GStreamer; has built-in support for doing all this - using a concept known as querying. Since seeking - is very similar, it will be discussed here as well. Seeking is done - using the concept of events. - - - - Querying: getting the position or length of a stream - - - Querying is defined as requesting a specific stream property related - to progress tracking. This includes getting the length of a stream (if - available) or getting the current position. Those stream properties - can be retrieved in various formats such as time, audio samples, video - frames or bytes. The function most commonly used for this is - gst_element_query (), although some convenience - wrappers are provided as well (such as - gst_element_query_position () and - gst_element_query_duration ()). You can generally - query the pipeline directly, and it'll figure out the internal details - for you, like which element to query. - - - - Internally, queries will be sent to the sinks, and - dispatched backwards until one element can handle it; - that result will be sent back to the function caller. Usually, that - is the demuxer, although with live sources (from a webcam), it is the - source itself. - - - - -#include <gst/gst.h> - - - - -static gboolean -cb_print_position (GstElement *pipeline) -{ - gint64 pos, len; - - if (gst_element_query_position (pipeline, GST_FORMAT_TIME, &pos) - && gst_element_query_duration (pipeline, GST_FORMAT_TIME, &len)) { - g_print ("Time: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r", - GST_TIME_ARGS (pos), GST_TIME_ARGS (len)); - } - - /* call me again */ - return TRUE; -} - -gint -main (gint argc, - gchar *argv[]) -{ - GstElement *pipeline; - -[..] - - /* run pipeline */ - g_timeout_add (200, (GSourceFunc) cb_print_position, pipeline); - g_main_loop_run (loop); - -[..] - -} - - - - - Events: seeking (and more) - - - Events work in a very similar way as queries. 
Dispatching, for - example, works exactly the same for events (and also has the same - limitations), and they can similarly be sent to the toplevel pipeline - and it will figure out everything for you. Although there are more - ways in which applications and elements can interact using events, - we will only focus on seeking here. This is done using the seek-event. - A seek-event contains a playback rate, a seek offset format (which is - the unit of the offsets to follow, e.g. time, audio samples, video - frames or bytes), optionally a set of seeking-related flags (e.g. - whether internal buffers should be flushed), a seek method (which - indicates relative to what the offset was given), and seek offsets. - The first offset (cur) is the new position to seek to, while - the second offset (stop) is optional and specifies a position where - streaming is supposed to stop. Usually it is fine to just specify - GST_SEEK_TYPE_NONE and -1 as end_method and end offset. The behaviour - of a seek is also wrapped in the gst_element_seek (). - - - -static void -seek_to_time (GstElement *pipeline, - gint64 time_nanoseconds) -{ - if (!gst_element_seek (pipeline, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, - GST_SEEK_TYPE_SET, time_nanoseconds, - GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE)) { - g_print ("Seek failed!\n"); - } -} - - - Seeks with the GST_SEEK_FLAG_FLUSH should be done when the pipeline is - in PAUSED or PLAYING state. The pipeline will automatically go to preroll - state until the new data after the seek will cause the pipeline to preroll - again. After the pipeline is prerolled, it will go back to the state - (PAUSED or PLAYING) it was in when the seek was executed. You can wait - (blocking) for the seek to complete with - gst_element_get_state() or by waiting for the - ASYNC_DONE message to appear on the bus. - - - - Seeks without the GST_SEEK_FLAG_FLUSH should only be done when the - pipeline is in the PLAYING state. 
Executing a non-flushing seek in the - PAUSED state might deadlock because the pipeline streaming threads might - be blocked in the sinks. - - - - It is important to realise that seeks will not happen instantly in the - sense that they are finished when the function - gst_element_seek () returns. Depending on the - specific elements involved, the actual seeking might be done later in - another thread (the streaming thread), and it might take a short time - until buffers from the new seek position will reach downstream elements - such as sinks (if the seek was non-flushing then it might take a bit - longer). - - - - It is possible to do multiple seeks in short time-intervals, such as - a direct response to slider movement. After a seek, internally, the - pipeline will be paused (if it was playing), the position will be - re-set internally, the demuxers and decoders will decode from the new - position onwards and this will continue until all sinks have data - again. If it was playing originally, it will be set to playing again, - too. Since the new position is immediately available in a video output, - you will see the new frame, even if your pipeline is not in the playing - state. - - - - diff --git a/docs/manual/advanced-threads.xml b/docs/manual/advanced-threads.xml deleted file mode 100644 index cf2f50cff3..0000000000 --- a/docs/manual/advanced-threads.xml +++ /dev/null @@ -1,481 +0,0 @@ - - Threads - - &GStreamer; is inherently multi-threaded, and is fully thread-safe. - Most threading internals are hidden from the application, which should - make application development easier. However, in some cases, applications - may want to have influence on some parts of those. &GStreamer; allows - applications to force the use of multiple threads over some parts of - a pipeline. - See . - - - &GStreamer; can also notify you when threads are created so that you can - configure things such as the thread priority or the threadpool to use. - See . 
- - - - Scheduling in &GStreamer; - - Each element in the &GStreamer; pipeline decides how it is going to - be scheduled. Elements can choose if their pads are to be scheduled - push-based or pull-based. An element can, for example, choose to start - a thread to start pulling from the sink pad or/and start pushing on - the source pad. An element can also choose to use the upstream or - downstream thread for its data processing in push and pull mode - respectively. &GStreamer; does not pose any restrictions on how the - element chooses to be scheduled. See the Plugin Writer Guide for more - details. - - - What will happen in any case is that some elements will start a thread - for their data processing, called the streaming threads. - The streaming threads, or GstTask objects, are - created from a GstTaskPool when the element - needs to make a streaming thread. In the next section we see how we - can receive notifications of the tasks and pools. - - - - - Configuring Threads in &GStreamer; - - A STREAM_STATUS message is posted on the bus to inform you about the - status of the streaming threads. You will get the following information - from the message: - - - - When a new thread is about to be created, you will be notified - of this with a GST_STREAM_STATUS_TYPE_CREATE type. It is then - possible to configure a GstTaskPool in - the GstTask. The custom taskpool will - provide custom threads for the task to implement the streaming - threads. - - - This message needs to be handled synchronously if you want to - configure a custom taskpool. If you don't configure the taskpool - on the task when this message returns, the task will use its - default pool. - - - - - When a thread is entered or left. This is the moment where you - could configure thread priorities. You also get a notification - when a thread is destroyed. - - - - - You get messages when the thread starts, pauses and stops. 
This - could be used to visualize the status of streaming threads in - a gui application. - - - - - - - - We will now look at some examples in the next sections. - - - - Boost priority of a thread - - .----------. .----------. - | faksesrc | | fakesink | - | src->sink | - '----------' '----------' - - - Let's look at the simple pipeline above. We would like to boost - the priority of the streaming thread. - It will be the fakesrc element that starts the streaming thread for - generating the fake data pushing them to the peer fakesink. - The flow for changing the priority would go like this: - - - - - When going from READY to PAUSED state, fakesrc will require a - streaming thread for pushing data into the fakesink. It will - post a STREAM_STATUS message indicating its requirement for a - streaming thread. - - - - - The application will react to the STREAM_STATUS messages with a - sync bus handler. It will then configure a custom - GstTaskPool on the - GstTask inside the message. The custom - taskpool is responsible for creating the threads. In this - example we will make a thread with a higher priority. - - - - - Alternatively, since the sync message is called in the thread - context, you can use thread ENTER/LEAVE notifications to - change the priority or scheduling pollicy of the current thread. - - - - - In a first step we need to implement a custom - GstTaskPool that we can configure on the task. - Below is the implementation of a GstTaskPool - subclass that uses pthreads to create a SCHED_RR real-time thread. - Note that creating real-time threads might require extra priveleges. - - - - - - - - -typedef struct -{ - pthread_t thread; -} TestRTId; - -G_DEFINE_TYPE (TestRTPool, test_rt_pool, GST_TYPE_TASK_POOL); - -static void -default_prepare (GstTaskPool * pool, GError ** error) -{ - /* we don't do anything here. 
We could construct a pool of threads here that - * we could reuse later but we don't */ -} - -static void -default_cleanup (GstTaskPool * pool) -{ -} - -static gpointer -default_push (GstTaskPool * pool, GstTaskPoolFunction func, gpointer data, - GError ** error) -{ - TestRTId *tid; - gint res; - pthread_attr_t attr; - struct sched_param param; - - tid = g_slice_new0 (TestRTId); - - pthread_attr_init (&attr); - if ((res = pthread_attr_setschedpolicy (&attr, SCHED_RR)) != 0) - g_warning ("setschedpolicy: failure: %p", g_strerror (res)); - - param.sched_priority = 50; - if ((res = pthread_attr_setschedparam (&attr, ¶m)) != 0) - g_warning ("setschedparam: failure: %p", g_strerror (res)); - - if ((res = pthread_attr_setinheritsched (&attr, PTHREAD_EXPLICIT_SCHED)) != 0) - g_warning ("setinheritsched: failure: %p", g_strerror (res)); - - res = pthread_create (&tid->thread, &attr, (void *(*)(void *)) func, data); - - if (res != 0) { - g_set_error (error, G_THREAD_ERROR, G_THREAD_ERROR_AGAIN, - "Error creating thread: %s", g_strerror (res)); - g_slice_free (TestRTId, tid); - tid = NULL; - } - - return tid; -} - -static void -default_join (GstTaskPool * pool, gpointer id) -{ - TestRTId *tid = (TestRTId *) id; - - pthread_join (tid->thread, NULL); - - g_slice_free (TestRTId, tid); -} - -static void -test_rt_pool_class_init (TestRTPoolClass * klass) -{ - GstTaskPoolClass *gsttaskpool_class; - - gsttaskpool_class = (GstTaskPoolClass *) klass; - - gsttaskpool_class->prepare = default_prepare; - gsttaskpool_class->cleanup = default_cleanup; - gsttaskpool_class->push = default_push; - gsttaskpool_class->join = default_join; -} - -static void -test_rt_pool_init (TestRTPool * pool) -{ -} - -GstTaskPool * -test_rt_pool_new (void) -{ - GstTaskPool *pool; - - pool = g_object_new (TEST_TYPE_RT_POOL, NULL); - - return pool; -} -]]> - - - - The important function to implement when writing an taskpool is the - push function. 
The implementation should start a thread - that calls the given function. More involved implementations might - want to keep some threads around in a pool because creating and - destroying threads is not always the fastest operation. - - - In a next step we need to actually configure the custom taskpool when - the fakesrc needs it. For this we intercept the STREAM_STATUS messages - with a sync handler. - - - - - - - - Note that this program likely needs root permissions in order to - create real-time threads. When the thread can't be created, the - state change function will fail, which we catch in the application - above. - - - When there are multiple threads in the pipeline, you will receive - multiple STREAM_STATUS messages. You should use the owner of the - message, which is likely the pad or the element that starts the - thread, to figure out what the function of this thread is in the - context of the application. - - - - - - When would you want to force a thread? - - We have seen that threads are created by elements but it is also - possible to insert elements in the pipeline for the sole purpose of - forcing a new thread in the pipeline. - - - There are several reasons to force the use of threads. However, - for performance reasons, you never want to use one thread for every - element out there, since that will create some overhead. - Let's now list some situations where threads can be particularly - useful: - - - - - Data buffering, for example when dealing with network streams or - when recording data from a live stream such as a video or audio - card. Short hickups elsewhere in the pipeline will not cause data - loss. See also about network - buffering with queue2. - -
- Data buffering, from a networked source - - - - - -
- -
- - - Synchronizing output devices, e.g. when playing a stream containing - both video and audio data. By using threads for both outputs, they - will run independently and their synchronization will be better. - -
- Synchronizing audio and video sinks - - - - - -
-
-
- - - - Above, we've mentioned the queue element several times - now. A queue is the thread boundary element through which you can - force the use of threads. It does so by using a classic - provider/consumer model as learned in threading classes at - universities all around the world. By doing this, it acts both as a - means to make data throughput between threads threadsafe, and it can - also act as a buffer. Queues have several GObject - properties to be configured for specific uses. For example, you can set - lower and upper thresholds for the element. If there's less data than - the lower threshold (default: disabled), it will block output. If - there's more data than the upper threshold, it will block input or - (if configured to do so) drop data. - - - To use a queue (and therefore force the use of two distinct threads - in the pipeline), one can simply create a queue element - and put this in as part of the pipeline. &GStreamer; will take care of - all threading details internally. - -
- -
diff --git a/docs/manual/appendix-checklist.xml b/docs/manual/appendix-checklist.xml deleted file mode 100644 index 538ba5e84d..0000000000 --- a/docs/manual/appendix-checklist.xml +++ /dev/null @@ -1,204 +0,0 @@ - - Things to check when writing an application - - This chapter contains a fairly random selection of things that can be - useful to keep in mind when writing &GStreamer;-based applications. It's - up to you how much you're going to use the information provided here. - We will shortly discuss how to debug pipeline problems using &GStreamer; - applications. Also, we will touch upon how to acquire knowledge about - plugins and elements and how to test simple pipelines before building - applications around them. - - - - Good programming habits - - - - Always add a GstBus handler to your - pipeline. Always report errors in your application, and try - to do something with warnings and information messages, too. - - - - - Always check return values of &GStreamer; functions. Especially, - check return values of gst_element_link () - and gst_element_set_state (). - - - - - Dereference return values of all functions returning a non-base - type, such as gst_element_get_pad (). Also, - always free non-const string returns, such as - gst_object_get_name (). - - - - - Always use your pipeline object to keep track of the current state - of your pipeline. Don't keep private variables in your application. - Also, don't update your user interface if a user presses the - play button. Instead, listen for the - state-changed message on the - GstBus and only update the user interface - whenever this message is received. - - - - - Report all bugs that you find in &GStreamer; bugzilla at - http://bugzilla.gnome.org/. - - - - - - - Debugging - - Applications can make use of the extensive &GStreamer; debugging system - to debug pipeline problems. Elements will write output to this system - to log what they're doing. 
It's not used for error reporting, but it - is very useful for tracking what an element is doing exactly, which - can come in handy when debugging application issues (such as failing - seeks, out-of-sync media, etc.). - - - Most &GStreamer;-based applications accept the commandline option - and related family members. The - list consists of a comma-separated list of category/level pairs, - which can set the debugging level for a specific debugging category. - For example, would turn - on debugging for the Ogg demuxer element. You can use wildcards as - well. A debugging level of 0 will turn off all debugging, and a level - of 9 will turn on all debugging. Intermediate values only turn on - some debugging (based on message severity; 2, for example, will only - display errors and warnings). Here's a list of all available options: - - - - - - will print available debug - categories and exit. - - - - - - will set the default debug level (which can range from 0 (no - output) to 9 (everything)). - - - - - - takes a comma-separated list of category_name:level pairs to - set specific levels for the individual categories. Example: - . Alternatively, you - can also set the GST_DEBUG environment - variable, which has the same effect. - - - - - will disable color debugging. - You can also set the GST_DEBUG_NO_COLOR environment variable to 1 - if you want to disable colored debug output permanently. Note that - if you are disabling color purely to avoid messing up your pager - output, try using less -R. - - - - - - will change debug log coloring mode. MODE - can be one of the following: , - , , - , . - You can also set the GST_DEBUG_COLOR_MODE environment variable - if you want to change colored debug output permanently. Note that - if you are disabling color purely to avoid messing up your pager - output, try using less -R. - - - - - disables debugging altogether. - - - - - enables printout of errors while - loading &GStreamer; plugins. 
- - - - - - - - Conversion plugins - - &GStreamer; contains a bunch of conversion plugins that most - applications will find useful. Specifically, those are videoscalers - (videoscale), colorspace convertors (videoconvert), audio format - convertors and channel resamplers (audioconvert) and audio samplerate - convertors (audioresample). Those convertors don't do anything when not - required, they will act in passthrough mode. They will activate when - the hardware doesn't support a specific request, though. All - applications are recommended to use those elements. - - - - - Utility applications provided with &GStreamer; - - &GStreamer; comes with a default set of command-line utilities that - can help in application development. We will discuss only - gst-launch and gst-inspect here. - - - - <command>gst-launch</command> - - gst-launch is a simple script-like commandline - application that can be used to test pipelines. For example, the - command gst-launch audiotestsrc ! audioconvert ! - audio/x-raw,channels=2 ! alsasink will run - a pipeline which generates a sine-wave audio stream and plays it - to your ALSA audio card. gst-launch also allows - the use of threads (will be used automatically as required or as queue - elements are inserted in the pipeline) and bins (using brackets, so - ( and )). You can use dots to imply - padnames on elements, - or even omit the padname to automatically select a pad. Using - all this, the pipeline - gst-launch filesrc location=file.ogg ! oggdemux name=d - d. ! queue ! theoradec ! videoconvert ! xvimagesink - d. ! queue ! vorbisdec ! audioconvert ! audioresample ! alsasink - will play an Ogg file - containing a Theora video-stream and a Vorbis audio-stream. You can - also use autopluggers such as decodebin on the commandline. See the - manual page of gst-launch for more information. 
- - - - - <command>gst-inspect</command> - - gst-inspect can be used to inspect all properties, - signals, dynamic parameters and the object hierarchy of an element. - This can be very useful to see which GObject - properties or which signals (and using what arguments) an element - supports. Run gst-inspect fakesrc to get an idea - of what it does. See the manual page of gst-inspect - for more information. - - - - - diff --git a/docs/manual/appendix-compiling.xml b/docs/manual/appendix-compiling.xml deleted file mode 100644 index 2d83c989be..0000000000 --- a/docs/manual/appendix-compiling.xml +++ /dev/null @@ -1,76 +0,0 @@ - - Compiling - - This section talks about the different things you can do when building - and shipping your applications and plugins. - - - - Embedding static elements in your application - - The Plugin - Writer's Guide describes in great detail how to write elements - for the &GStreamer; framework. In this section, we will solely discuss - how to embed such elements statically in your application. This can be - useful for application-specific elements that have no use elsewhere in - &GStreamer;. - - - Dynamically loaded plugins contain a structure that's defined using - GST_PLUGIN_DEFINE (). This structure is loaded - when the plugin is loaded by the &GStreamer; core. The structure - contains an initialization function (usually called - plugin_init) that will be called right after that. - It's purpose is to register the elements provided by the plugin with - the &GStreamer; framework. - If you want to embed elements directly in - your application, the only thing you need to do is to replace - GST_PLUGIN_DEFINE () with a call to - gst_plugin_register_static (). As soon as you - call gst_plugin_register_static (), the elements - will from then on be available like any other element, without them - having to be dynamically loadable libraries. 
In the example below, you - would be able to call gst_element_factory_make - ("my-element-name", "some-name") to create an instance of the - element. - - - - - - - diff --git a/docs/manual/appendix-integration.xml b/docs/manual/appendix-integration.xml deleted file mode 100644 index 64b9ed81f0..0000000000 --- a/docs/manual/appendix-integration.xml +++ /dev/null @@ -1,331 +0,0 @@ - - Integration - - &GStreamer; tries to integrate closely with operating systems (such - as Linux and UNIX-like operating systems, OS X or Windows) and desktop - environments (such as GNOME or KDE). In this chapter, we'll mention - some specific techniques to integrate your application with your - operating system or desktop environment of choice. - - - - - - - - Linux and UNIX-like operating systems - - &GStreamer; provides a basic set of elements that are useful when - integrating with Linux or a UNIX-like operating system. - - - - - For audio input and output, &GStreamer; provides input and - output elements for several audio subsystems. Amongst others, - &GStreamer; includes elements for ALSA (alsasrc, - alsasink), OSS (osssrc, osssink) Pulesaudio (pulsesrc, pulsesink) - and Sun audio (sunaudiosrc, sunaudiomixer, sunaudiosink). - - - - - For video input, &GStreamer; contains source elements for - Video4linux2 (v4l2src, v4l2element, v4l2sink). - - - - - For video output, &GStreamer; provides elements for output - to X-windows (ximagesink), Xv-windows (xvimagesink; for - hardware-accelerated video), direct-framebuffer (dfbimagesink) - and openGL image contexts (glsink). - - - - - - - - - - - GNOME desktop - - &GStreamer; has been the media backend of the GNOME desktop since GNOME-2.2 - onwards. Nowadays, a whole bunch of GNOME applications make use of - &GStreamer; for media-processing, including (but not limited to) - Rhythmbox, - Videos - and Sound - Juicer. 
- - - Most of these GNOME applications make use of some specific techniques - to integrate as closely as possible with the GNOME desktop: - - - - - GNOME applications usually call gtk_init () - to parse command-line options and initialize GTK. &GStreamer; - applications would normally call gst_init () - to do the same for GStreamer. - This would mean that only one of the two can parse command-line - options. To work around this issue, &GStreamer; can provide a - GLib GOptionGroup which can be passed to - gnome_program_init (). The following - example requires GTK 2.6 or newer (previous GTK versions - do not support command line parsing via GOption yet) - - -#include <gtk/gtk.h> -#include <gst/gst.h> - -static gchar **cmd_filenames = NULL; - -static GOptionEntries cmd_options[] = { - /* here you can add command line options for your application. Check - * the GOption section in the GLib API reference for a more elaborate - * example of how to add your own command line options here */ - - /* at the end we have a special option that collects all remaining - * command line arguments (like filenames) for us. If you don't - * need this, you can safely remove it */ - { G_OPTION_REMAINING, 0, 0, G_OPTION_ARG_FILENAME_ARRAY, &cmd_filenames, - "Special option that collects any remaining arguments for us" }, - - /* mark the end of the options array with a NULL option */ - { NULL, } -}; - -/* this should usually be defined in your config.h */ -#define VERSION "0.0.1" - -gint -main (gint argc, gchar **argv) -{ - GOptionContext *context; - GOptionGroup *gstreamer_group, *gtk_group; - GError *err = NULL; - - context = g_option_context_new ("gtk-demo-app"); - - /* get command line options from GStreamer and add them to the group */ - gstreamer_group = gst_init_get_option_group (); - g_option_context_add_group (context, gstreamer_group); - gtk_group = gtk_get_option_group (TRUE); - g_option_context_add_group (context, gtk_group); - - /* add our own options. 
If you are using gettext for translation of your - * strings, use GETTEXT_PACKAGE here instead of NULL */ - g_option_context_add_main_entries (context, cmd_options, NULL); - - /* now parse the commandline options, note that this already - * calls gtk_init() and gst_init() */ - if (!g_option_context_parse (ctx, &argc, &argv, &err)) { - g_print ("Error initializing: %s\n", err->message); - g_clear_error (&err); - g_option_context_free (ctx); - exit (1); - } - g_option_context_free (ctx); - - /* any filenames we got passed on the command line? parse them! */ - if (cmd_filenames != NULL) { - guint i, num; - - num = g_strv_length (cmd_filenames); - for (i = 0; i < num; ++i) { - /* do something with the filename ... */ - g_print ("Adding to play queue: %s\n", cmd_filenames[i]); - } - - g_strfreev (cmd_filenames); - cmd_filenames = NULL; - } - -[..] - -} - - - - - GNOME uses Pulseaudio for audio, use the pulsesrc and - pulsesink elements to have access to all the features. - - - - - &GStreamer; provides data input/output elements for use with the - GIO VFS system. These elements are called giosrc - and giosink. - The deprecated GNOME-VFS system is supported too but shouldn't be - used for any new applications. - - - - - - - - - - - - - KDE desktop - - &GStreamer; has been proposed for inclusion in KDE-4.0. Currently, - &GStreamer; is included as an optional component, and it's used by - several KDE applications, including AmaroK, - KMPlayer and - Kaffeine. - - - Although not yet as complete as the GNOME integration bits, there - are already some KDE integration specifics available. This list will - probably grow as &GStreamer; starts to be used in KDE-4.0: - - - - - AmaroK contains a kiosrc element, which is a source element that - integrates with the KDE VFS subsystem KIO. - - - - - - - - - - - OS X - - &GStreamer; provides native video and audio output elements for OS X. - It builds using the standard development tools for OS X. 
- - - - - - - - - Windows - - - -Note: this section is out of date. GStreamer-1.0 has much better -support for win32 than previous versions though and should usually compile -and work out-of-the-box both using MSYS/MinGW or Microsoft compilers. The -GStreamer web site and the -mailing list -archives are a good place to check the latest win32-related news. - - - - - - &GStreamer; builds using Microsoft Visual C .NET 2003 and using Cygwin. - - - - Building <application>GStreamer</application> under Win32 - -There are different makefiles that can be used to build GStreamer with the usual Microsoft -compiling tools. - -The Makefile is meant to be used with the GNU make program and the free -version of the Microsoft compiler (http://msdn.microsoft.com/visualc/vctoolkit2003/). You also -have to modify your system environment variables to use it from the command-line. You will also -need a working Platform SDK for Windows that is available for free from Microsoft. - -The projects/makefiles will generate automatically some source files needed to compile -GStreamer. That requires that you have installed on your system some GNU tools and that they are -available in your system PATH. - -The GStreamer project depends on other libraries, namely : - -GLib -libxml2 -libintl -libiconv - - -Work is being done to provide pre-compiled GStreamer-1.0 libraries as -a packages for win32. Check the -GStreamer web site and check our -mailing list - for the latest developments in this respect. - - -Notes - -GNU tools needed that you can find on http://gnuwin32.sourceforge.net/ - -GNU flex (tested with 2.5.4) -GNU bison (tested with 1.35) - - -and http://www.mingw.org/ - -GNU make (tested with 3.80) - - -the generated files from the -auto makefiles will be available soon separately on the net -for convenience (people who don't want to install GNU tools). - - - - -Installation on the system - -FIXME: This section needs be updated for GStreamer-1.0. 
- - - - - - - - diff --git a/docs/manual/appendix-licensing.xml b/docs/manual/appendix-licensing.xml deleted file mode 100644 index 128a6ccbcb..0000000000 --- a/docs/manual/appendix-licensing.xml +++ /dev/null @@ -1,101 +0,0 @@ - -Licensing advisory - - How to license the applications you build with <application>GStreamer</application> - -The licensing of GStreamer is no different from a lot of other libraries -out there like GTK+ or glibc: we use the LGPL. What complicates things -with regards to GStreamer is its plugin-based design and the heavily -patented and proprietary nature of many multimedia codecs. While patents -on software are currently only allowed in a small minority of world -countries (the US and Australia being the most important of those), the -problem is that due to the central place the US hold in the world economy -and the computing industry, software patents are hard to ignore wherever -you are. - -Due to this situation, many companies, including major GNU/Linux -distributions, get trapped in a situation where they either get bad -reviews due to lacking out-of-the-box media playback capabilities (and -attempts to educate the reviewers have met with little success so far), or -go against their own - and the free software movement's - wish to avoid -proprietary software. Due to competitive pressure, most choose to add some -support. Doing that through pure free software solutions would have them -risk heavy litigation and punishment from patent owners. So when the -decision is made to include support for patented codecs, it leaves them -the choice of either using special proprietary applications, or try to -integrate the support for these codecs through proprietary plugins into -the multimedia infrastructure provided by GStreamer. Faced with one of -these two evils the GStreamer community of course prefer the second option. 
- - -The problem which arises is that most free software and open source -applications developed use the GPL as their license. While this is -generally a good thing, it creates a dilemma for people who want to put -together a distribution. The dilemma they face is that if they include -proprietary plugins in GStreamer to support patented formats in a way that -is legal for them, they do risk running afoul of the GPL license of the -applications. We have gotten some conflicting reports from lawyers on -whether this is actually a problem, but the official stance of the FSF is -that it is a problem. We view the FSF as an authority on this matter, so -we are inclined to follow their interpretation of the GPL license. - - -So what does this mean for you as an application developer? Well, it means -you have to make an active decision on whether you want your application -to be used together with proprietary plugins or not. What you decide here -will also influence the chances of commercial distributions and Unix -vendors shipping your application. The GStreamer community suggest you -license your software using a license that will allow proprietary plugins -to be bundled with GStreamer and your applications, in order to make sure -that as many vendors as possible go with GStreamer instead of less free -solutions. This in turn we hope and think will let GStreamer be a vehicle -for wider use of free formats like the Xiph.org formats. - - -If you do decide that you want to allow for non-free plugins to be used -with your application you have a variety of choices. One of the simplest -is using licenses like LGPL, MPL or BSD for your application instead of -the GPL. Or you can add an exception clause to your GPL license stating -that you except GStreamer plugins from the obligations of the GPL. 
- - -A good example of such a GPL exception clause would be, using the -Totem video player project as an example: -The authors of the Totem video player project hereby grants permission -for non-GPL-compatible GStreamer plugins to be used and distributed -together with GStreamer and Totem. This permission goes above and beyond -the permissions granted by the GPL license Totem is covered by. - - -Our suggestion among these choices is to use the LGPL license, as it is -what resembles the GPL most and it makes it a good licensing fit with the -major GNU/Linux desktop projects like GNOME and KDE. It also allows you to -share code more openly with projects that have compatible licenses. -Obviously, pure GPL code without the above-mentioned clause is not usable -in your application as such. By choosing the LGPL, there is no need for an -exception clause and thus code can be shared more freely. - - -I have above outlined the practical reasons for why the GStreamer -community suggests you allow non-free plugins to be used with your -applications. We feel that in the multimedia arena, the free software -community is still not strong enough to set the agenda and that blocking -non-free plugins to be used in our infrastructure hurts us more than it -hurts the patent owners and their ilk. - - -This view is not shared by everyone. The Free Software Foundation urges -you to use an unmodified GPL for your applications, so as to push back -against the temptation to use non-free plug-ins. They say that since not -everyone else has the strength to reject them because they are unethical, -they ask your help to give them a legal reason to do so. 
- - -This advisory is part of a bigger advisory with a FAQ which you can find -on the GStreamer website - - - - - - diff --git a/docs/manual/appendix-porting.xml b/docs/manual/appendix-porting.xml deleted file mode 100644 index 6a8eb097ea..0000000000 --- a/docs/manual/appendix-porting.xml +++ /dev/null @@ -1,316 +0,0 @@ - - Porting 0.8 applications to 0.10 - - This section of the appendix will discuss shortly what changes to - applications will be needed to quickly and conveniently port most - applications from &GStreamer;-0.8 to &GStreamer;-0.10, with references - to the relevant sections in this Application Development Manual - where needed. With this list, it should be possible to port simple - applications to &GStreamer;-0.10 in less than a day. - - - - List of changes - - - - Most functions returning an object or an object property have - been changed to return its own reference rather than a constant - reference of the one owned by the object itself. The reason for - this change is primarily thread safety. This means, effectively, - that return values of functions such as - gst_element_get_pad (), - gst_pad_get_name () and many more like these - have to be free'ed or unreferenced after use. Check the API - references of each function to know for sure whether return - values should be free'ed or not. It is important that all objects - derived from GstObject are ref'ed/unref'ed using gst_object_ref() - and gst_object_unref() respectively (instead of g_object_ref/unref). - - - - - Applications should no longer use signal handlers to be notified - of errors, end-of-stream and other similar pipeline events. - Instead, they should use the GstBus, which - has been discussed in . The bus will - take care that the messages will be delivered in the context of a - main loop, which is almost certainly the application's main thread. 
- The big advantage of this is that applications no longer need to - be thread-aware; they don't need to use g_idle_add - () in the signal handler and do the actual real work - in the idle-callback. &GStreamer; now does all that internally. - - - - - Related to this, gst_bin_iterate () has been - removed. Pipelines will iterate in their own thread, and applications - can simply run a GMainLoop (or call the - mainloop of their UI toolkit, such as gtk_main - ()). - - - - - State changes can be delayed (ASYNC). Due to the new fully threaded - nature of GStreamer-0.10, state changes are not always immediate, - in particular changes including the transition from READY to PAUSED - state. This means two things in the context of porting applications: - first of all, it is no longer always possible to do - gst_element_set_state () and check for a return - value of GST_STATE_CHANGE_SUCCESS, as the state change might be - delayed (ASYNC) and the result will not be known until later. You - should still check for GST_STATE_CHANGE_FAILURE right away, it is - just no longer possible to assume that everything that is not SUCCESS - means failure. Secondly, state changes might not be immediate, so - your code needs to take that into account. You can wait for a state - change to complete if you use GST_CLOCK_TIME_NONE as timeout interval - with gst_element_get_state (). - - - - - In 0.8, events and queries had to manually be sent to sinks in - pipelines (unless you were using playbin). This is no longer - the case in 0.10. In 0.10, queries and events can be sent to - toplevel pipelines, and the pipeline will do the dispatching - internally for you. This means less bookkeeping in your - application. For a short code example, see . Related, seeking is now - threadsafe, and your video output will show the new video - position's frame while seeking, providing a better user - experience. - - - - - The GstThread object has been removed. 
- Applications can now simply put elements in a pipeline with - optionally some queue elements in between for - buffering, and &GStreamer; will take care of creating threads - internally. It is still possible to have parts of a pipeline - run in different threads than others, by using the - queue element. See - for details. - - - - - Filtered caps -> capsfilter element (the pipeline syntax for - gst-launch has not changed though). - - - - - libgstgconf-0.10.la does not exist. Use the - gconfvideosink and gconfaudiosink - elements instead, which will do live-updates and require no library - linking. - - - - - The new-pad and state-change signals on - GstElement were renamed to - pad-added and state-changed. - - - - - gst_init_get_popt_table () has been removed - in favour of the new GOption command line option API that was - added to GLib 2.6. gst_init_get_option_group () - is the new GOption-based equivalent to - gst_init_get_ptop_table (). - - - - - - - Porting 0.10 applications to 1.0 - - This section outlines some of the changes necessary to port - applications from &GStreamer;-0.10 to &GStreamer;-1.0. For a - comprehensive and up-to-date list, see the separate - Porting to 1.0 document. - - - It should be possible to port simple applications to - &GStreamer;-1.0 in less than a day. - - - - List of changes - - - - All deprecated methods were removed. Recompile against 0.10 with - GST_DISABLE_DEPRECATED defined (such as by adding - -DGST_DISABLE_DEPRECATED to the compiler flags) and fix issues - before attempting to port to 1.0. - - - - - "playbin2" has been renamed to "playbin", with similar API - - - - - "decodebin2" has been renamed to "decodebin", with similar API. Note - that there is no longer a "new-decoded-pad" signal, just use GstElement's - "pad-added" signal instead (but don't forget to remove the 'gboolean last' - argument from your old signal callback functino signature). 
- - - - - the names of some "formatted" pad templates has been changed from e.g. - "src%d" to "src%u" or "src_%u" or similar, since we don't want to see - negative numbers in pad names. This mostly affects applications that - create request pads from elements. - - - - - some elements that used to have a single dynamic source pad have a - source pad now. Example: wavparse, id3demux, iceydemux, apedemux. - (This does not affect applications using decodebin or playbin). - - - - - playbin now proxies the GstVideoOverlay (former GstXOverlay) interface, - so most applications can just remove the sync bus handler where they - would set the window ID, and instead just set the window ID on playbin - from the application thread before starting playback. - - - playbin also proxies the GstColorBalance and GstNavigation interfaces, - so applications that use this don't need to go fishing for elements - that may implement those any more, but can just use on playbin - unconditionally. - - - - - multifdsink, tcpclientsink, tcpclientsrc, tcpserversrc the protocol property - is removed, use gdppay and gdpdepay. - - - - - XML serialization was removed. - - - - - Probes and pad blocking was merged into new pad probes. - - - - - Position, duration and convert functions no longer use an inout parameter - for the destination format. - - - - - Video and audio caps were simplified. audio/x-raw-int and audio/x-raw-float - are now all under the audio/x-raw media type. Similarly, video/x-raw-rgb - and video/x-raw-yuv are now video/x-raw. - - - - - ffmpegcolorspace was removed and replaced with videoconvert. - - - - - GstMixerInterface / GstTunerInterface were removed without replacement. - - - - - The GstXOverlay interface was renamed to GstVideoOverlay, and now part - of the video library in gst-plugins-base, as the interfaces library - no longer exists. 
- - - The name of the GstXOverlay "prepare-xwindow-id" message has changed - to "prepare-window-handle" (and GstXOverlay has been renamed to - GstVideoOverlay). Code that checks for the string directly should be - changed to use gst_is_video_overlay_prepare_window_handle_message(message) - instead. - - - - - The GstPropertyProbe interface was removed. There is no replacement - for it in GStreamer 1.0.x and 1.2.x, but since version 1.4 there is - a more featureful replacement for device discovery and feature - querying provided by GstDeviceMonitor, GstDevice, and friends. See - the - "GStreamer Device Discovery and Device Probing" documentation. - - - - - gst_uri_handler_get_uri() and the get_uri vfunc now return a copy of - the URI string - - - gst_uri_handler_set_uri() and the set_uri vfunc now take an additional - GError argument so the handler can notify the caller why it didn't - accept a particular URI. - - - gst_uri_handler_set_uri() now checks if the protocol of the URI passed - is one of the protocols advertised by the uri handler, so set_uri vfunc - implementations no longer need to check that as well. - - - - - GstTagList is now an opaque mini object instead of being typedefed to a - GstStructure. While it was previously okay (and in some cases required because of - missing taglist API) to cast a GstTagList to a GstStructure or use - gst_structure_* API on taglists, you can no longer do that. Doing so will - cause crashes. - - - Also, tag lists are refcounted now, and can therefore not be freely - modified any longer. Make sure to call gst_tag_list_make_writable (taglist) - before adding, removing or changing tags in the taglist. - - - GST_TAG_IMAGE, GST_TAG_PREVIEW_IMAGE, GST_TAG_ATTACHMENT: many tags that - used to be of type GstBuffer are now of type GstSample (which is basically - a struct containing a buffer alongside caps and some other info). - - - - - GstController has now been merged into GstObject. 
It does not exists as an - individual object anymore. In addition core contains a GstControlSource base - class and the GstControlBinding. The actual control sources are in the controller - library as before. The 2nd big change is that control sources generate - a sequence of gdouble values and those are mapped to the property type and - value range by GstControlBindings. - - - The whole gst_controller_* API is gone and now available in simplified form - under gst_object_*. ControlSources are now attached via GstControlBinding - to properties. There are no GValue arguments used anymore when programming - control sources. - - - - - diff --git a/docs/manual/appendix-programs.xml b/docs/manual/appendix-programs.xml deleted file mode 100644 index 72eab54562..0000000000 --- a/docs/manual/appendix-programs.xml +++ /dev/null @@ -1,342 +0,0 @@ - - Programs - - - - - <command>gst-launch</command> - - This is a tool that will construct pipelines based on a command-line - syntax. - - - A simple commandline looks like: - - -gst-launch filesrc location=hello.mp3 ! mad ! audioresample ! osssink - - - A more complex pipeline looks like: - - -gst-launch filesrc location=redpill.vob ! dvddemux name=demux \ - demux.audio_00 ! queue ! a52dec ! audioconvert ! audioresample ! osssink \ - demux.video_00 ! queue ! mpeg2dec ! videoconvert ! xvimagesink - - - - - You can also use the parser in you own - code. GStreamer provides a function - gst_parse_launch () that you can use to construct a pipeline. - The following program lets you create an MP3 pipeline using the - gst_parse_launch () function: - - -#include <gst/gst.h> - -int -main (int argc, char *argv[]) -{ - GstElement *pipeline; - GstElement *filesrc; - GstMessage *msg; - GstBus *bus; - GError *error = NULL; - - gst_init (&argc, &argv); - - if (argc != 2) { - g_print ("usage: %s <filename>\n", argv[0]); - return -1; - } - - pipeline = gst_parse_launch ("filesrc name=my_filesrc ! mad ! 
osssink", &error); - if (!pipeline) { - g_print ("Parse error: %s\n", error->message); - exit (1); - } - - filesrc = gst_bin_get_by_name (GST_BIN (pipeline), "my_filesrc"); - g_object_set (filesrc, "location", argv[1], NULL); - g_object_unref (filesrc); - - gst_element_set_state (pipeline, GST_STATE_PLAYING); - - bus = gst_element_get_bus (pipeline); - - /* wait until we either get an EOS or an ERROR message. Note that in a real - * program you would probably not use gst_bus_poll(), but rather set up an - * async signal watch on the bus and run a main loop and connect to the - * bus's signals to catch certain messages or all messages */ - msg = gst_bus_poll (bus, GST_MESSAGE_EOS | GST_MESSAGE_ERROR, -1); - - switch (GST_MESSAGE_TYPE (msg)) { - case GST_MESSAGE_EOS: { - g_print ("EOS\n"); - break; - } - case GST_MESSAGE_ERROR: { - GError *err = NULL; /* error to show to users */ - gchar *dbg = NULL; /* additional debug string for developers */ - - gst_message_parse_error (msg, &err, &dbg); - if (err) { - g_printerr ("ERROR: %s\n", err->message); - g_error_free (err); - } - if (dbg) { - g_printerr ("[Debug details: %s]\n", dbg); - g_free (dbg); - } - } - default: - g_printerr ("Unexpected message of type %d", GST_MESSAGE_TYPE (msg)); - break; - } - gst_message_unref (msg); - - gst_element_set_state (pipeline, GST_STATE_NULL); - gst_object_unref (pipeline); - gst_object_unref (bus); - - return 0; -} - - - Note how we can retrieve the filesrc element from the constructed bin using the - element name. - - - Grammar Reference - - The gst-launch syntax is processed by a flex/bison parser. This section - is intended to provide a full specification of the grammar; any deviations from this - specification is considered a bug. - - - Elements - - ... mad ... - - - A bare identifier (a string beginning with a letter and containing - only letters, numbers, dashes, underscores, percent signs, or colons) - will create an element from a given element factory. 
In this example, - an instance of the "mad" MP3 decoding plugin will be created. - - - - Links - - ... !sink ... - - - An exclamation point, optionally having a qualified pad name (an the name of the pad, - optionally preceded by the name of the element) on both sides, will link two pads. If - the source pad is not specified, a source pad from the immediately preceding element - will be automatically chosen. If the sink pad is not specified, a sink pad from the next - element to be constructed will be chosen. An attempt will be made to find compatible - pads. Pad names may be preceded by an element name, as in - my_element_name.sink_pad. - - - - Properties - - ... location="http://gstreamer.net" ... - - - The name of a property, optionally qualified with an element name, and a value, - separated by an equals sign, will set a property on an element. If the element is not - specified, the previous element is assumed. Strings can optionally be enclosed in - quotation marks. Characters in strings may be escaped with the backtick - (\). If the right-hand side is all digits, it is considered to be an - integer. If it is all digits and a decimal point, it is a double. If it is "true", - "false", "TRUE", or "FALSE" it is considered to be boolean. Otherwise, it is parsed as a - string. The type of the property is determined later on in the parsing, and the value is - converted to the target type. This conversion is not guaranteed to work, it relies on - the g_value_convert routines. No error message will be displayed on an invalid - conversion, due to limitations in the value convert API. - - - - Bins, Threads, and Pipelines - - ( ... ) - - - A pipeline description between parentheses is placed into a bin. The open paren may be - preceded by a type name, as in jackbin.( ... ) to make - a bin of a specified type. Square brackets make pipelines, and curly braces make - threads. 
The default toplevel bin type is a pipeline, although putting the whole - description within parentheses or braces can override this default. - - - - - - - <command>gst-inspect</command> - - This is a tool to query a plugin or an element about its properties. - - - To query the information about the element mad, you would specify: - - - -gst-inspect mad - - - - Below is the output of a query for the osssink element: - - - -, Wim Taymans - -Plugin Details: - Name: ossaudio - Description: OSS (Open Sound System) support for GStreamer - Filename: /home/wim/gst/head/gst-plugins-good/sys/oss/.libs/libgstossaudio.so - Version: 1.0.0.1 - License: LGPL - Source module: gst-plugins-good - Source release date: 2012-09-25 12:52 (UTC) - Binary package: GStreamer Good Plug-ins git - Origin URL: Unknown package origin - -GObject - +----GInitiallyUnowned - +----GstObject - +----GstElement - +----GstBaseSink - +----GstAudioBaseSink - +----GstAudioSink - +----GstOssSink - -Pad Templates: - SINK template: 'sink' - Availability: Always - Capabilities: - audio/x-raw - format: { S16LE, U16LE, S8, U8 } - layout: interleaved - rate: [ 1, 2147483647 ] - channels: 1 - audio/x-raw - format: { S16LE, U16LE, S8, U8 } - layout: interleaved - rate: [ 1, 2147483647 ] - channels: 2 - channel-mask: 0x0000000000000003 - - -Element Flags: - no flags set - -Element Implementation: - Has change_state() function: gst_audio_base_sink_change_state - -Clocking Interaction: - element is supposed to provide a clock but returned NULL - -Element has no indexing capabilities. -Element has no URI handling capabilities. - -Pads: - SINK: 'sink' - Implementation: - Has chainfunc(): gst_base_sink_chain - Has custom eventfunc(): gst_base_sink_event - Has custom queryfunc(): gst_base_sink_sink_query - Has custom iterintlinkfunc(): gst_pad_iterate_internal_links_default - Pad Template: 'sink' - -Element Properties: - name : The name of the object - flags: readable, writable - String. 
Default: "osssink0" - parent : The parent of the object - flags: readable, writable - Object of type "GstObject" - sync : Sync on the clock - flags: readable, writable - Boolean. Default: true - max-lateness : Maximum number of nanoseconds that a buffer can be late before it is dropped (-1 unlimited) - flags: readable, writable - Integer64. Range: -1 - 9223372036854775807 Default: -1 - qos : Generate Quality-of-Service events upstream - flags: readable, writable - Boolean. Default: false - async : Go asynchronously to PAUSED - flags: readable, writable - Boolean. Default: true - ts-offset : Timestamp offset in nanoseconds - flags: readable, writable - Integer64. Range: -9223372036854775808 - 9223372036854775807 Default: 0 - enable-last-sample : Enable the last-sample property - flags: readable, writable - Boolean. Default: false - last-sample : The last sample received in the sink - flags: readable - Boxed pointer of type "GstSample" - blocksize : Size in bytes to pull per buffer (0 = default) - flags: readable, writable - Unsigned Integer. Range: 0 - 4294967295 Default: 4096 - render-delay : Additional render delay of the sink in nanoseconds - flags: readable, writable - Unsigned Integer64. Range: 0 - 18446744073709551615 Default: 0 - throttle-time : The time to keep between rendered buffers - flags: readable, writable - Unsigned Integer64. Range: 0 - 18446744073709551615 Default: 0 - buffer-time : Size of audio buffer in microseconds, this is the minimum latency that the sink reports - flags: readable, writable - Integer64. Range: 1 - 9223372036854775807 Default: 200000 - latency-time : The minimum amount of data to write in each iteration in microseconds - flags: readable, writable - Integer64. Range: 1 - 9223372036854775807 Default: 10000 - provide-clock : Provide a clock to be used as the global pipeline clock - flags: readable, writable - Boolean. 
Default: true - slave-method : Algorithm to use to match the rate of the masterclock - flags: readable, writable - Enum "GstAudioBaseSinkSlaveMethod" Default: 1, "skew" - (0): resample - GST_AUDIO_BASE_SINK_SLAVE_RESAMPLE - (1): skew - GST_AUDIO_BASE_SINK_SLAVE_SKEW - (2): none - GST_AUDIO_BASE_SINK_SLAVE_NONE - can-activate-pull : Allow pull-based scheduling - flags: readable, writable - Boolean. Default: false - alignment-threshold : Timestamp alignment threshold in nanoseconds - flags: readable, writable - Unsigned Integer64. Range: 1 - 18446744073709551614 Default: 40000000 - drift-tolerance : Tolerance for clock drift in microseconds - flags: readable, writable - Integer64. Range: 1 - 9223372036854775807 Default: 40000 - discont-wait : Window of time in nanoseconds to wait before creating a discontinuity - flags: readable, writable - Unsigned Integer64. Range: 0 - 18446744073709551614 Default: 1000000000 - device : OSS device (usually /dev/dspN) - flags: readable, writable - String. Default: "/dev/dsp" -]]> - - - - To query the information about a plugin, you would do: - - - -gst-inspect gstelements - - - - diff --git a/docs/manual/appendix-quotes.xml b/docs/manual/appendix-quotes.xml deleted file mode 100644 index e0656aff46..0000000000 --- a/docs/manual/appendix-quotes.xml +++ /dev/null @@ -1,350 +0,0 @@ - - Quotes from the Developers - - As well as being a cool piece of software, - GStreamer is a lively project, with - developers from around the globe very actively contributing. - We often hang out on the #gstreamer IRC channel on - irc.freenode.net: the following are a selection of amusing - No guarantee of sense of humour compatibility is given. - quotes from our conversations. - - - - - - 6 Mar 2006 - - -When I opened my eyes I was in a court room. There were masters McIlroy and -Thompson sitting in the jury and master Kernighan too. 
There were the GStreamer -developers standing in the defendant's place, accused of violating several laws -of Unix philosophy and customer lock-down via running on a proprietary -pipeline, different from that of the Unix systems. I heard Eric Raymond -whispering "got to add this case to my book. - -behdad's blog - - - - - 22 May 2007 - -<__tim> -Uraeus: amusing, isn't it? - -<Uraeus> -__tim: I wrote that :) - -<__tim> -Uraeus: of course you did; your refusal to surrender to the oppressive regime -of the third-person-singular-rule is so unique in its persistence that it's -hard to miss :) - - - - - - - 12 Sep 2005 - -<wingo> -we just need to get rid of that mmap stuff - -<wingo> -i think gnomevfssrc is faster for files even - -<BBB> -wingo, no - -<BBB> -and no - -<wingo> -good points ronald - - - - - - 23 Jun 2005 - - -* wingo back -* thomasvs back ---- You are now known as everybody -* everybody back back -<everybody> now break it down ---- You are now known as thomasvs -* bilboed back ---- bilboed is now known as john-sebastian -* john-sebastian bach ---- john-sebastian is now known as bilboed ---- You are now known as scratch_my -* scratch_my back ---- bilboed is now known as Illbe ---- You are now known as thomasvs -* Illbe back ---- Illbe is now known as bilboed - - - - - 20 Apr 2005 - - - thomas: -jrb, somehow his screenshotsrc grabs whatever X is showing and makes it -available as a stream of frames - - - jrb: -thomas: so, is the point that the screenshooter takes a video? -but won't the dialog be in the video? oh, nevermind. I'll just send mail... - - - thomas: -jrb, well, it would shoot first and ask questions later - - - - - - 2 Nov 2004 - - - zaheerm: -wtay: unfair u fixed the bug i was using as a feature! 
- - - - - - 14 Oct 2004 - - - * zaheerm -wonders how he can break gstreamer today :) - - - ensonic: -zaheerm, spider is always a good starting point - - - - - - 14 Jun 2004 - - - teuf: ok, things work much better when I don't write incredibly stupid and buggy code - - - thaytan: I find that too - - - - - 23 Nov 2003 - - - Uraeus: ah yes, the sleeping part, my mind - is not multitasking so I was still thinking about exercise - - - dolphy: Uraeus: your mind is multitasking - - - dolphy: Uraeus: you just miss low latency patches - - - - - - 14 Sep 2002 - - - --- wingo-party is now known as - wingo - - - * wingo holds head - - - - - - 4 Jun 2001 - - taaz: you witchdoctors and your voodoo mpeg2 black magic... - omega_: um. I count three, no four different cults there <g> - ajmitch: hehe - omega_: witchdoctors, voodoo, black magic, - omega_: and mpeg - - - - - 16 Feb 2001 - - - wtay: - I shipped a few commerical products to >40000 people now but - GStreamer is way more exciting... - - - - - 16 Feb 2001 - - - * - tool-man - is a gstreamer groupie - - - - - 14 Jan 2001 - - - Omega: - did you run ldconfig? maybe it talks to init? - - - wtay: - not sure, don't think so... - I did run gstreamer-register though :-) - - - Omega: - ah, that did it then ;-) - - - wtay: - right - - - Omega: - probably not, but in case GStreamer starts turning into an OS, someone please let me know? - - - - - 9 Jan 2001 - - - wtay: - me tar, you rpm? - - - wtay: - hehe, forgot "zan" - - - Omega: - ? - - - wtay: - me tar"zan", you ... - - - - - 7 Jan 2001 - - - Omega: - that means probably building an agreggating, cache-massaging - queue to shove N buffers across all at once, forcing cache - transfer. - - - wtay: - never done that before... - - - Omega: - nope, but it's easy to do in gstreamer <g> - - - wtay: - sure, I need to rewrite cp with gstreamer too, someday :-) - - - - - 7 Jan 2001 - - - wtay: - GStreamer; always at least one developer is awake... 
- - - - - 5/6 Jan 2001 - - - wtay: - we need to cut down the time to create an mp3 player down to - seconds... - - - richardb: - :) - - - Omega: - I'm wanting to something more interesting soon, I did the "draw an mp3 - player in 15sec" back in October '99. - - - wtay: - by the time Omega gets his hands on the editor, you'll see a - complete audio mixer in the editor :-) - - - richardb: - Well, it clearly has the potential... - - - Omega: - Working on it... ;-) - - - - - 28 Dec 2000 - - - MPAA: - We will sue you now, you have violated our IP rights! - - - wtay: - hehehe - - - MPAA: - How dare you laugh at us? We have lawyers! We have Congressmen! We have LARS! - - - wtay: - I'm so sorry your honor - - - MPAA: - Hrumph. - - - * - wtay - bows before thy - - - - - - diff --git a/docs/manual/base.css b/docs/manual/base.css deleted file mode 100644 index a24c078ad3..0000000000 --- a/docs/manual/base.css +++ /dev/null @@ -1,3 +0,0 @@ -pre.programlisting { - background: #E8E8FF; -} diff --git a/docs/manual/basics-bins.xml b/docs/manual/basics-bins.xml deleted file mode 100644 index 882f3d153c..0000000000 --- a/docs/manual/basics-bins.xml +++ /dev/null @@ -1,185 +0,0 @@ - - Bins - - A bin is a container element. You can add elements to a bin. Since a - bin is an element itself, a bin can be handled in the same way as any - other element. Therefore, the whole previous chapter () applies to bins as well. - - - - What are bins - - Bins allow you to combine a group of linked elements into one - logical element. You do not deal with the individual elements - anymore but with just one element, the bin. We will see that - this is extremely powerful when you are going to construct - complex pipelines since it allows you to break up the pipeline - in smaller chunks. - - - The bin will also manage the elements contained in it. It will - perform state changes on the elements as well as collect and - forward bus messages. - - -
- Visualisation of a bin with some elements in it - - - - - -
- - - There is one specialized type of bin available to the - &GStreamer; programmer: - - - - - A pipeline: a generic container that manages the synchronization - and bus messages of the contained elements. The toplevel bin has - to be a pipeline, every application thus needs at least one of - these. - - - -
- - - Creating a bin - - Bins are created in the same way that other elements are created, - i.e. using an element factory. There are also convenience functions - available (gst_bin_new () and - gst_pipeline_new ()). - To add elements to a bin or remove elements from a - bin, you can use gst_bin_add () and - gst_bin_remove (). Note that the bin that you - add an element to will take ownership of that element. If you - destroy the bin, the element will be dereferenced with it. If you - remove an element from a bin, it will be dereferenced automatically. - - -#include <gst/gst.h> - -int -main (int argc, - char *argv[]) -{ - GstElement *bin, *pipeline, *source, *sink; - - /* init */ - gst_init (&argc, &argv); - - /* create */ - pipeline = gst_pipeline_new ("my_pipeline"); - bin = gst_bin_new ("my_bin"); - source = gst_element_factory_make ("fakesrc", "source"); - sink = gst_element_factory_make ("fakesink", "sink"); - - /* First add the elements to the bin */ - gst_bin_add_many (GST_BIN (bin), source, sink, NULL); - /* add the bin to the pipeline */ - gst_bin_add (GST_BIN (pipeline), bin); - - /* link the elements */ - gst_element_link (source, sink); - -[..] - -} - - - There are various functions to lookup elements in a bin. The most - commonly used are gst_bin_get_by_name () and - gst_bin_get_by_interface (). You can also - iterate over all elements that a bin contains using the function - gst_bin_iterate_elements (). See the API references - of GstBin - for details. - - - - - Custom bins - - The application programmer can create custom bins packed with elements - to perform a specific task. 
This allows you, for example, to write - an Ogg/Vorbis decoder with just the following lines of code: - - -int -main (int argc, - char *argv[]) -{ - GstElement *player; - - /* init */ - gst_init (&argc, &argv); - - /* create player */ - player = gst_element_factory_make ("oggvorbisplayer", "player"); - - /* set the source audio file */ - g_object_set (player, "location", "helloworld.ogg", NULL); - - /* start playback */ - gst_element_set_state (GST_ELEMENT (player), GST_STATE_PLAYING); -[..] -} - - - (This is a silly example of course, there already exists a much more - powerful and versatile custom bin like this: the playbin element.) - - - Custom bins can be created with a plugin or from the application. You - will find more information about creating custom bin in the Plugin - Writers Guide. - - - Examples of such custom bins are the playbin and uridecodebin elements from - gst-plugins-base. - - - - Bins manage states of their children - - Bins manage the state of all elements contained in them. If you set - a bin (or a pipeline, which is a special top-level type of bin) to - a certain target state using gst_element_set_state (), - it will make sure all elements contained within it will also be set - to this state. This means it's usually only necessary to set the state - of the top-level pipeline to start up the pipeline or shut it down. - - - The bin will perform the state changes on all its children from the - sink element to the source element. This ensures that the downstream - element is ready to receive data when the upstream element is brought - to PAUSED or PLAYING. Similarly when shutting down, the sink elements - will be set to READY or NULL first, which will cause the upstream - elements to receive a FLUSHING error and stop the streaming threads - before the elements are set to the READY or NULL state. - - - Note, however, that if elements are added to a bin or pipeline that's - already running, , e.g. 
from within a "pad-added" - signal callback, its state will not automatically be brought in line with - the current state or target state of the bin or pipeline it was added to. - Instead, you have to need to set it to the desired target state yourself - using gst_element_set_state () or - gst_element_sync_state_with_parent () when adding - elements to an already-running pipeline. - - -
diff --git a/docs/manual/basics-bus.xml b/docs/manual/basics-bus.xml deleted file mode 100644 index d040c8dcbd..0000000000 --- a/docs/manual/basics-bus.xml +++ /dev/null @@ -1,289 +0,0 @@ - - Bus - - A bus is a simple system that takes care of forwarding messages from - the streaming threads to an application in its own thread context. The - advantage of a bus is that an application does not need to be - thread-aware in order to use &GStreamer;, even though &GStreamer; - itself is heavily threaded. - - - Every pipeline contains a bus by default, so applications do not need - to create a bus or anything. The only thing applications should do is - set a message handler on a bus, which is similar to a signal handler - to an object. When the mainloop is running, the bus will periodically - be checked for new messages, and the callback will be called when any - message is available. - - - - How to use a bus - - There are two different ways to use a bus: - - - - Run a GLib/Gtk+ main loop (or iterate the default GLib main - context yourself regularly) and attach some kind of watch to the - bus. This way the GLib main loop will check the bus for new - messages and notify you whenever there are messages. - - - Typically you would use gst_bus_add_watch () - or gst_bus_add_signal_watch () in this case. - - - To use a bus, attach a message handler to the bus of a pipeline - using gst_bus_add_watch (). This handler will - be called whenever the pipeline emits a message to the bus. In this - handler, check the signal type (see next section) and do something - accordingly. The return value of the handler should be TRUE to - keep the handler attached to the bus, return FALSE to remove it. - - - - - Check for messages on the bus yourself. This can be done using - gst_bus_peek () and/or - gst_bus_poll (). 
- - - - - -#include <gst/gst.h> - -static GMainLoop *loop; - -static gboolean -my_bus_callback (GstBus *bus, - GstMessage *message, - gpointer data) -{ - g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message)); - - switch (GST_MESSAGE_TYPE (message)) { - case GST_MESSAGE_ERROR: { - GError *err; - gchar *debug; - - gst_message_parse_error (message, &err, &debug); - g_print ("Error: %s\n", err->message); - g_error_free (err); - g_free (debug); - - g_main_loop_quit (loop); - break; - } - case GST_MESSAGE_EOS: - /* end-of-stream */ - g_main_loop_quit (loop); - break; - default: - /* unhandled message */ - break; - } - - /* we want to be notified again the next time there is a message - * on the bus, so returning TRUE (FALSE means we want to stop watching - * for messages on the bus and our callback should not be called again) - */ - return TRUE; -} - -gint -main (gint argc, - gchar *argv[]) -{ - GstElement *pipeline; - GstBus *bus; - guint bus_watch_id; - - /* init */ - gst_init (&argc, &argv); - - /* create pipeline, add handler */ - pipeline = gst_pipeline_new ("my_pipeline"); - - /* adds a watch for new message on our pipeline's message bus to - * the default GLib main context, which is the main context that our - * GLib main loop is attached to below - */ - bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); - bus_watch_id = gst_bus_add_watch (bus, my_bus_callback, NULL); - gst_object_unref (bus); - -[..] - - /* create a mainloop that runs/iterates the default GLib main context - * (context NULL), in other words: makes the context check if anything - * it watches for has happened. When a message has been posted on the - * bus, the default main context will automatically call our - * my_bus_callback() function to notify us of that message. 
- * The main loop will be run until someone calls g_main_loop_quit() - */ - loop = g_main_loop_new (NULL, FALSE); - g_main_loop_run (loop); - - /* clean up */ - gst_element_set_state (pipeline, GST_STATE_NULL); - gst_object_unref (pipeline); - g_source_remove (bus_watch_id); - g_main_loop_unref (loop); - - return 0; -} - - - - It is important to know that the handler will be called in the thread - context of the mainloop. This means that the interaction between the - pipeline and application over the bus is - asynchronous, and thus not suited for some - real-time purposes, such as cross-fading between audio tracks, doing - (theoretically) gapless playback or video effects. All such things - should be done in the pipeline context, which is easiest by writing - a &GStreamer; plug-in. It is very useful for its primary purpose, - though: passing messages from pipeline to application. - The advantage of this approach is that all the threading that - &GStreamer; does internally is hidden from the application and the - application developer does not have to worry about thread issues at - all. - - - Note that if you're using the default GLib mainloop integration, you - can, instead of attaching a watch, connect to the message - signal on the bus. This way you don't have to - switch() - on all possible message types; just connect to the interesting signals - in form of message::<type>, where <type> - is a specific message type (see the next section for an explanation of - message types). - - - The above snippet could then also be written as: - - -GstBus *bus; - -[..] - -bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline); -gst_bus_add_signal_watch (bus); -g_signal_connect (bus, "message::error", G_CALLBACK (cb_message_error), NULL); -g_signal_connect (bus, "message::eos", G_CALLBACK (cb_message_eos), NULL); - -[..] - - - If you aren't using GLib mainloop, the asynchronous message signals won't - be available by default. 
You can however install a custom sync handler - that wakes up the custom mainloop and that uses - gst_bus_async_signal_func () to emit the signals. - (see also documentation for details) - - - - - Message types - - &GStreamer; has a few pre-defined message types that can be passed - over the bus. The messages are extensible, however. Plug-ins can - define additional messages, and applications can decide to either - have specific code for those or ignore them. All applications are - strongly recommended to at least handle error messages by providing - visual feedback to the user. - - - All messages have a message source, type and timestamp. The message - source can be used to see which element emitted the message. For some - messages, for example, only the ones emitted by the top-level pipeline - will be interesting to most applications (e.g. for state-change - notifications). Below is a list of all messages and a short explanation - of what they do and how to parse message-specific content. - - - - - Error, warning and information notifications: those are used - by elements if a message should be shown to the user about the - state of the pipeline. Error messages are fatal and terminate - the data-passing. The error should be repaired to resume pipeline - activity. Warnings are not fatal, but imply a problem nevertheless. - Information messages are for non-problem notifications. All those - messages contain a GError with the main - error type and message, and optionally a debug string. Both - can be extracted using gst_message_parse_error - (), _parse_warning () and - _parse_info (). Both error and debug strings - should be freed after use. - - - - - End-of-stream notification: this is emitted when the stream has - ended. The state of the pipeline will not change, but further - media handling will stall. Applications can use this to skip to - the next song in their playlist. After end-of-stream, it is also - possible to seek back in the stream. 
Playback will then continue - automatically. This message has no specific arguments. - - - - - Tags: emitted when metadata was found in the stream. This can be - emitted multiple times for a pipeline (e.g. once for descriptive - metadata such as artist name or song title, and another one for - stream-information, such as samplerate and bitrate). Applications - should cache metadata internally. gst_message_parse_tag - () should be used to parse the taglist, which should - be gst_tag_list_unref ()'ed when no longer - needed. - - - - - State-changes: emitted after a successful state change. - gst_message_parse_state_changed () can be - used to parse the old and new state of this transition. - - - - - Buffering: emitted during caching of network-streams. One can - manually extract the progress (in percent) from the message by - extracting the buffer-percent property from the - structure returned by gst_message_get_structure - (). See also . - - - - - Element messages: these are special messages that are unique to - certain elements and usually represent additional features. The - element's documentation should mention in detail which - element messages a particular element may send. As an example, - the 'qtdemux' QuickTime demuxer element may send a 'redirect' - element message on certain occasions if the stream contains a - redirect instruction. - - - - - Application-specific messages: any information on those can - be extracted by getting the message structure (see above) and - reading its fields. Usually these messages can safely be ignored. - - - Application messages are primarily meant for internal - use in applications in case the application needs to marshal - information from some thread into the main thread. This is - particularly useful when the application is making use of element - signals (as those signals will be emitted in the context of the - streaming thread). 
- - - - - diff --git a/docs/manual/basics-data.xml b/docs/manual/basics-data.xml deleted file mode 100644 index d8972e057c..0000000000 --- a/docs/manual/basics-data.xml +++ /dev/null @@ -1,101 +0,0 @@ - - Buffers and Events - - The data flowing through a pipeline consists of a combination of - buffers and events. Buffers contain the actual media data. Events - contain control information, such as seeking information and - end-of-stream notifiers. All this will flow through the pipeline - automatically when it's running. This chapter is mostly meant to - explain the concept to you; you don't need to do anything for this. - - - - Buffers - - Buffers contain the data that will flow through the pipeline you have - created. A source element will typically create a new buffer and pass - it through a pad to the next element in the chain. When using the - GStreamer infrastructure to create a media pipeline you will not have - to deal with buffers yourself; the elements will do that for you. - - - A buffer consists, amongst others, of: - - - - - Pointers to memory objects. Memory objects encapsulate a region - in the memory. - - - - - A timestamp for the buffer. - - - - - A refcount that indicates how many elements are using this - buffer. This refcount will be used to destroy the buffer when no - element has a reference to it. - - - - - Buffer flags. - - - - - The simple case is that a buffer is created, memory allocated, data - put in it, and passed to the next element. That element reads the - data, does something (like creating a new buffer and decoding into - it), and unreferences the buffer. This causes the data to be free'ed - and the buffer to be destroyed. A typical video or audio decoder - works like this. - - - There are more complex scenarios, though. Elements can modify buffers - in-place, i.e. without allocating a new one. Elements can also write - to hardware memory (such as from video-capture sources) or memory - allocated from the X-server (using XShm). 
Buffers can be read-only, - and so on. - - - - - Events - - Events are control particles that are sent both up- and downstream in - a pipeline along with buffers. Downstream events notify fellow elements - of stream states. Possible events include seeking, flushes, - end-of-stream notifications and so on. Upstream events are used both - in application-element interaction as well as element-element interaction - to request changes in stream state, such as seeks. For applications, - only upstream events are important. Downstream events are just - explained to get a more complete picture of the data concept. - - - Since most applications seek in time units, our example below does so - too: - - -static void -seek_to_time (GstElement *element, - guint64 time_ns) -{ - GstEvent *event; - - event = gst_event_new_seek (1.0, GST_FORMAT_TIME, - GST_SEEK_FLAG_NONE, - GST_SEEK_METHOD_SET, time_ns, - GST_SEEK_TYPE_NONE, G_GUINT64_CONSTANT (0)); - gst_element_send_event (element, event); -} - - - The function gst_element_seek () is a shortcut - for this. This is mostly just to show how it all works. - - - diff --git a/docs/manual/basics-elements.xml b/docs/manual/basics-elements.xml deleted file mode 100644 index b3c8634b79..0000000000 --- a/docs/manual/basics-elements.xml +++ /dev/null @@ -1,567 +0,0 @@ - - Elements - - The most important object in &GStreamer; for the application programmer - is the GstElement - object. An element is the basic building block for a media pipeline. All - the different high-level components you will use are derived from - GstElement. Every decoder, encoder, demuxer, video - or audio output is in fact a GstElement - - - - What are elements? - - For the application programmer, elements are best visualized as black - boxes. On the one end, you might put something in, the element does - something with it and something else comes out at the other side. 
For - a decoder element, for example, you'd put in encoded data, and the - element would output decoded data. In the next chapter (see ), you will learn more about data input and - output in elements, and how you can set that up in your application. - - - - Source elements - - Source elements generate data for use by a pipeline, for example - reading from disk or from a sound card. shows how we will visualise - a source element. We always draw a source pad to the right of - the element. - -
- Visualisation of a source element - - - - - -
- - Source elements do not accept data, they only generate data. You can - see this in the figure because it only has a source pad (on the - right). A source pad can only generate data. - -
- - - Filters, convertors, demuxers, muxers and codecs - - Filters and filter-like elements have both input and outputs pads. - They operate on data that they receive on their input (sink) pads, - and will provide data on their output (source) pads. Examples of - such elements are a volume element (filter), a video scaler - (convertor), an Ogg demuxer or a Vorbis decoder. - - - Filter-like elements can have any number of source or sink pads. A - video demuxer, for example, would have one sink pad and several - (1-N) source pads, one for each elementary stream contained in the - container format. Decoders, on the other hand, will only have one - source and sink pads. - -
- Visualisation of a filter element - - - - - -
- - shows how we will - visualise a filter-like element. This specific element has one source - and one sink element. Sink pads, receiving input data, are depicted - at the left of the element; source pads are still on the right. - -
- Visualisation of a filter element with - more than one output pad - - - - - -
- - shows another - filter-like element, this one having more than one output (source) - pad. An example of one such element could, for example, be an Ogg - demuxer for an Ogg stream containing both audio and video. One - source pad will contain the elementary video stream, another will - contain the elementary audio stream. Demuxers will generally fire - signals when a new pad is created. The application programmer can - then handle the new elementary stream in the signal handler. - -
- - - Sink elements - - Sink elements are end points in a media pipeline. They accept - data but do not produce anything. Disk writing, soundcard playback, - and video output would all be implemented by sink elements. - shows a sink element. - -
- Visualisation of a sink element - - - - - -
-
-
- - - Creating a <classname>GstElement</classname> - - The simplest way to create an element is to use gst_element_factory_make - (). This function takes a factory name and an - element name for the newly created element. The name of the element - is something you can use later on to look up the element in a bin, - for example. The name will also be used in debug output. You can - pass NULL as the name argument to get a unique, - default name. - - - When you don't need the element anymore, you need to unref it using - gst_object_unref - (). This decreases the reference count for the - element by 1. An element has a refcount of 1 when it gets created. - An element gets destroyed completely when the refcount is decreased - to 0. - - - The following example &EXAFOOT; shows how to create an element named - source from the element factory named - fakesrc. It checks if the creation succeeded. - After checking, it unrefs the element. - - - -int -main (int argc, - char *argv[]) -{ - GstElement *element; - - /* init GStreamer */ - gst_init (&argc, &argv); - - /* create element */ - element = gst_element_factory_make ("fakesrc", "source"); - if (!element) { - g_print ("Failed to create element of type 'fakesrc'\n"); - return -1; - } - - gst_object_unref (GST_OBJECT (element)); - - return 0; -} - ]]> - - gst_element_factory_make is actually a shorthand - for a combination of two functions. A GstElement - object is created from a factory. To create the element, you have to - get access to a GstElementFactory - object using a unique factory name. This is done with gst_element_factory_find - (). - - - The following code fragment is used to get a factory that can be used - to create the fakesrc element, a fake data source. - The function gst_element_factory_create - () will use the element factory to create an - element with the given name. 
- - - -int -main (int argc, - char *argv[]) -{ - GstElementFactory *factory; - GstElement * element; - - /* init GStreamer */ - gst_init (&argc, &argv); - - /* create element, method #2 */ - factory = gst_element_factory_find ("fakesrc"); - if (!factory) { - g_print ("Failed to find factory of type 'fakesrc'\n"); - return -1; - } - element = gst_element_factory_create (factory, "source"); - if (!element) { - g_print ("Failed to create element, even though its factory exists!\n"); - return -1; - } - - gst_object_unref (GST_OBJECT (element)); - - return 0; -} - ]]> - - - - Using an element as a <classname>GObject</classname> - - A GstElement - can have several properties which are implemented using standard - GObject properties. The usual - GObject methods to query, set and get - property values and GParamSpecs are - therefore supported. - - - Every GstElement inherits at least one - property from its parent GstObject: the - "name" property. This is the name you provide to the functions - gst_element_factory_make () or - gst_element_factory_create (). You can get - and set this property using the functions - gst_object_set_name and - gst_object_get_name or use the - GObject property mechanism as shown below. - - - -int -main (int argc, - char *argv[]) -{ - GstElement *element; - gchar *name; - - /* init GStreamer */ - gst_init (&argc, &argv); - - /* create element */ - element = gst_element_factory_make ("fakesrc", "source"); - - /* get name */ - g_object_get (G_OBJECT (element), "name", &name, NULL); - g_print ("The name of the element is '%s'.\n", name); - g_free (name); - - gst_object_unref (GST_OBJECT (element)); - - return 0; -} - ]]> - - Most plugins provide additional properties to provide more information - about their configuration or to configure the element. 
- gst-inspect is a useful tool to query the properties - of a particular element, it will also use property introspection to give - a short explanation about the function of the property and about the - parameter types and ranges it supports. See - - in the appendix for details about gst-inspect. - - - For more information about GObject - properties we recommend you read the GObject manual and an introduction to - The Glib Object system. - - - A - GstElement also provides various - GObject signals that can be used as a flexible - callback mechanism. Here, too, you can use gst-inspect - to see which signals a specific element supports. Together, signals - and properties are the most basic way in which elements and - applications interact. - - - - - More about element factories - - In the previous section, we briefly introduced the GstElementFactory - object already as a way to create instances of an element. Element - factories, however, are much more than just that. Element factories - are the basic types retrieved from the &GStreamer; registry, they - describe all plugins and elements that &GStreamer; can create. This - means that element factories are useful for automated element - instancing, such as what autopluggers do, and for creating lists - of available elements. - - - - Getting information about an element using a factory - - Tools like gst-inspect will provide some generic - information about an element, such as the person that wrote the - plugin, a descriptive name (and a shortname), a rank and a category. - The category can be used to get the type of the element that can - be created using this element factory. Examples of categories include - Codec/Decoder/Video (video decoder), - Codec/Encoder/Video (video encoder), - Source/Video (a video generator), - Sink/Video (a video output), and all these - exist for audio as well, of course. Then, there's also - Codec/Demuxer and - Codec/Muxer and a whole lot more. 
- gst-inspect will give a list of all factories, and - gst-inspect <factory-name> will list all - of the above information, and a lot more. - - - -int -main (int argc, - char *argv[]) -{ - GstElementFactory *factory; - - /* init GStreamer */ - gst_init (&argc, &argv); - - /* get factory */ - factory = gst_element_factory_find ("fakesrc"); - if (!factory) { - g_print ("You don't have the 'fakesrc' element installed!\n"); - return -1; - } - - /* display information */ - g_print ("The '%s' element is a member of the category %s.\n" - "Description: %s\n", - gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)), - gst_element_factory_get_metadata (factory, GST_ELEMENT_METADATA_KLASS), - gst_element_factory_get_metadata (factory, GST_ELEMENT_METADATA_DESCRIPTION)); - - return 0; -} - ]]> - - You can use gst_registry_pool_feature_list (GST_TYPE_ELEMENT_FACTORY) - to get a list of all the element factories that &GStreamer; knows - about. - - - - - Finding out what pads an element can contain - - Perhaps the most powerful feature of element factories is that - they contain a full description of the pads that the element - can generate, and the capabilities of those pads (in layman words: - what types of media can stream over those pads), without actually - having to load those plugins into memory. This can be used - to provide a codec selection list for encoders, or it can be used - for autoplugging purposes for media players. All current - &GStreamer;-based media players and autopluggers work this way. - We'll look closer at these features as we learn about - GstPad and GstCaps - in the next chapter: - - - - - - Linking elements - - By linking a source element with zero or more filter-like - elements and finally a sink element, you set up a media - pipeline. Data will flow through the elements. This is the - basic concept of media handling in &GStreamer;. - - - - By linking these three elements, we have created a very simple - chain of elements. 
The effect of this will be that the output of - the source element (element1) will be used as input - for the filter-like element (element2). The - filter-like element will do something with the data and send the - result to the final sink element (element3). - - - Imagine the above graph as a simple Ogg/Vorbis audio decoder. The - source is a disk source which reads the file from disc. The second - element is a Ogg/Vorbis audio decoder. The sink element is your - soundcard, playing back the decoded audio data. We will use this - simple graph to construct an Ogg/Vorbis player later in this manual. - - - In code, the above graph is written like this: - - -#include <gst/gst.h> - -int -main (int argc, - char *argv[]) -{ - GstElement *pipeline; - GstElement *source, *filter, *sink; - - /* init */ - gst_init (&argc, &argv); - - /* create pipeline */ - pipeline = gst_pipeline_new ("my-pipeline"); - - /* create elements */ - source = gst_element_factory_make ("fakesrc", "source"); - filter = gst_element_factory_make ("identity", "filter"); - sink = gst_element_factory_make ("fakesink", "sink"); - - /* must add elements to pipeline before linking them */ - gst_bin_add_many (GST_BIN (pipeline), source, filter, sink, NULL); - - /* link */ - if (!gst_element_link_many (source, filter, sink, NULL)) { - g_warning ("Failed to link elements!"); - } - -[..] - -} - - - For more specific behaviour, there are also the functions - gst_element_link () and - gst_element_link_pads (). You can also obtain - references to individual pads and link those using various - gst_pad_link_* () functions. See the API - references for more details. - - - Important: you must add elements to a bin or pipeline - before linking them, since adding an element to - a bin will disconnect any already existing links. 
Also, you cannot - directly link elements that are not in the same bin or pipeline; if - you want to link elements or pads at different hierarchy levels, you - will need to use ghost pads (more about ghost pads later, - see ). - - - - - Element States - - After being created, an element will not actually perform any actions - yet. You need to change elements state to make it do something. - &GStreamer; knows four element states, each with a very specific - meaning. Those four states are: - - - - - GST_STATE_NULL: this is the default state. - No resources are allocated in this state, so, transitioning to it - will free all resources. The element must be in this state when - its refcount reaches 0 and it is freed. - - - - - GST_STATE_READY: in the ready state, an - element has allocated all of its global resources, that is, - resources that can be kept within streams. You can think about - opening devices, allocating buffers and so on. However, the - stream is not opened in this state, so the stream positions is - automatically zero. If a stream was previously opened, it should - be closed in this state, and position, properties and such should - be reset. - - - - - GST_STATE_PAUSED: in this state, an - element has opened the stream, but is not actively processing - it. An element is allowed to modify a stream's position, read - and process data and such to prepare for playback as soon as - state is changed to PLAYING, but it is not - allowed to play the data which would make the clock run. - In summary, PAUSED is the same as PLAYING but without a running - clock. - - - Elements going into the PAUSED state should prepare themselves - for moving over to the PLAYING state as soon as possible. Video - or audio outputs would, for example, wait for data to arrive and - queue it so they can play it right after the state change. Also, - video sinks can already play the first frame (since this does - not affect the clock yet). 
Autopluggers could use this same - state transition to already plug together a pipeline. Most other - elements, such as codecs or filters, do not need to explicitly - do anything in this state, however. - - - - - GST_STATE_PLAYING: in the PLAYING state, - an element does exactly the same as in the PAUSED state, except - that the clock now runs. - - - - - You can change the state of an element using the function - gst_element_set_state (). If you set an element - to another state, &GStreamer; will internally traverse all intermediate - states. So if you set an element from NULL to PLAYING, &GStreamer; - will internally set the element to READY and PAUSED in between. - - - When moved to GST_STATE_PLAYING, pipelines - will process data automatically. They do not need to be iterated in - any form. Internally, &GStreamer; will start threads that take this - task on to them. &GStreamer; will also take care of switching - messages from the pipeline's thread into the application's own - thread, by using a GstBus. See - for details. - - - When you set a bin or pipeline to a certain target state, it will usually - propagate the state change to all elements within the bin or pipeline - automatically, so it's usually only necessary to set the state of the - top-level pipeline to start up the pipeline or shut it down. However, - when adding elements dynamically to an already-running pipeline, e.g. - from within a "pad-added" signal callback, you - need to set it to the desired target state yourself using - gst_element_set_state () or - gst_element_sync_state_with_parent (). - - -
diff --git a/docs/manual/basics-helloworld.xml b/docs/manual/basics-helloworld.xml deleted file mode 100644 index 9097b203f2..0000000000 --- a/docs/manual/basics-helloworld.xml +++ /dev/null @@ -1,277 +0,0 @@ - - Your first application - - This chapter will summarize everything you've learned in the previous - chapters. It describes all aspects of a simple &GStreamer; application, - including initializing libraries, creating elements, packing elements - together in a pipeline and playing this pipeline. By doing all this, - you will be able to build a simple Ogg/Vorbis audio player. - - - - Hello world - - We're going to create a simple first application, a simple Ogg/Vorbis - command-line audio player. For this, we will use only standard - &GStreamer; components. The player will read a file specified on - the command-line. Let's get started! - - - We've learned, in , that the first thing - to do in your application is to initialize &GStreamer; by calling - gst_init (). Also, make sure that the application - includes gst/gst.h so all function names and - objects are properly defined. Use #include - <gst/gst.h> to do that. - - - Next, you'll want to create the different elements using - gst_element_factory_make (). For an Ogg/Vorbis - audio player, we'll need a source element that reads files from a - disk. &GStreamer; includes this element under the name - filesrc. Next, we'll need something to parse the - file and decode it into raw audio. &GStreamer; has two elements - for this: the first parses Ogg streams into elementary streams (video, - audio) and is called oggdemux. The second is a Vorbis - audio decoder, it's conveniently called vorbisdec. - Since oggdemux creates dynamic pads for each elementary - stream, you'll need to set a pad-added event handler - on the oggdemux element, like you've learned in - , to link the Ogg demuxer and - the Vorbis decoder elements together. 
At last, we'll also need an - audio output element, we will use autoaudiosink, which - automatically detects your audio device. - - - The last thing left to do is to add all elements into a container - element, a GstPipeline, and wait until - we've played the whole song. We've previously - learned how to add elements to a container bin in , and we've learned about element states - in . We will also attach - a message handler to the pipeline bus so we can retrieve errors - and detect the end-of-stream. - - - Let's now add all the code together to get our very first audio - player: - - - -#include <gst/gst.h> -#include <glib.h> - - -static gboolean -bus_call (GstBus *bus, - GstMessage *msg, - gpointer data) -{ - GMainLoop *loop = (GMainLoop *) data; - - switch (GST_MESSAGE_TYPE (msg)) { - - case GST_MESSAGE_EOS: - g_print ("End of stream\n"); - g_main_loop_quit (loop); - break; - - case GST_MESSAGE_ERROR: { - gchar *debug; - GError *error; - - gst_message_parse_error (msg, &error, &debug); - g_free (debug); - - g_printerr ("Error: %s\n", error->message); - g_error_free (error); - - g_main_loop_quit (loop); - break; - } - default: - break; - } - - return TRUE; -} - - -static void -on_pad_added (GstElement *element, - GstPad *pad, - gpointer data) -{ - GstPad *sinkpad; - GstElement *decoder = (GstElement *) data; - - /* We can now link this pad with the vorbis-decoder sink pad */ - g_print ("Dynamic pad created, linking demuxer/decoder\n"); - - sinkpad = gst_element_get_static_pad (decoder, "sink"); - - gst_pad_link (pad, sinkpad); - - gst_object_unref (sinkpad); -} - - - -int -main (int argc, - char *argv[]) -{ - GMainLoop *loop; - - GstElement *pipeline, *source, *demuxer, *decoder, *conv, *sink; - GstBus *bus; - guint bus_watch_id; - - /* Initialisation */ - gst_init (&argc, &argv); - - loop = g_main_loop_new (NULL, FALSE); - - - /* Check input arguments */ - if (argc != 2) { - g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]); - return -1; - } - - - /* 
Create gstreamer elements */ - pipeline = gst_pipeline_new ("audio-player"); - source = gst_element_factory_make ("filesrc", "file-source"); - demuxer = gst_element_factory_make ("oggdemux", "ogg-demuxer"); - decoder = gst_element_factory_make ("vorbisdec", "vorbis-decoder"); - conv = gst_element_factory_make ("audioconvert", "converter"); - sink = gst_element_factory_make ("autoaudiosink", "audio-output"); - - if (!pipeline || !source || !demuxer || !decoder || !conv || !sink) { - g_printerr ("One element could not be created. Exiting.\n"); - return -1; - } - - /* Set up the pipeline */ - - /* we set the input filename to the source element */ - g_object_set (G_OBJECT (source), "location", argv[1], NULL); - - /* we add a message handler */ - bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); - bus_watch_id = gst_bus_add_watch (bus, bus_call, loop); - gst_object_unref (bus); - - /* we add all elements into the pipeline */ - /* file-source | ogg-demuxer | vorbis-decoder | converter | alsa-output */ - gst_bin_add_many (GST_BIN (pipeline), - source, demuxer, decoder, conv, sink, NULL); - - /* we link the elements together */ - /* file-source -> ogg-demuxer ~> vorbis-decoder -> converter -> alsa-output */ - gst_element_link (source, demuxer); - gst_element_link_many (decoder, conv, sink, NULL); - g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), decoder); - - /* note that the demuxer will be linked to the decoder dynamically. - The reason is that Ogg may contain various streams (for example - audio and video). The source pad(s) will be created at run time, - by the demuxer when it detects the amount and nature of streams. 
- Therefore we connect a callback function which will be executed - when the "pad-added" is emitted.*/ - - - /* Set the pipeline to "playing" state*/ - g_print ("Now playing: %s\n", argv[1]); - gst_element_set_state (pipeline, GST_STATE_PLAYING); - - - /* Iterate */ - g_print ("Running...\n"); - g_main_loop_run (loop); - - - /* Out of the main loop, clean up nicely */ - g_print ("Returned, stopping playback\n"); - gst_element_set_state (pipeline, GST_STATE_NULL); - - g_print ("Deleting pipeline\n"); - gst_object_unref (GST_OBJECT (pipeline)); - g_source_remove (bus_watch_id); - g_main_loop_unref (loop); - - return 0; -} - - - - We now have created a complete pipeline. We can visualise the - pipeline as follows: - - -
- The "hello world" pipeline - - - - - -
- -
- - - Compiling and Running helloworld.c - - To compile the helloworld example, use: gcc -Wall - helloworld.c -o helloworld - $(pkg-config --cflags --libs gstreamer-&GST_API_VERSION;). - &GStreamer; makes use of pkg-config to get compiler - and linker flags needed to compile this application. - - - If you're running a non-standard installation (ie. you've installed - GStreamer from source yourself instead of using pre-built packages), - make sure the PKG_CONFIG_PATH environment variable - is set to the correct location ($libdir/pkgconfig). - - - In the unlikely case that you are using an uninstalled GStreamer - setup (ie. gst-uninstalled), you will need to use libtool to build the - hello world program, like this: libtool --mode=link gcc -Wall - helloworld.c -o helloworld - $(pkg-config --cflags --libs gstreamer-&GST_API_VERSION;). - - - You can run this example application with ./helloworld - file.ogg. Substitute file.ogg - with your favourite Ogg/Vorbis file. - - - - - Conclusion - - This concludes our first example. As you see, setting up a pipeline - is very low-level but powerful. You will see later in this manual how - you can create a more powerful media player with even less effort - using higher-level interfaces. We will discuss all that in . We will first, however, go more in-depth - into more advanced &GStreamer; internals. - - - It should be clear from the example that we can very easily replace - the filesrc element with some other element that - reads data from a network, or some other data source element that - is better integrated with your desktop environment. Also, you can - use other decoders and parsers/demuxers to support other media types. You - can use another audio sink if you're not running Linux, but Mac OS X, - Windows or FreeBSD, or you can instead use a filesink to write audio - files to disk instead of playing them back. By using an audio card - source, you can even do audio capture instead of playback. 
All this - shows the reusability of &GStreamer; elements, which is its greatest - advantage. - - -
diff --git a/docs/manual/basics-init.xml b/docs/manual/basics-init.xml deleted file mode 100644 index 3a71be6871..0000000000 --- a/docs/manual/basics-init.xml +++ /dev/null @@ -1,129 +0,0 @@ - - Initializing &GStreamer; - - When writing a &GStreamer; application, you can simply include - gst/gst.h to get access to the library - functions. Besides that, you will also need to initialize the - &GStreamer; library. - - - - Simple initialization - - Before the &GStreamer; libraries can be used, - gst_init has to be called from the main - application. This call will perform the necessary initialization - of the library as well as parse the &GStreamer;-specific command - line options. - - - A typical program &EXAFOOT; would have code to initialize - &GStreamer; that looks like this: - - - Initializing GStreamer - - -#include <stdio.h> -#include <gst/gst.h> - -int -main (int argc, - char *argv[]) -{ - const gchar *nano_str; - guint major, minor, micro, nano; - - gst_init (&argc, &argv); - - gst_version (&major, &minor, &micro, &nano); - - if (nano == 1) - nano_str = "(CVS)"; - else if (nano == 2) - nano_str = "(Prerelease)"; - else - nano_str = ""; - - printf ("This program is linked against GStreamer %d.%d.%d %s\n", - major, minor, micro, nano_str); - - return 0; -} - - - - - Use the GST_VERSION_MAJOR, - GST_VERSION_MINOR and GST_VERSION_MICRO - macros to get the &GStreamer; version you are building against, or - use the function gst_version to get the version - your application is linked against. &GStreamer; currently uses a - scheme where versions with the same major and minor versions are - API-/ and ABI-compatible. - - - It is also possible to call the gst_init function - with two NULL arguments, in which case no command line - options will be parsed by GStreamer. 
- - - - - The GOption interface - - You can also use a GOption table to initialize your own parameters as - shown in the next example: - - - Initialisation using the GOption interface - - -#include <gst/gst.h> - -int -main (int argc, - char *argv[]) -{ - gboolean silent = FALSE; - gchar *savefile = NULL; - GOptionContext *ctx; - GError *err = NULL; - GOptionEntry entries[] = { - { "silent", 's', 0, G_OPTION_ARG_NONE, &silent, - "do not output status information", NULL }, - { "output", 'o', 0, G_OPTION_ARG_STRING, &savefile, - "save xml representation of pipeline to FILE and exit", "FILE" }, - { NULL } - }; - - ctx = g_option_context_new ("- Your application"); - g_option_context_add_main_entries (ctx, entries, NULL); - g_option_context_add_group (ctx, gst_init_get_option_group ()); - if (!g_option_context_parse (ctx, &argc, &argv, &err)) { - g_print ("Failed to initialize: %s\n", err->message); - g_clear_error (&err); - g_option_context_free (ctx); - return 1; - } - g_option_context_free (ctx); - - printf ("Run me with --help to see the Application options appended.\n"); - - return 0; -} - - - - - As shown in this fragment, you can use a GOption table to define your application-specific - command line options, and pass this table to the GLib initialization - function along with the option group returned from the - function gst_init_get_option_group. Your - application options will be parsed in addition to the standard - GStreamer options. - - - diff --git a/docs/manual/basics-pads.xml b/docs/manual/basics-pads.xml deleted file mode 100644 index edca390644..0000000000 --- a/docs/manual/basics-pads.xml +++ /dev/null @@ -1,686 +0,0 @@ - - Pads and capabilities - - As we have seen in , the pads are - the element's interface to the outside world. Data streams from one - element's source pad to another element's sink pad. The specific - type of media that the element can handle will be exposed by the - pad's capabilities. 
We will talk more on capabilities later in this - chapter (see ). - - - - Pads - - A pad type is defined by two properties: its direction and its - availability. As we've mentioned before, &GStreamer; defines two - pad directions: source pads and sink pads. This terminology is - defined from the view of within the element: elements receive data - on their sink pads and generate data on their source pads. - Schematically, sink pads are drawn on the left side of an element, - whereas source pads are drawn on the right side of an element. In - such graphs, data flows from left to right. - - - In reality, there is no objection to data flowing from a - source pad to the sink pad of an element upstream (to the - left of this element in drawings). Data will, however, always - flow from a source pad of one element to the sink pad of - another. - - - - - - Pad directions are very simple compared to pad availability. A pad - can have any of three availabilities: always, sometimes and on - request. The meaning of those three types is exactly as it says: - always pads always exist, sometimes pad exist only in certain - cases (and can disappear randomly), and on-request pads appear - only if explicitly requested by applications. - - - - Dynamic (or sometimes) pads - - Some elements might not have all of their pads when the element is - created. This can happen, for example, with an Ogg demuxer element. - The element will read the Ogg stream and create dynamic pads for - each contained elementary stream (vorbis, theora) when it detects - such a stream in the Ogg stream. Likewise, it will delete the pad - when the stream ends. This principle is very useful for demuxer - elements, for example. - - - Running gst-inspect oggdemux will show - that the element has only one pad: a sink pad called 'sink'. The - other pads are dormant. You can see this in the pad - template because there is an Exists: Sometimes - property. 
Depending on the type of Ogg file you play, the pads will - be created. We will see that this is very important when you are - going to create dynamic pipelines. You can attach a signal handler - to an element to inform you when the element has created a new pad - from one of its sometimes pad templates. The - following piece of code is an example of how to do this: - - -#include <gst/gst.h> - -static void -cb_new_pad (GstElement *element, - GstPad *pad, - gpointer data) -{ - gchar *name; - - name = gst_pad_get_name (pad); - g_print ("A new pad %s was created\n", name); - g_free (name); - - /* here, you would setup a new pad link for the newly created pad */ -[..] - -} - -int -main (int argc, - char *argv[]) -{ - GstElement *pipeline, *source, *demux; - GMainLoop *loop; - - /* init */ - gst_init (&argc, &argv); - - /* create elements */ - pipeline = gst_pipeline_new ("my_pipeline"); - source = gst_element_factory_make ("filesrc", "source"); - g_object_set (source, "location", argv[1], NULL); - demux = gst_element_factory_make ("oggdemux", "demuxer"); - - /* you would normally check that the elements were created properly */ - - /* put together a pipeline */ - gst_bin_add_many (GST_BIN (pipeline), source, demux, NULL); - gst_element_link_pads (source, "src", demux, "sink"); - - /* listen for newly created pads */ - g_signal_connect (demux, "pad-added", G_CALLBACK (cb_new_pad), NULL); - - /* start the pipeline */ - gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); - loop = g_main_loop_new (NULL, FALSE); - g_main_loop_run (loop); - -[..] - -} - - - It is not uncommon to add elements to the pipeline only from within - the "pad-added" callback. If you do this, don't - forget to set the state of the newly-added elements to the target - state of the pipeline using - gst_element_set_state () or - gst_element_sync_state_with_parent (). - - - - - Request pads - - An element can also have request pads. 
These pads are not created - automatically but are only created on demand. This is very useful - for multiplexers, aggregators and tee elements. Aggregators are - elements that merge the content of several input streams together - into one output stream. Tee elements are the reverse: they are - elements that have one input stream and copy this stream to each - of their output pads, which are created on request. Whenever an - application needs another copy of the stream, it can simply request - a new output pad from the tee element. - - - The following piece of code shows how you can request a new output - pad from a tee element: - - -static void -some_function (GstElement *tee) -{ - GstPad * pad; - gchar *name; - - pad = gst_element_get_request_pad (tee, "src%d"); - name = gst_pad_get_name (pad); - g_print ("A new pad %s was created\n", name); - g_free (name); - - /* here, you would link the pad */ -[..] - - /* and, after doing that, free our reference */ - gst_object_unref (GST_OBJECT (pad)); -} - - - The gst_element_get_request_pad () method - can be used to get a pad from the element based on the name of - the pad template. It is also possible to request a pad that is - compatible with another pad template. This is very useful if - you want to link an element to a multiplexer element and you - need to request a pad that is compatible. The method - gst_element_get_compatible_pad () can be - used to request a compatible pad, as shown in the next example. - It will request a compatible pad from an Ogg multiplexer from - any input. 
- - -static void -link_to_multiplexer (GstPad *tolink_pad, - GstElement *mux) -{ - GstPad *pad; - gchar *srcname, *sinkname; - - srcname = gst_pad_get_name (tolink_pad); - pad = gst_element_get_compatible_pad (mux, tolink_pad); - gst_pad_link (tolinkpad, pad); - sinkname = gst_pad_get_name (pad); - gst_object_unref (GST_OBJECT (pad)); - - g_print ("A new pad %s was created and linked to %s\n", sinkname, srcname); - g_free (sinkname); - g_free (srcname); -} - - - - - - Capabilities of a pad - - Since the pads play a very important role in how the element is - viewed by the outside world, a mechanism is implemented to describe - the data that can flow or currently flows through the pad by using - capabilities. Here, we will briefly describe what capabilities are - and how to use them, enough to get an understanding of the concept. - For an in-depth look into capabilities and a list of all capabilities - defined in &GStreamer;, see the Plugin - Writers Guide. - - - Capabilities are attached to pad templates and to pads. For pad - templates, it will describe the types of media that may stream - over a pad created from this template. For pads, it can either - be a list of possible caps (usually a copy of the pad template's - capabilities), in which case the pad is not yet negotiated, or it - is the type of media that currently streams over this pad, in - which case the pad has been negotiated already. - - - - Dissecting capabilities - - A pad's capabilities are described in a GstCaps - object. Internally, a GstCaps - will contain one or more GstStructure - that will describe one media type. A negotiated pad will have - capabilities set that contain exactly one - structure. Also, this structure will contain only - fixed values. These constraints are not - true for unnegotiated pads or pad templates. - - - As an example, below is a dump of the capabilities of the - vorbisdec element, which you will get by running - gst-inspect vorbisdec. 
You will see two pads: - a source and a sink pad. Both of these pads are always available, - and both have capabilities attached to them. The sink pad will - accept vorbis-encoded audio data, with the media type - audio/x-vorbis. The source pad will be used - to send raw (decoded) audio samples to the next element, with - a raw audio media type (in this case, - audio/x-raw). The source pad will also - contain properties for the audio samplerate and the amount of - channels, plus some more that you don't need to worry about - for now. - - - -Pad Templates: - SRC template: 'src' - Availability: Always - Capabilities: - audio/x-raw - format: F32LE - rate: [ 1, 2147483647 ] - channels: [ 1, 256 ] - - SINK template: 'sink' - Availability: Always - Capabilities: - audio/x-vorbis - - - - - Properties and values - - Properties are used to describe extra information for - capabilities. A property consists of a key (a string) and - a value. There are different possible value types that can be used: - - - - - Basic types, this can be pretty much any - GType registered with Glib. Those - properties indicate a specific, non-dynamic value for this - property. Examples include: - - - - - An integer value (G_TYPE_INT): - the property has this exact value. - - - - - A boolean value (G_TYPE_BOOLEAN): - the property is either TRUE or FALSE. - - - - - A float value (G_TYPE_FLOAT): - the property has this exact floating point value. - - - - - A string value (G_TYPE_STRING): - the property contains a UTF-8 string. - - - - - A fraction value (GST_TYPE_FRACTION): - contains a fraction expressed by an integer numerator and - denominator. - - - - - - - Range types are GTypes registered by - &GStreamer; to indicate a range of possible values. They are - used for indicating allowed audio samplerate values or - supported video sizes. 
The two types defined in &GStreamer; - are: - - - - - An integer range value - (GST_TYPE_INT_RANGE): the property - denotes a range of possible integers, with a lower and an - upper boundary. The vorbisdec element, for - example, has a rate property that can be between 8000 and - 50000. - - - - - A float range value - (GST_TYPE_FLOAT_RANGE): the property - denotes a range of possible floating point values, with a - lower and an upper boundary. - - - - - A fraction range value - (GST_TYPE_FRACTION_RANGE): the property - denotes a range of possible fraction values, with a - lower and an upper boundary. - - - - - - - A list value (GST_TYPE_LIST): the - property can take any value from a list of basic values - given in this list. - - - Example: caps that express that either - a sample rate of 44100 Hz and a sample rate of 48000 Hz - is supported would use a list of integer values, with - one value being 44100 and one value being 48000. - - - - - An array value (GST_TYPE_ARRAY): the - property is an array of values. Each value in the array is a - full value on its own, too. All values in the array should be - of the same elementary type. This means that an array can - contain any combination of integers, lists of integers, integer - ranges together, and the same for floats or strings, but it can - not contain both floats and ints at the same time. - - - Example: for audio where there are more than two channels involved - the channel layout needs to be specified (for one and two channel - audio the channel layout is implicit unless stated otherwise in the - caps). So the channel layout would be an array of integer enum - values where each enum value represents a loudspeaker position. - Unlike a GST_TYPE_LIST, the values in an - array will be interpreted as a whole. - - - - - - - - What capabilities are used for - - Capabilities (short: caps) describe the type of data that is streamed - between two pads, or that one pad (template) supports. 
This makes them - very useful for various purposes: - - - - - Autoplugging: automatically finding elements to link to a - pad based on its capabilities. All autopluggers use this - method. - - - - - Compatibility detection: when two pads are linked, &GStreamer; - can verify if the two pads are talking about the same media - type. The process of linking two pads and checking if they - are compatible is called caps negotiation. - - - - - Metadata: by reading the capabilities from a pad, applications - can provide information about the type of media that is being - streamed over the pad, which is information about the stream - that is currently being played back. - - - - - Filtering: an application can use capabilities to limit the - possible media types that can stream between two pads to a - specific subset of their supported stream types. An application - can, for example, use filtered caps to set a - specific (fixed or non-fixed) video size that should stream - between two pads. You will see an example of filtered caps - later in this manual, in . - You can do caps filtering by inserting a capsfilter element into - your pipeline and setting its caps property. Caps - filters are often placed after converter elements like audioconvert, - audioresample, videoconvert or videoscale to force those - converters to convert data to a specific output format at a - certain point in a stream. - - - - - - Using capabilities for metadata - - A pad can have a set (i.e. one or more) of capabilities attached - to it. Capabilities (GstCaps) are represented - as an array of one or more GstStructures, and - each GstStructure is an array of fields where - each field consists of a field name string (e.g. "width") and a - typed value (e.g. G_TYPE_INT or - GST_TYPE_INT_RANGE). - - - Note that there is a distinct difference between the - possible capabilities of a pad (ie. 
usually what - you find as caps of pad templates as they are shown in gst-inspect), - the allowed caps of a pad (can be the same as - the pad's template caps or a subset of them, depending on the possible - caps of the peer pad) and lastly negotiated caps - (these describe the exact format of a stream or buffer and contain - exactly one structure and have no variable bits like ranges or lists, - ie. they are fixed caps). - - - You can get values of properties in a set of capabilities - by querying individual properties of one structure. You can get - a structure from a caps using - gst_caps_get_structure () and the number of - structures in a GstCaps using - gst_caps_get_size (). - - - Caps are called simple caps when they contain - only one structure, and fixed caps when they - contain only one structure and have no variable field types (like - ranges or lists of possible values). Two other special types of caps - are ANY caps and empty caps. - - - Here is an example of how to extract the width and height from - a set of fixed video caps: - -static void -read_video_props (GstCaps *caps) -{ - gint width, height; - const GstStructure *str; - - g_return_if_fail (gst_caps_is_fixed (caps)); - - str = gst_caps_get_structure (caps, 0); - if (!gst_structure_get_int (str, "width", &width) || - !gst_structure_get_int (str, "height", &height)) { - g_print ("No width/height available\n"); - return; - } - - g_print ("The video size of this set of capabilities is %dx%d\n", - width, height); -} - - - - - - Creating capabilities for filtering - - While capabilities are mainly used inside a plugin to describe the - media type of the pads, the application programmer often also has - to have basic understanding of capabilities in order to interface - with the plugins, especially when using filtered caps. When you're - using filtered caps or fixation, you're limiting the allowed types of - media that can stream between two pads to a subset of their supported - media types. 
You do this using a capsfilter - element in your pipeline. In order to do this, you also need to - create your own GstCaps. The easiest way to - do this is by using the convenience function - gst_caps_new_simple (): - - - -static gboolean -link_elements_with_filter (GstElement *element1, GstElement *element2) -{ - gboolean link_ok; - GstCaps *caps; - - caps = gst_caps_new_simple ("video/x-raw", - "format", G_TYPE_STRING, "I420", - "width", G_TYPE_INT, 384, - "height", G_TYPE_INT, 288, - "framerate", GST_TYPE_FRACTION, 25, 1, - NULL); - - link_ok = gst_element_link_filtered (element1, element2, caps); - gst_caps_unref (caps); - - if (!link_ok) { - g_warning ("Failed to link element1 and element2!"); - } - - return link_ok; -} - - This will force the data flow between those two elements to - a certain video format, width, height and framerate (or the linking - will fail if that cannot be achieved in the context of the elements - involved). Keep in mind that when you use - gst_element_link_filtered () it will automatically create - a capsfilter element for you and insert it into - your bin or pipeline between the two elements you want to connect (this - is important if you ever want to disconnect those elements because then - you will have to disconnect both elements from the capsfilter instead). - - - In some cases, you will want to create a more elaborate set of - capabilities to filter a link between two pads. 
Then, this function - is too simplistic and you'll want to use the method - gst_caps_new_full (): - - -static gboolean -link_elements_with_filter (GstElement *element1, GstElement *element2) -{ - gboolean link_ok; - GstCaps *caps; - - caps = gst_caps_new_full ( - gst_structure_new ("video/x-raw", - "width", G_TYPE_INT, 384, - "height", G_TYPE_INT, 288, - "framerate", GST_TYPE_FRACTION, 25, 1, - NULL), - gst_structure_new ("video/x-bayer", - "width", G_TYPE_INT, 384, - "height", G_TYPE_INT, 288, - "framerate", GST_TYPE_FRACTION, 25, 1, - NULL), - NULL); - - link_ok = gst_element_link_filtered (element1, element2, caps); - gst_caps_unref (caps); - - if (!link_ok) { - g_warning ("Failed to link element1 and element2!"); - } - - return link_ok; -} - - - See the API references for the full API of - GstStructure - and GstCaps. - - - - - - Ghost pads - - You can see from how a bin - has no pads of its own. This is where "ghost pads" come into play. - -
- Visualisation of a <ulink type="http" - url="&URLAPI;GstBin.html"><classname>GstBin</classname></ulink> - element without ghost pads - - - - - -
- - A ghost pad is a pad from some element in the bin that can be - accessed directly from the bin as well. Compare it to a symbolic - link in UNIX filesystems. Using ghost pads on bins, the bin also - has a pad and can transparently be used as an element in other - parts of your code. - - -
- Visualisation of a <ulink type="http" - url="&URLAPI;GstBin.html"><classname>GstBin</classname></ulink> - element with a ghost pad - - - - - -
- - is a representation of a - ghost pad. The sink pad of element one is now also a pad of the bin. - Because ghost pads look and work like any other pads, they can be added - to any type of elements, not just to a GstBin, - just like ordinary pads. - - - A ghostpad is created using the function - gst_ghost_pad_new (): - - -#include <gst/gst.h> - -int -main (int argc, - char *argv[]) -{ - GstElement *bin, *sink; - GstPad *pad; - - /* init */ - gst_init (&argc, &argv); - - /* create element, add to bin */ - sink = gst_element_factory_make ("fakesink", "sink"); - bin = gst_bin_new ("mybin"); - gst_bin_add (GST_BIN (bin), sink); - - /* add ghostpad */ - pad = gst_element_get_static_pad (sink, "sink"); - gst_element_add_pad (bin, gst_ghost_pad_new ("sink", pad)); - gst_object_unref (GST_OBJECT (pad)); - -[..] - -} - - - In the above example, the bin now also has a pad: the pad called - sink of the given element. The bin can, from here - on, be used as a substitute for the sink element. You could, for - example, link another element to the bin. - -
-
diff --git a/docs/manual/basics-plugins.xml b/docs/manual/basics-plugins.xml deleted file mode 100644 index 0d263e1ee8..0000000000 --- a/docs/manual/basics-plugins.xml +++ /dev/null @@ -1,85 +0,0 @@ - - Plugins - - - A plugin is a shared library that contains at least one of the following - items: - - - - - - one or more element factories - - - - - one or more type definitions - - - - - one or more auto-pluggers - - - - - exported symbols for use in other plugins - - - - - - All plugins should implement one function, plugin_init, - that creates all the element factories and registers all the type - definitions contained in the plugin. - Without this function, a plugin cannot be registered. - - - The plugins are maintained in the plugin system. Optionally, the - type definitions and the element factories can be saved into an XML - representation so that the plugin system does not have to load all - available plugins in order to know their definition. - - - - The basic plugin structure has the following fields: - - -typedef struct _GstPlugin GstPlugin; - -struct _GstPlugin { - gchar *name; /* name of the plugin */ - gchar *longname; /* long name of plugin */ - gchar *filename; /* filename it came from */ - - GList *types; /* list of types provided */ - gint numtypes; - GList *elements; /* list of elements provided */ - gint numelements; - GList *autopluggers; /* list of autopluggers provided */ - gint numautopluggers; - - gboolean loaded; /* if the plugin is in memory */ -}; - - - - You can query a GList of available plugins with the - function gst_registry_pool_plugin_list as this example - shows: - - - GList *plugins; - - plugins = gst_registry_pool_plugin_list (); - - while (plugins) { - GstPlugin *plugin = (GstPlugin *)plugins->data; - - g_print ("plugin: %s\n", gst_plugin_get_name (plugin)); - - plugins = g_list_next (plugins); - } - - diff --git a/docs/manual/bin-element-ghost.png b/docs/manual/bin-element-ghost.png deleted file mode 100644 index 
5ca55dcf44..0000000000 Binary files a/docs/manual/bin-element-ghost.png and /dev/null differ diff --git a/docs/manual/bin-element-noghost.png b/docs/manual/bin-element-noghost.png deleted file mode 100644 index 61dcb65e5e..0000000000 Binary files a/docs/manual/bin-element-noghost.png and /dev/null differ diff --git a/docs/manual/bin-element.png b/docs/manual/bin-element.png deleted file mode 100644 index 7c7f80d4eb..0000000000 Binary files a/docs/manual/bin-element.png and /dev/null differ diff --git a/docs/manual/clocks.png b/docs/manual/clocks.png deleted file mode 100644 index a96de4e229..0000000000 Binary files a/docs/manual/clocks.png and /dev/null differ diff --git a/docs/manual/communication.png b/docs/manual/communication.png deleted file mode 100644 index 2b11ed5bdd..0000000000 Binary files a/docs/manual/communication.png and /dev/null differ diff --git a/docs/manual/diagrams-clocks.svg b/docs/manual/diagrams-clocks.svg deleted file mode 100644 index 8299c8187e..0000000000 --- a/docs/manual/diagrams-clocks.svg +++ /dev/null @@ -1,1565 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - image/svg+xml - - - - - - - - Clock time - Running time - - - Stream time - - base time - 10 - - - - - - - - - - - - - - - 20 - - 30 - 40 - 50 - 60 - 70 - 80 - 90 - 100 - 110 - 120 - 130 - 140 - 10 - - - - - - - - - - - - - - - 20 - 30 - 40 - 50 - 60 - 70 - 80 - 90 - 100 - 110 - 120 - 130 - 140 - 150 - 160 - 170 - 180 - 190 - 200 - 210 - 220 - - - - - - - - 10 - - - - - - - - - - - - - - - 20 - 30 - 40 - 50 - 60 - 70 - 80 - 90 - 100 - 60 - 70 - 80 - 90 - - - - - - - - - replay - - - - - - - - - - - - - - - 100 ms stream - - - - diff --git a/docs/manual/diagrams-general.svg b/docs/manual/diagrams-general.svg deleted file mode 100644 index 98c427eb98..0000000000 --- a/docs/manual/diagrams-general.svg +++ /dev/null @@ -1,14982 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - image/svg+xml - - gstreamer diagrams - - - Luc Pionchon - - - June 2008 - This diagram uses icons from the Tango Desktop Project. http://tango.freedesktop.org - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - <Element name> - - - sink - - - - src - - - <Element name> - - - sink - - - <Element name> - - - sink - - - - - - src_01 - - - - - - src_02 - - - - - - src_03 - - - - - - src_01 - - - - - - src_02 - - - file-source - - src - - ogg-demuxer - - sink - - src_01 - - vorbis-decoder - - sink - - src - - converter - - sink - - src - - audio-output - - sink - - pipeline - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - MENU - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - R - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - file-source - - - - - - - - - - - - - - - - - - - - - - - - - - src - - - - - file-source - - src - - ogg-demuxer - - sink - - src_01 - - vorbis-decoder - - sink - - src - - audio-sink - - sink - - pipeline - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - src_02 - - - theora-decoder - - sink - - src - - video-sink - - sink - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Gstreamer pipeline for a basic ogg player - - - - theora-decoder - - sink - - src - - - - - - - - - - - - - Gstreamer "hello world" pipeline diagram - - - - file-source - - - - - - - - - - - - - - - - - 
Multimedia applications - media player - VoIP & video conferencing - streamingserver - video editor - pipeline architecture - gstreamer tools - 3rd party plugins - - - - - (...) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - gst-inspectgst-launchgst-editor... - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - gstreamer core framework - - file:- http:- rtsp:- ... - - alsa- v4l2- ... - protocols - sources - formats - codecs - filters - - - - - - - - - - - - - - - - - - - media agnosticbase classesmessage busmedia type negotiationplugin systemdata transportsynchronization - - gstreamer plugins - gstreamer includes over 250 plugins - - sinks - - avi- mp4- ogg- ... - - mp3- mpeg4- vorbis- ... - - converters- mixers- effects- ... - - alsa- xvideo- ... - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - multimedia applications - media player - VoIP & video conferencing - streamingserver - video editor - pipeline architecture - gstreamer tools - 3rd party plugins - - - - - (...) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - gst-inspectgst-launchgst-editor - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - gstreamer core framework - - file:- http:- rtsp:- ... - - alsa- v4l2- tcp/udp- ... - protocols - sources - formats - codecs - filters - media agnosticbase classesmessage busmedia type negotiationplugin systemdata transportsynchronization - - gstreamer plugins - gstreamer includes over 250 plugins - - sinks - - avi- mp4- ogg- ... - - mp3- mpeg4- vorbis- ... - - converters- mixers- effects- ... - - alsa- xvideo- tcp/udp- ... 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - file-source - - src - - ogg-demuxer - - sink - - src_01 - - vorbis-decoder - - sink - - src - sink - - - audio-output - - sink - - - pipeline - - - - bus - - application - buffers - - messages - - queries - - events - - - - - diff --git a/docs/manual/diagrams-pipelines.svg b/docs/manual/diagrams-pipelines.svg deleted file mode 100644 index 30696ac874..0000000000 --- a/docs/manual/diagrams-pipelines.svg +++ /dev/null @@ -1,12768 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - image/svg+xml - - gstreamer diagrams - - - Luc Pionchon - - - June 2008 - This diagram uses icons from the Tango Desktop Project. http://tango.freedesktop.org - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - MENU - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - R - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - file-source - - src - - ogg-demuxer - - sink - - src_01 - - vorbis-decoder - - sink - - src - - audio-sink - - sink - - pipeline - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - src_02 - - - theora-decoder - - sink - - src - - video-sink - - sink - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Gstreamer pipeline for a basic ogg player - - - - - - file-source - - src - - ogg-demuxer - - sink - - src_01 - - vorbis-decoder - - sink - - src - - converter - - sink - - src - - audio-output - - sink - - pipeline - - - - - - - - - - 
- - - - - - - - - - - source element - - src - - Chapter 5. Elements - - - filter - - sink - - src - - - - demuxer - - sink - - video - - audio - - - - sink element - - sink - - - - - source - - src - - - - filter - - sink - - src - - - - sink - - sink - - - - - Chapter 6. Bins - - - - - Element 1 - - src - - - - Element 2 - - sink - - src - - - - Element 3 - - sink - - - - Bin - - Chapter 8. Ghost pads - - - - - sink - - - Element 2 - - sink - - Bin - - - Element 1 - - sink - - src - - - - - - - Element 2 - - sink - - Bin - - - Element 1 - - sink - - src - - - Chapter 10. Hello world - Chapter 16. Threads - Chapter 17. Media types - - - - file-source - - src - - ogg-demuxer - - sink - - src_01 - - vorbis-decoder - - sink - - src - - converter - - sink - - src - - audio-output - - sink - - pipeline - - - - - (any) - application/ogg - audio/x-vorbis - audio/x-vorbis - audio/x-raw,format=F32LE - audio/x-raw,format=F32LE - audio/x-raw,format=S16LE - audio/x-raw,format=S16LE - - - - - - - - - - - - - - - - source - - src - - demuxer - - sink - - src_01 - - queue - - sink - - src - - - - - src_02 - - - queue - - sink - - src - - thread 1 - - - audio decoder - - sink - - src - - audio sink - - sink - - - - video decoder - - sink - - src - - video sink - - sink - - thread 3 - thread 2 - - - - - - - - - parser - - sink - - src - - - - decoder - - sink - - src - - - - audio sink - - sink - - - - - - networkedsource - - src - - - - - queue2 - - sink - - src - - - thread 1 - thread 2 - - - - - - - diff --git a/docs/manual/filter-element-multi.png b/docs/manual/filter-element-multi.png deleted file mode 100644 index dfbbcb1e81..0000000000 Binary files a/docs/manual/filter-element-multi.png and /dev/null differ diff --git a/docs/manual/filter-element.png b/docs/manual/filter-element.png deleted file mode 100644 index 7e2ea32605..0000000000 Binary files a/docs/manual/filter-element.png and /dev/null differ diff --git a/docs/manual/gstreamer-overview.png 
b/docs/manual/gstreamer-overview.png deleted file mode 100644 index bb2df9b276..0000000000 Binary files a/docs/manual/gstreamer-overview.png and /dev/null differ diff --git a/docs/manual/hello-world.png b/docs/manual/hello-world.png deleted file mode 100644 index 3cf8744324..0000000000 Binary files a/docs/manual/hello-world.png and /dev/null differ diff --git a/docs/manual/highlevel-playback.xml b/docs/manual/highlevel-playback.xml deleted file mode 100644 index 27d2d1ff9d..0000000000 --- a/docs/manual/highlevel-playback.xml +++ /dev/null @@ -1,617 +0,0 @@ - - Playback Components - - - &GStreamer; includes several higher-level components to simplify an - application developer's life. All of the components discussed here (for now) are - targetted at media playback. The idea of each of these components is - to integrate as closely as possible with a &GStreamer; pipeline, but - to hide the complexity of media type detection and several other - rather complex topics that have been discussed in . - - - - We currently recommend people to use either playbin (see ) or decodebin (see ), depending on their needs. - Playbin is the recommended solution for everything related to simple - playback of media that should just work. Decodebin is a more flexible - autoplugger that could be used to add more advanced features, such - as playlist support, crossfading of audio tracks and so on. Its - programming interface is more low-level than that of playbin, though. - - - - Playbin - - - Playbin is an element that can be created using the standard &GStreamer; - API (e.g. gst_element_factory_make ()). The factory - is conveniently called playbin. By being a - GstPipeline (and thus a - GstElement), playbin automatically supports all - of the features of this class, including error handling, tag support, - state handling, getting stream positions, seeking, and so on. 
- - - - Setting up a playbin pipeline is as simple as creating an instance of - the playbin element, setting a file location using the - uri property on playbin, and then setting the element - to the GST_STATE_PLAYING state (the location has to be a valid - URI, so <protocol>://<location>, e.g. - file:///tmp/my.ogg or http://www.example.org/stream.ogg). Internally, - playbin will set up a pipeline to playback the media location. - - - -#include <gst/gst.h> - -[.. my_bus_callback goes here ..] - -gint -main (gint argc, - gchar *argv[]) -{ - GMainLoop *loop; - GstElement *play; - GstBus *bus; - - /* init GStreamer */ - gst_init (&argc, &argv); - loop = g_main_loop_new (NULL, FALSE); - - /* make sure we have a URI */ - if (argc != 2) { - g_print ("Usage: %s <URI>\n", argv[0]); - return -1; - } - - /* set up */ - play = gst_element_factory_make ("playbin", "play"); - g_object_set (G_OBJECT (play), "uri", argv[1], NULL); - - bus = gst_pipeline_get_bus (GST_PIPELINE (play)); - gst_bus_add_watch (bus, my_bus_callback, loop); - gst_object_unref (bus); - - gst_element_set_state (play, GST_STATE_PLAYING); - - /* now run */ - g_main_loop_run (loop); - - /* also clean up */ - gst_element_set_state (play, GST_STATE_NULL); - gst_object_unref (GST_OBJECT (play)); - - return 0; -} - - - - Playbin has several features that have been discussed previously: - - - - - Settable video and audio output (using the video-sink - and audio-sink properties). - - - - - Mostly controllable and trackable as a - GstElement, including error handling, eos - handling, tag handling, state handling (through the - GstBus), media position handling and - seeking. - - - - - Buffers network-sources, with buffer fullness notifications being - passed through the GstBus. - - - - - Supports visualizations for audio-only media. - - - - - Supports subtitles, both in the media as well as from separate - files. For separate subtitle files, use the suburi - property. 
- - - - - Supports stream selection and disabling. If your media has - multiple audio or subtitle tracks, you can dynamically choose - which one to play back, or decide to turn it off altogether - (which is especially useful to turn off subtitles). For each - of those, use the current-text and other related - properties. - - - - - For convenience, it is possible to test playbin on - the commandline, using the command gst-launch-1.0 playbin - uri=file:///path/to/file. - - - - - Decodebin - - - Decodebin is the actual autoplugger backend of playbin, which was - discussed in the previous section. Decodebin will, in short, accept - input from a source that is linked to its sinkpad and will try to - detect the media type contained in the stream, and set up decoder - routines for each of those. It will automatically select decoders. - For each decoded stream, it will emit the pad-added - signal, to let the client know about the newly found decoded stream. - For unknown streams (which might be the whole stream), it will emit - the unknown-type signal. The application is then - responsible for reporting the error to the user. - - - -]]> - -[.. my_bus_callback goes here ..] 
- - -\n", argv[0]); - return -1; - } - - /* setup */ - pipeline = gst_pipeline_new ("pipeline"); - - bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); - gst_bus_add_watch (bus, my_bus_callback, loop); - gst_object_unref (bus); - - src = gst_element_factory_make ("filesrc", "source"); - g_object_set (G_OBJECT (src), "location", argv[1], NULL); - dec = gst_element_factory_make ("decodebin", "decoder"); - g_signal_connect (dec, "pad-added", G_CALLBACK (cb_newpad), NULL); - gst_bin_add_many (GST_BIN (pipeline), src, dec, NULL); - gst_element_link (src, dec); - - /* create audio output */ - audio = gst_bin_new ("audiobin"); - conv = gst_element_factory_make ("audioconvert", "aconv"); - audiopad = gst_element_get_static_pad (conv, "sink"); - sink = gst_element_factory_make ("alsasink", "sink"); - gst_bin_add_many (GST_BIN (audio), conv, sink, NULL); - gst_element_link (conv, sink); - gst_element_add_pad (audio, - gst_ghost_pad_new ("sink", audiopad)); - gst_object_unref (audiopad); - gst_bin_add (GST_BIN (pipeline), audio); - - /* run */ - gst_element_set_state (pipeline, GST_STATE_PLAYING); - g_main_loop_run (loop); - - /* cleanup */ - gst_element_set_state (pipeline, GST_STATE_NULL); - gst_object_unref (GST_OBJECT (pipeline)); - - return 0; -} -]]> - - - Decodebin, similar to playbin, supports the following features: - - - - - Can decode an unlimited number of contained streams to decoded - output pads. - - - - - Is handled as a GstElement in all ways, - including tag or error forwarding and state handling. - - - - - Although decodebin is a good autoplugger, there's a whole lot of - things that it does not do and is not intended to do: - - - - - Taking care of input streams with a known media type (e.g. a DVD, - an audio-CD or such). - - - - - Selection of streams (e.g. which audio track to play in case of - multi-language media streams). - - - - - Overlaying subtitles over a decoded video stream. - - - - - Decodebin can be easily tested on the commandline, e.g. 
by using the - command gst-launch-1.0 filesrc location=file.ogg ! decodebin - ! audioconvert ! audioresample ! autoaudiosink. - - - - - URIDecodebin - - The uridecodebin element is very similar to decodebin, only that it - automatically plugs a source plugin based on the protocol of the URI - given. - - - Uridecodebin will also automatically insert buffering elements when - the uri is a slow network source. The buffering element will post - BUFFERING messages that the application needs to handle as explained - in . - The following properties can be used to configure the buffering method: - - - - - The buffer-size property allows you to configure a maximum size in - bytes for the buffer element. - - - - - The buffer-duration property allows you to configure a maximum size - in time for the buffer element. The time will be estimated based on - the bitrate of the network. - - - - - With the download property you can enable the download buffering - method as described in . - Setting this option to TRUE will only enable download buffering - for selected formats such as quicktime, flash video, avi and - webm. - - - - - You can also enable buffering on the parsed/demuxed data with the - use-buffering property. This is interesting to enable buffering - on slower random access media such as a network file server. - - - - - URIDecodebin can be easily tested on the commandline, e.g. by using the - command gst-launch-1.0 uridecodebin uri=file:///file.ogg ! - ! audioconvert ! audioresample ! autoaudiosink. - - - - - Playsink - - The playsink element is a powerful sink element. It has request pads - for raw decoded audio, video and text and it will configure itself to - play the media streams. It has the following features: - - - - - It exposes GstStreamVolume, GstVideoOverlay, GstNavigation and - GstColorBalance interfaces and automatically plugs software - elements to implement the interfaces when needed. - - - - - It will automatically plug conversion elements. 
- - - - - Can optionally render visualizations when there is no video input. - - - - - Configurable sink elements. - - - - - Configurable audio/video sync offset to fine-tune synchronization - in badly muxed files. - - - - - Support for taking a snapshot of the last video frame. - - - - - Below is an example of how you can use playsink. We use a uridecodebin - element to decode into raw audio and video streams which we then link - to the playsink request pads. We only link the first audio and video - pads, you could use an input-selector to link all pads. - - - - -]]> - -[.. my_bus_callback goes here ..] - - - - -\n", argv[0]); - return -1; - } - - /* setup */ - pipeline = gst_pipeline_new ("pipeline"); - - bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); - gst_bus_add_watch (bus, my_bus_callback, loop); - gst_object_unref (bus); - - dec = gst_element_factory_make ("uridecodebin", "source"); - g_object_set (G_OBJECT (dec), "uri", argv[1], NULL); - g_signal_connect (dec, "pad-added", G_CALLBACK (cb_pad_added), NULL); - - /* create audio output */ - sink = gst_element_factory_make ("playsink", "sink"); - gst_util_set_object_arg (G_OBJECT (sink), "flags", - "soft-colorbalance+soft-volume+vis+text+audio+video"); - gst_bin_add_many (GST_BIN (pipeline), dec, sink, NULL); - - /* run */ - gst_element_set_state (pipeline, GST_STATE_PLAYING); - g_main_loop_run (loop); - - /* cleanup */ - gst_element_set_state (pipeline, GST_STATE_NULL); - gst_object_unref (GST_OBJECT (pipeline)); - - return 0; -} -]]> - - - - This example will show audio and video depending on what you - give it. Try this example on an audio file and you will see that - it shows visualizations. You can change the visualization at runtime by - changing the vis-plugin property. 
- - - diff --git a/docs/manual/highlevel-xml.xml b/docs/manual/highlevel-xml.xml deleted file mode 100644 index e330616636..0000000000 --- a/docs/manual/highlevel-xml.xml +++ /dev/null @@ -1,17 +0,0 @@ - - XML in <application>GStreamer</application> (deprecated) - - GStreamer used to provide functions to - save pipeline definitions into XML format and later restore them - again from XML. - - - - This never really worked properly for all but the most simple use cases - though, and is also pretty much impossible to make work correctly in a - useful way due to the dynamic nature of almost all non-trivial GStreamer - pipelines. Consequently, this API has been deprecated and will be - removed at some point. Don't use it. - - - diff --git a/docs/manual/images/.gitignore b/docs/manual/images/.gitignore deleted file mode 100644 index 68a2009bf3..0000000000 --- a/docs/manual/images/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -*.eps -*.png diff --git a/docs/manual/intro-basics.xml b/docs/manual/intro-basics.xml deleted file mode 100644 index 0c385b1a1c..0000000000 --- a/docs/manual/intro-basics.xml +++ /dev/null @@ -1,159 +0,0 @@ - - Foundations - - This chapter of the guide introduces the basic concepts of &GStreamer;. - Understanding these concepts will be important in reading any of the - rest of this guide, all of them assume understanding of these basic - concepts. - - - - Elements - - An element is the most important class of objects - in &GStreamer;. You will usually create a chain of elements linked - together and let data flow through this chain of elements. An element - has one specific function, which can be the reading of data from a - file, decoding of this data or outputting this data to your sound - card (or anything else). By chaining together several such elements, - you create a pipeline that can do a specific task, - for example media playback or capture. 
&GStreamer; ships with a large - collection of elements by default, making the development of a large - variety of media applications possible. If needed, you can also write - new elements. That topic is explained in great deal in the &GstPWG;. - - - - - Pads - - Pads are element's input and output, where - you can connect other elements. They are used to negotiate links and - data flow - between elements in &GStreamer;. A pad can be viewed as a - plug or port on an element where - links may be made with other elements, and through which data can - flow to or from those elements. Pads have specific data handling - capabilities: a pad can restrict the type of data that flows - through it. Links are only allowed between two pads when the - allowed data types of the two pads are compatible. Data types are - negotiated between pads using a process called caps - negotiation. Data types are described as a - GstCaps. - - - An analogy may be helpful here. A pad is similar to a plug or jack on a - physical device. Consider, for example, a home theater system consisting - of an amplifier, a DVD player, and a (silent) video projector. Linking - the DVD player to the amplifier is allowed because both devices have audio - jacks, and linking the projector to the DVD player is allowed because - both devices have compatible video jacks. Links between the - projector and the amplifier may not be made because the projector and - amplifier have different types of jacks. Pads in &GStreamer; serve the - same purpose as the jacks in the home theater system. - - - For the most part, all data in &GStreamer; flows one way through a link - between elements. Data flows out of one element through one or more - source pads, and elements accept incoming data - through one or more sink pads. Source and sink - elements have only source and sink pads, respectively. Data usually - means buffers (described by the - GstBuffer object) and events (described - by the - GstEvent object). 
- - - - - Bins and pipelines - - - A bin is a container for a collection of elements. - Since bins are subclasses of elements - themselves, you can mostly control a bin as if it were an element, - thereby abstracting away a lot of complexity for your application. You - can, for example change state on all elements in a bin by changing the - state of that bin itself. Bins also forward bus messages from their - contained children (such as error messages, tag messages or EOS messages). - - - A pipeline is a top-level bin. It provides a bus for - the application and manages the synchronization for its children. - As you set it to PAUSED or PLAYING state, data flow will start and media - processing will take place. Once started, pipelines will run in a - separate thread until you stop them or the end - of the data stream is reached. - - -
- &GStreamer; pipeline for a simple ogg player - - - - - -
- -
- - - Communication - - - &GStreamer; provides several mechanisms for communication and data exchange - between the application and the pipeline. - - - - - - buffers are objects for passing streaming data - between elements in the pipeline. Buffers always travel from sources - to sinks (downstream). - - - - - events are objects sent between elements or from - the application to elements. Events can travel upstream and downstream. - Downstream events can be synchronised to the data flow. - - - - - messages are objects posted by elements on - the pipeline's message bus, where they will be held for collection - by the application. Messages can be intercepted synchronously from - the streaming thread context of the element posting the message, but - are usually handled asynchronously by the application from the - application's main thread. Messages are used to transmit information - such as errors, tags, state changes, buffering state, redirects etc. - from elements to the application in a thread-safe way. - - - - - queries allow applications to request information - such as duration or current playback position from the pipeline. - Queries are always answered synchronously. Elements can also use - queries to request information from their peer elements (such as the - file size or duration). They can be used both ways within a pipeline, - but upstream queries are more common. - - - - -
- &GStreamer; pipeline with different communication flows - - - - - -
- -
- -
diff --git a/docs/manual/intro-gstreamer.xml b/docs/manual/intro-gstreamer.xml deleted file mode 100644 index 38329e8cd6..0000000000 --- a/docs/manual/intro-gstreamer.xml +++ /dev/null @@ -1,99 +0,0 @@ - - What is &GStreamer;? - - - - - - &GStreamer; is a framework for creating streaming media applications. - The fundamental design comes from the video pipeline at Oregon Graduate - Institute, as well as some ideas from DirectShow. - - - - &GStreamer;'s development framework makes it possible to write any - type of streaming multimedia application. The &GStreamer; framework - is designed to make it easy to write applications that handle audio - or video or both. It isn't restricted to audio and video, and can - process any kind of data flow. - The pipeline design is made to have little overhead above what the - applied filters induce. This makes &GStreamer; a good framework for - designing even high-end audio applications which put high demands on - latency. - - - - One of the most obvious uses of &GStreamer; is using it to build - a media player. &GStreamer; already includes components for building a - media player that can support a very wide variety of formats, including - MP3, Ogg/Vorbis, MPEG-1/2, AVI, Quicktime, mod, and more. &GStreamer;, - however, is much more than just another media player. Its main advantages - are that the pluggable components can be mixed and matched into arbitrary - pipelines so that it's possible to write a full-fledged video or audio - editing application. - - - - The framework is based on plugins that will provide the various codec - and other functionality. The plugins can be linked and arranged in - a pipeline. This pipeline defines the flow of the data. Pipelines can - also be edited with a GUI editor and saved as XML so that pipeline - libraries can be made with a minimum of effort. - - - - The &GStreamer; core function is to provide a framework for plugins, - data flow and media type handling/negotiation. 
It also provides an - API to write applications using the various plugins. - - - - - Specifically, &GStreamer; provides - - an API for multimedia applications - a plugin architecture - a pipeline architecture - a mechanism for media type handling/negotiation - a mechanism for synchronization - over 250 plug-ins providing more than 1000 elements - a set of tools - - - - - &GStreamer; plug-ins could be classified into - - protocols handling - sources: for audio and video (involves protocol plugins) - formats: parsers, formaters, muxers, demuxers, metadata, subtitles - codecs: coders and decoders - filters: converters, mixers, effects, ... - sinks: for audio and video (involves protocol plugins) - - - -
- Gstreamer overview - - - - - -
- - - &GStreamer; is packaged into - - gstreamer: the core package - gst-plugins-base: an essential exemplary set of elements - gst-plugins-good: a set of good-quality plug-ins under LGPL - gst-plugins-ugly: a set of good-quality plug-ins that might pose distribution problems - gst-plugins-bad: a set of plug-ins that need more quality - gst-libav: a set of plug-ins that wrap libav for decoding and encoding - a few others packages - - - - -
diff --git a/docs/manual/intro-motivation.xml b/docs/manual/intro-motivation.xml deleted file mode 100644 index 55571b6a14..0000000000 --- a/docs/manual/intro-motivation.xml +++ /dev/null @@ -1,300 +0,0 @@ - - Design principles - - - -
- Clean and powerful - - &GStreamer; provides a clean interface to: - - - - - The application programmer who wants to build a media pipeline. - The programmer can use an extensive set of powerful tools to create - media pipelines without writing a single line of code. Performing - complex media manipulations becomes very easy. - - - - - The plugin programmer. Plugin programmers are provided a clean and - simple API to create self-contained plugins. An extensive debugging - and tracing mechanism has been integrated. GStreamer also comes with - an extensive set of real-life plugins that serve as examples too. - - - - -
-
- Object oriented - - &GStreamer; adheres to GObject, the GLib 2.0 object model. A programmer - familiar with GLib 2.0 or GTK+ will be - comfortable with &GStreamer;. - - - &GStreamer; uses the mechanism of signals and object properties. - - - All objects can be queried at runtime for their various properties and - capabilities. - - - &GStreamer; intends to be similar in programming methodology to GTK+. - This applies to the object model, ownership of objects, reference - counting, etc. - -
- -
- Extensible - - All &GStreamer; Objects can be extended using the GObject - inheritance methods. - - - All plugins are loaded dynamically and can be extended and upgraded - independently. - -
- -
- Allow binary-only plugins - - Plugins are shared libraries that are loaded at runtime. Since all - the properties of the plugin can be set using the GObject properties, - there is no need (and in fact no way) to have any header files - installed for the plugins. - - - Special care has been taken to make plugins completely self-contained. - All relevant aspects of plugins can be queried at run-time. - -
- -
- High performance - - High performance is obtained by: - - - - - using GLib's GSlice allocator - - - - - extremely light-weight links between plugins. Data can travel - the pipeline with minimal overhead. Data passing between - plugins only involves a pointer dereference in a typical - pipeline. - - - - - providing a mechanism to directly work on the target memory. - A plugin can for example directly write to the X server's - shared memory space. Buffers can also point to arbitrary - memory, such as a sound card's internal hardware buffer. - - - - - refcounting and copy on write minimize usage of memcpy. - Sub-buffers efficiently split buffers into manageable pieces. - - - - - dedicated streaming threads, with scheduling handled by the kernel. - - - - - allowing hardware acceleration by using specialized plugins. - - - - - using a plugin registry with the specifications of the plugins so - that the plugin loading can be delayed until the plugin is actually - used. - - - -
- -
- Clean core/plugins separation - - The core of &GStreamer; is essentially media-agnostic. It only knows - about bytes and blocks, and only contains basic elements. - The core of &GStreamer; is functional enough to even implement - low-level system tools, like cp. - - - All of the media handling functionality is provided by plugins - external to the core. These tell the core how to handle specific - types of media. - -
- -
- Provide a framework for codec experimentation - - &GStreamer; also wants to be an easy framework where codec - developers can experiment with different algorithms, speeding up the - development of open and free multimedia codecs like those developed - by the Xiph.Org - Foundation (such as Theora and Vorbis). - -
- - -
diff --git a/docs/manual/intro-preface.xml b/docs/manual/intro-preface.xml deleted file mode 100644 index f32339ea4e..0000000000 --- a/docs/manual/intro-preface.xml +++ /dev/null @@ -1,93 +0,0 @@ - - - - Who should read this manual? - - This book is about &GStreamer; from an application developer's point of view; it - describes how to write a &GStreamer; application using the &GStreamer; - libraries and tools. For an explanation about writing plugins, we - suggest the Plugin - Writers Guide. - - - Also check out the other documentation available on the &GStreamer; web site. - - - - - - - Preliminary reading - - In order to understand this manual, you need to have a basic - understanding of the C language. - - - Since &GStreamer; adheres to the GObject programming model, this guide - also assumes that you understand the basics of GObject and glib programming. - - Especially, - - GObject instantiation - GObject properties (set/get) - GObject casting - GObject referecing/dereferencing - glib memory management - glib signals and callbacks - glib main loop - - - - - - - - Structure of this manual - - To help you navigate through this guide, it is divided into several large - parts. Each part addresses a particular broad topic concerning &GStreamer; - appliction development. The parts of this guide are laid out in the following - order: - - - - gives you an overview of &GStreamer;, - it's design principles and foundations. - - - - covers the basics of &GStreamer; - application programming. At the end of this part, you should be - able to build your own audio player using &GStreamer; - - - - In , we will move on to advanced - subjects which make &GStreamer; stand out of its competitors. We - will discuss application-pipeline interaction using dynamic parameters - and interfaces, we will discuss threading and threaded pipelines, - scheduling and clocks (and synchronization). 
Most of those topics are - not just there to introduce you to their API, but primarily to give - a deeper insight in solving application programming problems with - &GStreamer; and understanding their concepts. - - - - Next, in , we will go into higher-level - programming APIs for &GStreamer;. You don't exactly need to know all - the details from the previous parts to understand this, but you will - need to understand basic &GStreamer; concepts nevertheless. We will, - amongst others, discuss XML, playbin and autopluggers. - - - - Finally in , you will find some random - information on integrating with GNOME, KDE, OS X or Windows, some - debugging help and general tips to improve and simplify &GStreamer; - programming. - - diff --git a/docs/manual/linked-elements.png b/docs/manual/linked-elements.png deleted file mode 100644 index de75adf215..0000000000 Binary files a/docs/manual/linked-elements.png and /dev/null differ diff --git a/docs/manual/manual.xml b/docs/manual/manual.xml deleted file mode 100644 index ac268b5485..0000000000 --- a/docs/manual/manual.xml +++ /dev/null @@ -1,269 +0,0 @@ - - -%image-entities; - -%version-entities; - -%url-entities; - - - - The code for this example is automatically extracted from - the documentation and built under tests/examples/manual - in the GStreamer tarball. - - -"> - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -GStreamer"> -GStreamer Plugin Writer's Guide"> -]> - - - &TITLEPAGE; - - - -Foreword - - &GStreamer; is an extremely powerful and versatile framework for - creating streaming media applications. Many of the virtues of the - &GStreamer; framework come from its modularity: &GStreamer; can - seamlessly incorporate new plugin modules. But because modularity - and power often come at a cost of greater complexity, writing new - applications is not always easy. 
- - - This guide is intended to help you understand the &GStreamer; - framework (version &GST_VERSION;) so you can develop applications - based on it. The first chapters will focus on development of a - simple audio player, with much effort going into helping you - understand &GStreamer; concepts. Later chapters will go into - more advanced topics related to media playback, but also at - other forms of media processing (capture, editing, etc.). - - - - -Introduction - - &INTRO; - - - - - - - About GStreamer - - - This part gives you an overview of the technologies described in - this book. - - - - &GSTREAMER; - &MOTIVATION; - &CONCEPTS; - - - - - - - Building an Application - - - In these chapters, we will discuss the basic concepts of &GStreamer; - and the most-used objects, such as elements, pads and buffers. We - will use a visual representation of these objects so that we can - visualize the more complex pipelines you will learn to build later - on. You will get a first glance at the &GStreamer; API, which should - be enough for building elementary applications. Later on in this - part, you will also learn to build a basic command-line application. - - - Note that this part will give a look into the low-level API and - concepts of &GStreamer;. Once you're going to build applications, - you might want to use higher-level APIs. Those will be discussed - later on in this manual. - - - - &INIT; - &ELEMENTS; - &BINS; - &BUS; - &PADS; - &DATA; - &HELLOWORLD; - - - - - - - Advanced &GStreamer; concepts - - - In this part we will cover the more advanced features of &GStreamer;. - With the basics you learned in the previous part you should be - able to create a simple application. However, - &GStreamer; provides much more candy than just the basics of playing - back audio files. In this chapter, you will learn more of the - low-level features and internals of &GStreamer;. 
- - - Some parts of this part will serve mostly as an explanation of - how &GStreamer; works internally; they are not actually needed for - actual application development. This includes chapters such as the - ones covering scheduling, autoplugging and synchronization. Other - chapters, however, discuss more advanced ways of - pipeline-application interaction, and can turn out to be very useful - for certain applications. This includes the chapters on metadata, - querying and events, interfaces, dynamic parameters and pipeline - data manipulation. - - - - &QUERYEVENTS; - &METADATA; - &INTERFACES; - &CLOCKS; - &BUFFERING; - &DPARAMS; - &THREADS; - &AUTOPLUGGING; - &DATAACCESS; - - - - - - - Higher-level interfaces for &GStreamer; applications - - - In the previous two parts, you have learned many of the internals - and their corresponding low-level interfaces into &GStreamer; - application programming. Many people will, however, not need so - much control (and as much code), but will prefer to use a standard - playback interface that does most of the difficult internals for - them. In this chapter, we will introduce you into the concept of - autopluggers, playback managing elements and other such things. - Those higher-level interfaces are intended to - simplify &GStreamer;-based application programming. They do, however, - also reduce the flexibility. It is up to the application developer - to choose which interface he will want to use. - - - - &PLAYBACK; - - - - - - - Appendices - - - By now, you've learned all about the internals of &GStreamer; and - application programming using the &GStreamer; framework. This part - will go into some random bits that are useful to know if you're - going to use &GStreamer; for serious application programming. 
It - will touch upon things related to integration with popular desktop - environments that we run on (GNOME, KDE, OS X, Windows), it will - shortly explain how applications included with &GStreamer; can help - making your life easier, and some information on debugging. - - - In addition, we also provide a porting guide which will explain - easily how to port &GStreamer;-0.10 applications to &GStreamer;-1.0. - - - - - - &PROGRAMS; - &COMPILING; - &CHECKLIST; - &PORTING; - &INTEGRATION; - &LICENSING; - "ES; - - - diff --git a/docs/manual/mime-world.png b/docs/manual/mime-world.png deleted file mode 100644 index f1e51f2ccc..0000000000 Binary files a/docs/manual/mime-world.png and /dev/null differ diff --git a/docs/manual/outline.txt b/docs/manual/outline.txt deleted file mode 100644 index c05b512f9f..0000000000 --- a/docs/manual/outline.txt +++ /dev/null @@ -1,92 +0,0 @@ -Overview - Introduction - (creating multimedia apps) - (pipeline/plugin based) - - Motivation - (multitude of duplicate code) - (mostly focused on one goal) - (reinvent plugin mechanisms) - (network transparency?) - (catch up with Windows(tm) world) - - Goals - (clean and powerfull) - (building graphs) - (building plugins) - (object oriented) - (using GTK+ object model) - (extensible) - (alow binary only plugins) - (alow high performance) - (HW acceleration) - (efficient memory use) - (kernel buffers etc..) 
- -Basic concepts - elements - (what is it) - (types) sink, src, filter - (have pads) - linking elements - bin - (can contain elements) - pipeline (a complete graph) - thread (theaded operation) - buffers - (pass between elements) - (contains data) - (can cary metadata) - (use refcounting) - element states - (null) - (ready) - (paused) - (playing) - -Building apps - helloworld - (fdsrc->mp3decoder->audiosink) - (step by step explanation) - More on factories - problems with helloworld - MIME types - GStreamer types - Basic types - Your second application - - - -advanced concepts - threads - queues - cothreads - dynamic pipeline construction - ghost pads - type detection - utility functions - -XML in GStreamer - (saving) - (loading a pipeline) - -Plugin development - plugin types - chain based - loop based - buffers - metadata - subbuffers - adding pads - libraries - plugin registry - types - type detection - QoS messages - clocks - -GStreamer programs - editor - gstplay - - diff --git a/docs/manual/simple-player.png b/docs/manual/simple-player.png deleted file mode 100644 index b3b3ca1c50..0000000000 Binary files a/docs/manual/simple-player.png and /dev/null differ diff --git a/docs/manual/sink-element.png b/docs/manual/sink-element.png deleted file mode 100644 index 6a4eaa1a37..0000000000 Binary files a/docs/manual/sink-element.png and /dev/null differ diff --git a/docs/manual/src-element.png b/docs/manual/src-element.png deleted file mode 100644 index 27699e05b4..0000000000 Binary files a/docs/manual/src-element.png and /dev/null differ diff --git a/docs/manual/state-diagram.svg b/docs/manual/state-diagram.svg deleted file mode 100644 index 1be6d42a15..0000000000 --- a/docs/manual/state-diagram.svg +++ /dev/null @@ -1,233 +0,0 @@ - - - - - - - - - - image/svg+xml - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - NULL - - READY - - PLAYING - - PAUSED - - - diff --git a/docs/manual/thread-buffering.png 
b/docs/manual/thread-buffering.png deleted file mode 100644 index 006c602f81..0000000000 Binary files a/docs/manual/thread-buffering.png and /dev/null differ diff --git a/docs/manual/thread-synchronizing.png b/docs/manual/thread-synchronizing.png deleted file mode 100644 index 99c0a7bf3d..0000000000 Binary files a/docs/manual/thread-synchronizing.png and /dev/null differ diff --git a/docs/manual/titlepage.xml b/docs/manual/titlepage.xml deleted file mode 100644 index 3883384a4e..0000000000 --- a/docs/manual/titlepage.xml +++ /dev/null @@ -1,69 +0,0 @@ - - - - - Wim - Taymans - - - wim.taymans@chello.be - - - - - - Steve - Baker - - - stevebaker_org@yahoo.co.uk - - - - - - Andy - Wingo - - - wingo@pobox.com - - - - - - Ronald - S. - Bultje - - - rbultje@ronald.bitfreak.net - - - - - - Stefan - Kost - - - ensonic@users.sf.net - - - - - - - - This material may be distributed only subject to the terms and - conditions set forth in the Open Publication License, v1.0 or later (the - latest version is presently available at http://www.opencontent.org/opl.shtml). 
- - - - &GStreamer; Application Development Manual (&GST_VERSION;) - - - diff --git a/docs/pwg/.gitignore b/docs/pwg/.gitignore deleted file mode 100644 index db6eb2ffb1..0000000000 --- a/docs/pwg/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -Makefile -Makefile.in -.deps -build -html -*.pdf -*.ps diff --git a/docs/pwg/Makefile.am b/docs/pwg/Makefile.am deleted file mode 100644 index 0228a313b0..0000000000 --- a/docs/pwg/Makefile.am +++ /dev/null @@ -1,38 +0,0 @@ -### this is the part you can customize if you need to - -# base name of doc -DOC = pwg -# formats defined for upload-doc.mak -FORMATS=html ps pdf - -# main xml file -MAIN = $(DOC).xml -# all xml sources -XML = $(notdir $(wildcard $(srcdir)/*.xml)) -# base style sheet -CSS = base.css - -# image sources -PNG_SRC = -FIG_SRC = $(notdir $(wildcard $(srcdir)/*.fig)) - -# extra sources to copy in build directory -EXTRA_SRC = - -### this is the generic bit and you shouldn't need to change this - -# get the generic docbuilding Makefile stuff -include $(srcdir)/../manuals.mak -# get the generic upload target -include $(top_srcdir)/common/upload-doc.mak - -### this is standard automake stuff - -# package up all the source -EXTRA_DIST = $(SRC) - -# install documentation -pwgdir = $(docdir)/$(DOC) -pwg_DATA = $(PDF_DAT) $(PS_DAT) - -include $(srcdir)/../htmlinstall.mak diff --git a/docs/pwg/advanced-allocation.xml b/docs/pwg/advanced-allocation.xml deleted file mode 100644 index e7346bd36f..0000000000 --- a/docs/pwg/advanced-allocation.xml +++ /dev/null @@ -1,817 +0,0 @@ - - Memory allocation - - Memory allocation and management is a very important topic in - multimedia. High definition video uses many megabytes to store - one single frame of video. It is important to reuse the memory - when possible instead of constantly allocating and freeing - the memory. - - - Multimedia systems usually use special purpose chips, such as - DSPs or GPUs to perform the heavy lifting (especially for video). 
- These special purpose chips have usually strict requirements - for the memory that they can operate on and how the memory - is accessed. - - - This chapter talks about the memory management features that - &GStreamer; plugins can use. We will first talk about the - lowlevel GstMemory object that manages - access to a piece of memory. We then continue with - GstBuffer that is used to exchange data - between plugins (and the application) and that uses - GstMemory. We talk about - GstMeta that can be placed on buffers to - give extra info about the buffer and its memory. - For efficiently managing buffers of the same size, we take a - look at GstBufferPool. To conclude this - chapter we take a look at the GST_QUERY_ALLOCATION query that - is used to negotiate memory management options between elements. - - - - GstMemory - - GstMemory is an object that manages a region - of memory. The memory object points to a region of memory of - maxsize. The area in this memory starting at - offset and for size bytes is the - accessible region in the memory. the maxsize of the memory can - never be changed after the object is created, however, the offset - and size can be changed. - - - GstAllocator - - GstMemory objects are created by a - GstAllocator object. Most allocators implement the - default gst_allocator_alloc() method but some allocator - might implement a different method, for example when additional parameters - are needed to allocate the specific memory. - - - Different allocators exist for, for example, system memory, shared memory - and memory backed by a DMAbuf file descriptor. To implement support for a - new kind of memory type, you must implement a new allocator object as shown - below. - - - - GstMemory API example - - Data access to the memory wrapped by the GstMemory - object is always protected with a gst_memory_map() - and gst_memory_unmap() pair. An access mode - (read/write) must be given when mapping memory. 
The map - function returns a pointer to the valid memory region that can - then be accessed according to the requested access mode. - - - Below is an example of making a GstMemory - object and using the gst_memory_map() to - access the memory region. - - - - - - - - Implementing a GstAllocator - - WRITEME - - - - - - - GstBuffer - - A GstBuffer is an lightweight object that - is passed from an upstream to a downstream element and contains - memory and metadata. It represents the multimedia content that - is pushed or pull downstream by elements. - - - The buffer contains one or more GstMemory - objects that represent the data in the buffer. - - - Metadata in the buffer consists of: - - - - - DTS and PTS timestamps. These represent the decoding and - presentation timestamps of the buffer content and is used by - synchronizing elements to schedule buffers. Both these timestamps - can be GST_CLOCK_TIME_NONE when unknown/undefined. - - - - - The duration of the buffer contents. This duration can be - GST_CLOCK_TIME_NONE when unknown/undefined. - - - - - Media specific offsets and offset_end. For video this is the - frame number in the stream and for audio the sample number. Other - definitions for other media exist. - - - - - Arbitrary structures via GstMeta, see below. - - - - - - GstBuffer writability - - A buffer is writable when the refcount of the object is exactly 1, meaning - that only one object is holding a ref to the buffer. You can only - modify anything in the buffer when the buffer is writable. This means - that you need to call gst_buffer_make_writable() - before changing the timestamps, offsets, metadata or adding and - removing memory blocks. - - - - GstBuffer API examples - - You can create a buffer with gst_buffer_new () - and then add memory objects to it or you can use a convenience function - gst_buffer_new_allocate () which combines the - two. 
It's also possible to wrap existing memory with - gst_buffer_new_wrapped_full () where you can - give the function to call when the memory should be freed. - - - You can access the memory of the buffer by getting and mapping the - GstMemory objects individually or by using - gst_buffer_map (). The latter merges all the - memory into one big block and then gives you a pointer to this block. - - - Below is an example of how to create a buffer and access its memory. - - - - - - - - - GstMeta - - With the GstMeta system you can add arbitrary - structures on buffers. These structures describe extra properties - of the buffer such as cropping, stride, region of interest etc. - - - The metadata system separates API specification (what the metadata - and its API look like) and the implementation (how it works). This makes - it possible to make different implementations of the same API, - for example, depending on the hardware you are running on. - - - - GstMeta API example - - After allocating a new buffer, you can add metadata to the buffer - with the metadata specific API. This means that you will need to - link to the header file where the metadata is defined to use - its API. - - - By convention, a metadata API with name FooBar - should provide two methods, a - gst_buffer_add_foo_bar_meta () and a - gst_buffer_get_foo_bar_meta (). Both functions - should return a pointer to a FooBarMeta - structure that contains the metadata fields. Some of the - _add_*_meta () can have extra parameters that - will usually be used to configure the metadata structure for you. - - - Let's have a look at the metadata that is used to specify a cropping - region for video frames. - - - - -[...] - GstVideoCropMeta *meta; - - /* buffer points to a video frame, add some cropping metadata */ - meta = gst_buffer_add_video_crop_meta (buffer); - - /* configure the cropping metadata */ - meta->x = 8; - meta->y = 8; - meta->width = 120; - meta->height = 80; -[...] 
-]]> - - - An element can then use the metadata on the buffer when rendering - the frame like this: - - - - -[...] - GstVideoCropMeta *meta; - - /* buffer points to a video frame, get the cropping metadata */ - meta = gst_buffer_get_video_crop_meta (buffer); - - if (meta) { - /* render frame with cropping */ - _render_frame_cropped (buffer, meta->x, meta->y, meta->width, meta->height); - } else { - /* render frame */ - _render_frame (buffer); - } -[...] - -]]> - - - - - Implementing new GstMeta - - In the next sections we show how you can add new metadata to the - system and use it on buffers. - - - - Define the metadata API - - First we need to define what our API will look like and we - will have to register this API to the system. This is important - because this API definition will be used when elements negotiate - what kind of metadata they will exchange. The API definition - also contains arbitrary tags that give hints about what the - metadata contains. This is important when we see how metadata - is preserved when buffers pass through the pipeline. - - - If you are making a new implementation of an existing API, - you can skip this step and move on to the implementation step. - - - First we start with making the - my-example-meta.h header file that will contain - the definition of the API and structure for our metadata. - - - - -typedef struct _MyExampleMeta MyExampleMeta; - -struct _MyExampleMeta { - GstMeta meta; - - gint age; - gchar *name; -}; - -GType my_example_meta_api_get_type (void); -#define MY_EXAMPLE_META_API_TYPE (my_example_meta_api_get_type()) - -#define gst_buffer_get_my_example_meta(b) \ - ((MyExampleMeta*)gst_buffer_get_meta((b),MY_EXAMPLE_META_API_TYPE)) -]]> - - - The metadata API definition consists of the definition of the - structure that holds a gint and a string. The first field in - the structure must be GstMeta. - - - We also define a my_example_meta_api_get_type () - function that will register out metadata API definition. 
We - also define a convenience macro - gst_buffer_get_my_example_meta () that simply - finds and returns the metadata with our new API. - - - Next let's have a look at how the - my_example_meta_api_get_type () function is - implemented in the my-example-meta.c file. - - - - - - As you can see, it simply uses the - gst_meta_api_type_register () function to - register a name for the api and some tags. The result is a - new pointer GType that defines the newly registered API. - - - - - Implementing a metadata API - - Next we can make an implementation for a registered metadata - API GType. The implementation detail of a metadata API - are kept in a GstMetaInfo structure - that you will make available to the users of your metadata - API implementation with a my_example_meta_get_info () - function and a convenience MY_EXAMPLE_META_INFO - macro. You will also make a method to add your metadata - implementation to a GstBuffer. - Your my-example-meta.h header file will - need these additions: - - - - - - Let's have a look at how these functions are - implemented in the my-example-meta.c file. 
- - -age = 0; - emeta->name = NULL; - - return TRUE; -} - -static gboolean -my_example_meta_transform (GstBuffer * transbuf, GstMeta * meta, - GstBuffer * buffer, GQuark type, gpointer data) -{ - MyExampleMeta *emeta = (MyExampleMeta *) meta; - - /* we always copy no matter what transform */ - gst_buffer_add_my_example_meta (transbuf, emeta->age, emeta->name); - - return TRUE; -} - -static void -my_example_meta_free (GstMeta * meta, GstBuffer * buffer) -{ - MyExampleMeta *emeta = (MyExampleMeta *) meta; - - g_free (emeta->name); - emeta->name = NULL; -} - -const GstMetaInfo * -my_example_meta_get_info (void) -{ - static const GstMetaInfo *meta_info = NULL; - - if (g_once_init_enter (&meta_info)) { - const GstMetaInfo *mi = gst_meta_register (MY_EXAMPLE_META_API_TYPE, - "MyExampleMeta", - sizeof (MyExampleMeta), - my_example_meta_init, - my_example_meta_free, - my_example_meta_transform); - g_once_init_leave (&meta_info, mi); - } - return meta_info; -} - -MyExampleMeta * -gst_buffer_add_my_example_meta (GstBuffer *buffer, - gint age, - const gchar *name) -{ - MyExampleMeta *meta; - - g_return_val_if_fail (GST_IS_BUFFER (buffer), NULL); - - meta = (MyExampleMeta *) gst_buffer_add_meta (buffer, - MY_EXAMPLE_META_INFO, NULL); - - meta->age = age; - meta->name = g_strdup (name); - - return meta; -} -]]> - - - gst_meta_register () registers the implementation - details, like the API that you implement and the size of the - metadata structure along with methods to initialize and free the - memory area. You can also implement a transform function that will - be called when a certain transformation (identified by the quark and - quark specific data) is performed on a buffer. - - - Lastly, you implement a gst_buffer_add_*_meta() - that adds the metadata implementation to a buffer and sets the - values of the metadata. - - - - - - - - GstBufferPool - - The GstBufferPool object provides a convenient - base class for managing lists of reusable buffers. 
Essential for this - object is that all the buffers have the same properties such as size, - padding, metadata and alignment. - - - A bufferpool object can be configured to manage a minimum and maximum - amount of buffers of a specific size. A bufferpool can also be - configured to use a specific GstAllocator for - the memory of the buffers. There is support in the bufferpool to enable - bufferpool specific options, such as adding GstMeta - to the buffers in the pool or such as enabling specific padding on - the memory in the buffers. - - - A Bufferpool can be inactivate and active. In the inactive state, - you can configure the pool. In the active state, you can't change - the configuration anymore but you can acquire and release buffers - from/to the pool. - - - In the following sections we take a look at how you can use - a bufferpool. - - - - GstBufferPool API example - - Many different bufferpool implementations can exist; they are all - subclasses of the base class GstBufferPool. - For this example, we will assume we somehow have access to a - bufferpool, either because we created it ourselves or because - we were given one as a result of the ALLOCATION query as we will - see below. - - - The bufferpool is initially in the inactive state so that we can - configure it. Trying to configure a bufferpool that is not in the - inactive state will fail. Likewise, trying to activate a bufferpool - that is not configured will fail. - - - - - - The configuration of the bufferpool is maintained in a generic - GstStructure that can be obtained with - gst_buffer_pool_get_config(). Convenience - methods exist to get and set the configuration options in this - structure. After updating the structure, it is set as the current - configuration in the bufferpool again with - gst_buffer_pool_set_config(). - - - The following options can be configured on a bufferpool: - - - - - The caps of the buffers to allocate. - - - - - The size of the buffers. 
This is the suggested size of the - buffers in the pool. The pool might decide to allocate larger - buffers to add padding. - - - - - The minimum and maximum amount of buffers in the pool. When - minimum is set to > 0, the bufferpool will pre-allocate this - amount of buffers. When maximum is not 0, the bufferpool - will allocate up to maximum amount of buffers. - - - - - The allocator and parameters to use. Some bufferpools might - ignore the allocator and use its internal one. - - - - - Other arbitrary bufferpool options identified with a string. - a bufferpool lists the supported options with - gst_buffer_pool_get_options() and you - can ask if an option is supported with - gst_buffer_pool_has_option(). The option - can be enabled by adding it to the configuration structure - with gst_buffer_pool_config_add_option (). - These options are used to enable things like letting the - pool set metadata on the buffers or to add extra configuration - options for padding, for example. - - - - - After the configuration is set on the bufferpool, the pool can - be activated with - gst_buffer_pool_set_active (pool, TRUE). From - that point on you can use - gst_buffer_pool_acquire_buffer () to retrieve - a buffer from the pool, like this: - - - - - - It is important to check the return value of the acquire function - because it is possible that it fails: When your - element shuts down, it will deactivate the bufferpool and then - all calls to acquire will return GST_FLOW_FLUSHNG. - - - All buffers that are acquired from the pool will have their pool - member set to the original pool. When the last ref is decremented - on the buffer, &GStreamer; will automatically call - gst_buffer_pool_release_buffer() to release - the buffer back to the pool. You (or any other downstream element) - don't need to know if a buffer came from a pool, you can just - unref it. 
- - - - - Implementing a new GstBufferPool - - WRITEME - - - - - - - GST_QUERY_ALLOCATION - - The ALLOCATION query is used to negotiate - GstMeta, GstBufferPool - and GstAllocator between elements. Negotiation - of the allocation strategy is always initiated and decided by a srcpad - after it has negotiated a format and before it decides to push buffers. - A sinkpad can suggest an allocation strategy but it is ultimately the - source pad that will decide based on the suggestions of the downstream - sink pad. - - - The source pad will do a GST_QUERY_ALLOCATION with the negotiated caps - as a parameter. This is needed so that the downstream element knows - what media type is being handled. A downstream sink pad can answer the - allocation query with the following results: - - - - - An array of possible GstBufferPool suggestions - with suggested size, minimum and maximum amount of buffers. - - - - - An array of GstAllocator objects along with suggested allocation - parameters such as flags, prefix, alignment and padding. These - allocators can also be configured in a bufferpool when this is - supported by the bufferpool. - - - - - An array of supported GstMeta implementations - along with metadata specific parameters. - It is important that the upstream element knows what kind of - metadata is supported downstream before it places that metadata - on buffers. - - - - - When the GST_QUERY_ALLOCATION returns, the source pad will select - from the available bufferpools, allocators and metadata how it will - allocate buffers. - - - - ALLOCATION query example - - Below is an example of the ALLOCATION query. - - - -#include -#include - - GstCaps *caps; - GstQuery *query; - GstStructure *structure; - GstBufferPool *pool; - GstStructure *config; - guint size, min, max; - -[...] 
- - /* find a pool for the negotiated caps now */ - query = gst_query_new_allocation (caps, TRUE); - - if (!gst_pad_peer_query (scope->srcpad, query)) { - /* query failed, not a problem, we use the query defaults */ - } - - if (gst_query_get_n_allocation_pools (query) > 0) { - /* we got configuration from our peer, parse them */ - gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max); - } else { - pool = NULL; - size = 0; - min = max = 0; - } - - if (pool == NULL) { - /* we did not get a pool, make one ourselves then */ - pool = gst_video_buffer_pool_new (); - } - - config = gst_buffer_pool_get_config (pool); - gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META); - gst_buffer_pool_config_set_params (config, caps, size, min, max); - gst_buffer_pool_set_config (pool, config); - - /* and activate */ - gst_buffer_pool_set_active (pool, TRUE); - -[...] -]]> - - - This particular implementation will make a custom - GstVideoBufferPool object that is specialized - in allocating video buffers. You can also enable the pool to - put GstVideoMeta metadata on the buffers from - the pool doing - gst_buffer_pool_config_add_option (config, - GST_BUFFER_POOL_OPTION_VIDEO_META). - - - - - The ALLOCATION query in base classes - - In many baseclasses you will see the following virtual methods for - influencing the allocation strategy: - - - - - propose_allocation () should suggest - allocation parameters for the upstream element. - - - - - decide_allocation () should decide the - allocation parameters from the suggestions received from - downstream. - - - - - Implementors of these methods should modify the given - GstQuery object by updating the pool options - and allocation options. 
- - - - diff --git a/docs/pwg/advanced-clock.xml b/docs/pwg/advanced-clock.xml deleted file mode 100644 index a0ffc4f836..0000000000 --- a/docs/pwg/advanced-clock.xml +++ /dev/null @@ -1,173 +0,0 @@ - - Clocking - - - When playing complex media, each sound and video sample must be played in a - specific order at a specific time. For this purpose, GStreamer provides a - synchronization mechanism. - - - - Clocks - - Time in &GStreamer; is defined as the value returned from a particular - GstClock object from the method - gst_clock_get_time (). - - - In a typical computer, there are many sources that can be used as a - time source, e.g., the system time, soundcards, CPU performance - counters, ... For this reason, there are many - GstClock implementations available in &GStreamer;. - The clock time doesn't always start from 0 or from some known value. - Some clocks start counting from some known start date, other clocks start - counting since last reboot, etc... - - - As clocks return an absolute measure of time, they are not usually used - directly. Instead, differences between two clock times are used to - measure elapsed time according to a clock. - - - - - Clock running-time - - A clock returns the absolute-time - according to that clock with gst_clock_get_time (). - From the absolute-time is a running-time - calculated, which is simply the difference between a previous snapshot - of the absolute-time called the base-time. - So: - - - running-time = absolute-time - base-time - - - A &GStreamer; GstPipeline object maintains a - GstClock object and a base-time when it goes - to the PLAYING state. The pipeline gives a handle to the selected - GstClock to each element in the pipeline along - with selected base-time. The pipeline will select a base-time in such - a way that the running-time reflects the total time spent in the - PLAYING state. As a result, when the pipeline is PAUSED, the - running-time stands still. 
- - - Because all objects in the pipeline have the same clock and base-time, - they can thus all calculate the running-time according to the pipeline - clock. - - - - - Buffer running-time - - To calculate a buffer running-time, we need a buffer timestamp and - the SEGMENT event that preceded the buffer. First we can convert - the SEGMENT event into a GstSegment object - and then we can use the - gst_segment_to_running_time () function to - perform the calculation of the buffer running-time. - - - Synchronization is now a matter of making sure that a buffer with a - certain running-time is played when the clock reaches the same - running-time. Usually this task is done by sink elements. Sink also - have to take into account the latency configured in the pipeline and - add this to the buffer running-time before synchronizing to the - pipeline clock. - - - - - - - Obligations of each element. - - - - Let us clarify the contract between GStreamer and each element in the - pipeline. - - - - Non-live source elements - - Non-live source elements must place a timestamp in each buffer that - they deliver when this is possible. They must choose the timestamps - and the values of the SEGMENT event in such a way that the - running-time of the buffer starts from 0. - - - Some sources, such as filesrc, is not able to generate timestamps - on all buffers. It can and must however create a timestamp on the - first buffer (with a running-time of 0). - - - The source then pushes out the SEGMENT event followed by the - timestamped buffers. - - - - - Live source elements - - Live source elements must place a timestamp in each buffer that - they deliver. They must choose the timestamps and the values of the - SEGMENT event in such a way that the running-time of the buffer - matches exactly the running-time of the pipeline clock when the first - byte in the buffer was captured. 
- - - - - Parser/Decoder/Encoder elements - - Parser/Decoder elements must use the incoming timestamps and transfer - those to the resulting output buffers. They are allowed to interpolate - or reconstruct timestamps on missing input buffers when they can. - - - - - Demuxer elements - - Demuxer elements can usually set the timestamps stored inside the media - file onto the outgoing buffers. They need to make sure that outgoing - buffers that are to be played at the same time have the same - running-time. Demuxers also need to take into account the incoming - timestamps on buffers and use that to calculate an offset on the outgoing - buffer timestamps. - - - - - Muxer elements - - Muxer elements should use the incoming buffer running-time to mux the - different streams together. They should copy the incoming running-time - to the outgoing buffers. - - - - - Sink elements - - If the element is intended to emit samples at a specific time (real time - playing), the element should require a clock, and thus implement the - method set_clock. - - - The sink should then make sure that the sample with running-time is played - exactly when the pipeline clock reaches that running-time + latency. - Some elements might use the clock API such as - gst_clock_id_wait() - to perform this action. Other sinks might need to use other means of - scheduling timely playback of the data. - - - - - diff --git a/docs/pwg/advanced-dparams.xml b/docs/pwg/advanced-dparams.xml deleted file mode 100644 index 4d78c30e8b..0000000000 --- a/docs/pwg/advanced-dparams.xml +++ /dev/null @@ -1,108 +0,0 @@ - - - - - Supporting Dynamic Parameters - - Warning, this part describes 0.10 and is outdated. - - - Sometimes object properties are not powerful enough to control the - parameters that affect the behaviour of your element. - When this is the case you can mark these parameters as being Controllable. 
- Aware applications can use the controller subsystem to dynamically adjust - the property values over time. - - - - Getting Started - - - The controller subsystem is contained within the - gstcontroller library. You need to include the header in - your element's source file: - - -... -#include <gst/gst.h> -#include <gst/controller/gstcontroller.h> -... - - - - Even though the gstcontroller library may be linked into - the host application, you should make sure it is initialized in your - plugin_init function: - - - static gboolean - plugin_init (GstPlugin *plugin) - { - ... - /* initialize library */ - gst_controller_init (NULL, NULL); - ... - } - - - It makes no sense for all GObject parameter to be real-time controlled. - Therefore the next step is to mark controllable parameters. - This is done by using the special flag GST_PARAM_CONTROLLABLE. - when setting up GObject params in the _class_init method. - - - g_object_class_install_property (gobject_class, PROP_FREQ, - g_param_spec_double ("freq", "Frequency", "Frequency of test signal", - 0.0, 20000.0, 440.0, - G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS)); - - - - - - The Data Processing Loop - - - In the last section we learned how to mark GObject params as controllable. - Application developers can then queue parameter changes for these parameters. - The approach the controller subsystem takes is to make plugins responsible - for pulling the changes in. This requires just one action: - - - gst_object_sync_values(element,timestamp); - - - This call makes all parameter-changes for the given timestamp active by - adjusting the GObject properties of the element. Its up to the element to - determine the synchronisation rate. - - - - The Data Processing Loop for Video Elements - - For video processing elements it is the best to synchronise for every frame. 
- That means one would add the gst_object_sync_values() - call described in the previous section to the data processing function of - the element. - - - - - The Data Processing Loop for Audio Elements - - For audio processing elements the case is not as easy as for video - processing elements. The problem here is that audio has a much higher rate. - For PAL video one will e.g. process 25 full frames per second, but for - standard audio it will be 44100 samples. - It is rarely useful to synchronise controllable parameters that often. - The easiest solution is also to have just one synchronisation call per - buffer processing. This makes the control-rate depend on the buffer - size. - - - Elements that need a specific control-rate need to break their data - processing loop to synchronise every n-samples. - - - - diff --git a/docs/pwg/advanced-events.xml b/docs/pwg/advanced-events.xml deleted file mode 100644 index 5471ace1e6..0000000000 --- a/docs/pwg/advanced-events.xml +++ /dev/null @@ -1,442 +0,0 @@ - - Events: Seeking, Navigation and More - - There are many different event types but only two ways they can travel in - the pipeline: downstream or upstream. It is very important to understand - how both of these methods work because if one element in the pipeline is not - handling them correctly the whole event system of the pipeline is broken. - We will try to explain here how these methods work and how elements are - supposed to implement them. - - - Downstream events - - Downstream events are received through the sink pad's event handler, - as set using gst_pad_set_event_function () when - the pad was created. - - - Downstream events can travel in two ways: they can be in-band (serialised - with the buffer flow) or out-of-band (travelling through the pipeline - instantly, possibly not in the same thread as the streaming thread that - is processing the buffers, skipping ahead of buffers being processed - or queued in the pipeline). 
The most common downstream events - (SEGMENT, CAPS, TAG, EOS) are all serialised with the buffer flow. - - - Here is a typical event function: - - -static gboolean -gst_my_filter_sink_event (GstPad *pad, GstObject * parent, GstEvent * event) -{ - GstMyFilter *filter; - gboolean ret; - - filter = GST_MY_FILTER (parent); - ... - - switch (GST_EVENT_TYPE (event)) { - case GST_EVENT_SEGMENT: - /* maybe save and/or update the current segment (e.g. for output - * clipping) or convert the event into one in a different format - * (e.g. BYTES to TIME) or drop it and set a flag to send a segment - * event in a different format later */ - ret = gst_pad_push_event (filter->src_pad, event); - break; - case GST_EVENT_EOS: - /* end-of-stream, we should close down all stream leftovers here */ - gst_my_filter_stop_processing (filter); - ret = gst_pad_push_event (filter->src_pad, event); - break; - case GST_EVENT_FLUSH_STOP: - gst_my_filter_clear_temporary_buffers (filter); - ret = gst_pad_push_event (filter->src_pad, event); - break; - default: - ret = gst_pad_event_default (pad, parent, event); - break; - } - - ... - return ret; -} - - - If your element is chain-based, you will almost always have to implement - a sink event function, since that is how you are notified about - segments, caps and the end of the stream. - - - If your element is exclusively loop-based, you may or may not want a - sink event function (since the element is driving the pipeline it will - know the length of the stream in advance or be notified by the flow - return value of gst_pad_pull_range(). In some cases - even loop-based element may receive events from upstream though (for - example audio decoders with an id3demux or apedemux element in front of - them, or demuxers that are being fed input from sources that send - additional information about the stream in custom events, as DVD sources - do). 
- - - - Upstream events - - Upstream events are generated by an element somewhere downstream in - the pipeline (example: a video sink may generate navigation - events that informs upstream elements about the current position of - the mouse pointer). This may also happen indirectly on request of the - application, for example when the application executes a seek on a - pipeline this seek request will be passed on to a sink element which - will then in turn generate an upstream seek event. - - - The most common upstream events are seek events, Quality-of-Service - (QoS) and reconfigure events. - - - An upstream event can be sent using the - gst_pad_send_event function. This - function simply call the default event handler of that pad. The default - event handler of pads is gst_pad_event_default, and - it basically sends the event to the peer of the internally linked pad. - So upstream events always arrive on the src pad of your element and are - handled by the default event handler except if you override that handler - to handle it yourself. There are some specific cases where you have to - do that : - - - - - If you have multiple sink pads in your element. In that case you will - have to decide which one of the sink pads you will send the event to - (if not all of them). - - - - - If you need to handle that event locally. For example a navigation - event that you will want to convert before sending it upstream, or - a QoS event that you want to handle. - - - - - The processing you will do in that event handler does not really matter - but there are important rules you have to absolutely respect because - one broken element event handler is breaking the whole pipeline event - handling. Here they are : - - - - - Always handle events you won't handle using the default - gst_pad_event_default method. This method will - depending on the event, forward the event or drop it. 
- - - - - If you are generating some new event based on the one you received - don't forget to gst_event_unref the event you received. - - - - - Event handler function are supposed to return TRUE or FALSE indicating - if the event has been handled or not. Never simply return TRUE/FALSE - in that handler except if you really know that you have handled that - event. - - - - - Remember that the event handler might be called from a different - thread than the streaming thread, so make sure you use - appropriate locking everywhere. - - - - - - - All Events Together - - In this chapter follows a list of all defined events that are currently - being used, plus how they should be used/interpreted. You can check the - what type a certain event is using the GST_EVENT_TYPE macro (or if you - need a string for debugging purposes you can use GST_EVENT_TYPE_NAME). - - - In this chapter, we will discuss the following events: - - - - - - - - - - - - - - - - - For more comprehensive information about events and how they should be - used correctly in various circumstances please consult the GStreamer - design documentation. This section only gives a general overview. - - - - Stream Start - - WRITEME - - - - - Caps - - The CAPS event contains the format description of the following - buffers. See for more - information about negotiation. - - - - - Segment - - A segment event is sent downstream to announce the range of valid - timestamps in the stream and how they should be transformed into - running-time and stream-time. A segment event must always be sent - before the first buffer of data and after a flush (see above). - - - The first segment event is created by the element driving the - pipeline, like a source operating in push-mode or a demuxer/decoder - operating pull-based. 
This segment event then travels down the - pipeline and may be transformed on the way (a decoder, for example, - might receive a segment event in BYTES format and might transform - this into a segment event in TIMES format based on the average - bitrate). - - - Depending on the element type, the event can simply be forwarded using - gst_pad_event_default (), or it should be parsed - and a modified event should be sent on. The last is true for demuxers, - which generally have a byte-to-time conversion concept. Their input - is usually byte-based, so the incoming event will have an offset in - byte units (GST_FORMAT_BYTES), too. Elements - downstream, however, expect segment events in time units, so that - it can be used to synchronize against the pipeline clock. Therefore, - demuxers and similar elements should not forward the event, but parse - it, free it and send a segment event (in time units, - GST_FORMAT_TIME) further downstream. - - - The segment event is created using the function - gst_event_new_segment (). See the API - reference and design document for details about its parameters. - - - Elements parsing this event can use gst_event_parse_segment() - to extract the event details. Elements may find the GstSegment - API useful to keep track of the current segment (if they want to use - it for output clipping, for example). - - - - - Tag (metadata) - - Tagging events are being sent downstream to indicate the tags as parsed - from the stream data. This is currently used to preserve tags during - stream transcoding from one format to the other. Tags are discussed - extensively in . Most - elements will simply forward the event by calling - gst_pad_event_default (). - - - The tag event is created using the function - gst_event_new_tag (), but more often elements will - send a tag event downstream that will be converted into a message - on the bus by sink elements. 
- All of these functions require a filled-in taglist as - argument, which they will take ownership of. - - - Elements parsing this event can use the function - gst_event_parse_tag () to acquire the - taglist that the event contains. - - - - - End of Stream (EOS) - - End-of-stream events are sent if the stream that an element sends out - is finished. An element receiving this event (from upstream, so it - receives it on its sinkpad) will generally just process any buffered - data (if there is any) and then forward the event further downstream. - The gst_pad_event_default () takes care of all - this, so most elements do not need to support this event. Exceptions are - elements that explicitly need to close a resource down on EOS, and - N-to-1 elements. Note that the stream itself is not - a resource that should be closed down on EOS! Applications might seek - back to a point before EOS and continue playing again. - - - The EOS event has no properties, which makes it one of the simplest - events in &GStreamer;. It is created using the - gst_event_new_eos() function. - - - It is important to note that only elements driving the - pipeline should ever send an EOS event. If your element - is chain-based, it is not driving the pipeline. Chain-based elements - should just return GST_FLOW_EOS from their chain function at - the end of the stream (or the configured segment), the upstream - element that is driving the pipeline will then take care of - sending the EOS event (or alternatively post a SEGMENT_DONE message - on the bus depending on the mode of operation). If you are implementing - your own source element, you also do not need to ever manually send - an EOS event, you should also just return GST_FLOW_EOS in - your create or fill function (assuming your element derives from - GstBaseSrc or GstPushSrc). 
- - - - - Table Of Contents - - WRITEME - - - - - Gap - - WRITEME - - - - - Flush Start - - The flush start event is sent downstream (in push mode) or upstream - (in pull mode) if all buffers and caches in the pipeline should be - emptied. Queue elements will - empty their internal list of buffers when they receive this event, for - example. File sink elements (e.g. filesink) will flush - the kernel-to-disk cache (fdatasync () or - fflush ()) when they receive this event. Normally, - elements receiving this event will simply just forward it, since most - filter or filter-like elements don't have an internal cache of data. - gst_pad_event_default () does just that, so for - most elements, it is enough to forward the event using the default - event handler. - - - As a side-effect of flushing all data from the pipeline, this event - unblocks the streaming thread by making all pads reject data until - they receive a signal - (elements trying to push data will get a FLUSHING flow return - and stop processing data). - - - The flush-start event is created with the - gst_event_new_flush_start (). - Like the EOS event, it has no properties. This event is usually - only created by elements driving the pipeline, like source elements - operating in push-mode or pull-range based demuxers/decoders. - - - - - Flush Stop - - The flush-stop event is sent by an element driving the pipeline - after a flush-start and tells pads and elements downstream that - they should accept events and buffers again (there will be at - least a SEGMENT event before any buffers first though). - - - If your element keeps temporary caches of stream data, it should - clear them when it receives a FLUSH-STOP event (and also whenever - its chain function receives a buffer with the DISCONT flag set). - - - The flush-stop event is created with - gst_event_new_flush_stop (). It has one - parameter that controls if the running-time of the pipeline should - be reset to 0 or not. 
Normally after a flushing seek, the - running_time is set back to 0. - - - - - Quality Of Service (QOS) - - The QOS event contains a report about the current real-time - performance of the stream. See more info in - . - - - - - Seek Request - - Seek events are meant to request a new stream position to elements. - This new position can be set in several formats (time, bytes or - default units [a term indicating frames for video, - channel-independent samples for audio, etc.]). Seeking can be done with - respect to the end-of-file or start-of-file, and - usually happens in upstream direction (downstream seeking is done by - sending a SEGMENT event with the appropriate offsets for elements - that support that, like filesink). - - - Elements receiving seek events should, depending on the element type, - either just forward it upstream (filters, decoders), change the - format in which the event is given and then forward it (demuxers), - or handle the event by changing the file pointer in their internal - stream resource (file sources, demuxers/decoders driving the pipeline - in pull-mode) or something else. - - - Seek events are built up using positions in specified formats (time, - bytes, units). They are created using the function - gst_event_new_seek (). Note that many plugins do - not support seeking from the end of the stream. - An element not driving the pipeline and forwarding a seek - request should not assume that the seek succeeded or actually happened, - it should operate based on the SEGMENT events it receives. - - - Elements parsing this event can do this using - gst_event_parse_seek(). - - - - - Navigation - - Navigation events are sent upstream by video sinks to inform upstream - elements of where the mouse pointer is, if and where mouse pointer - clicks have happened, or if keys have been pressed or released. - - - All this information is contained in the event structure which can - be obtained with gst_event_get_structure (). 
- - - Check out the navigationtest element in gst-plugins-good for an idea - how to extract navigation information from this event. - - - - - diff --git a/docs/pwg/advanced-interfaces.xml b/docs/pwg/advanced-interfaces.xml deleted file mode 100644 index 7be34b6787..0000000000 --- a/docs/pwg/advanced-interfaces.xml +++ /dev/null @@ -1,237 +0,0 @@ - - Interfaces - - Previously, in the chapter , we have - introduced the concept of GObject properties of controlling an element's - behaviour. This is very powerful, but it has two big disadvantages: - first of all, it is too generic, and second, it isn't dynamic. - - - The first disadvantage is related to the customizability of the end-user - interface that will be built to control the element. Some properties are - more important than others. Some integer properties are better shown in a - spin-button widget, whereas others would be better represented by a slider - widget. Such things are not possible because the UI has no actual meaning - in the application. A UI widget that represents a bitrate property is the - same as a UI widget that represents the size of a video, as long as both - are of the same GParamSpec type. Another problem, - is that things like parameter grouping, function grouping, or parameter - coupling are not - really possible. - - - The second problem with parameters are that they are not dynamic. In - many cases, the allowed values for a property are not fixed, but depend - on things that can only be detected at runtime. The names of inputs for - a TV card in a video4linux source element, for example, can only be - retrieved from the kernel driver when we've opened the device; this only - happens when the element goes into the READY state. This means that we - cannot create an enum property type to show this to the user. - - - The solution to those problems is to create very specialized types of - controls for certain often-used controls. We use the concept of interfaces - to achieve this. 
The basis of this all is the glib - GTypeInterface type. For each case where we think - it's useful, we've created interfaces which can be implemented by elements - at their own will. - - - One important note: interfaces do not replace - properties. Rather, interfaces should be built next to - properties. There are two important reasons for this. First of all, - properties can be more easily introspected. Second, properties can be - specified on the commandline (gst-launch). - - - - How to Implement Interfaces - - Implementing interfaces is initiated in the _get_type () - of your element. You can register one or more interfaces after having - registered the type itself. Some interfaces have dependencies on other - interfaces or can only be registered by certain types of elements. You - will be notified of doing that wrongly when using the element: it will - quit with failed assertions, which will explain what went wrong. - If it does, you need to register support for that - interface before registering support for the interface that you're - wanting to support. The example below explains how to add support for a - simple interface with no further dependencies. 
- - -static void gst_my_filter_some_interface_init (GstSomeInterface *iface); - -GType -gst_my_filter_get_type (void) -{ - static GType my_filter_type = 0; - - if (!my_filter_type) { - static const GTypeInfo my_filter_info = { - sizeof (GstMyFilterClass), - NULL, - NULL, - (GClassInitFunc) gst_my_filter_class_init, - NULL, - NULL, - sizeof (GstMyFilter), - 0, - (GInstanceInitFunc) gst_my_filter_init - }; - static const GInterfaceInfo some_interface_info = { - (GInterfaceInitFunc) gst_my_filter_some_interface_init, - NULL, - NULL - }; - - my_filter_type = - g_type_register_static (GST_TYPE_ELEMENT, - "GstMyFilter", - &my_filter_info, 0); - g_type_add_interface_static (my_filter_type, - GST_TYPE_SOME_INTERFACE, - &some_interface_info); - } - - return my_filter_type; -} - -static void -gst_my_filter_some_interface_init (GstSomeInterface *iface) -{ - /* here, you would set virtual function pointers in the interface */ -} - - - Or more conveniently: - - -static void gst_my_filter_some_interface_init (GstSomeInterface *iface); - -G_DEFINE_TYPE_WITH_CODE (GstMyFilter, gst_my_filter,GST_TYPE_ELEMENT, - G_IMPLEMENT_INTERFACE (GST_TYPE_SOME_INTERFACE, - gst_my_filter_some_interface_init)); - - - - - - URI interface - - WRITEME - - - - - Color Balance Interface - - WRITEME - - - - - Video Overlay Interface - - The #GstVideoOverlay interface is used for 2 main purposes : - - - - To get a grab on the Window where the video sink element is going to render. - This is achieved by either being informed about the Window identifier that - the video sink element generated, or by forcing the video sink element to use - a specific Window identifier for rendering. - - - - - To force a redrawing of the latest video frame the video sink element - displayed on the Window. Indeed if the #GstPipeline is in #GST_STATE_PAUSED - state, moving the Window around will damage its content. 
Application - developers will want to handle the Expose events themselves and force the - video sink element to refresh the Window's content. - - - - - - A plugin drawing video output in a video window will need to have that - window at one stage or another. Passive mode simply means that no window - has been given to the plugin before that stage, so the plugin created the - window by itself. In that case the plugin is responsible of destroying - that window when it's not needed any more and it has to tell the - applications that a window has been created so that the application can - use it. This is done using the have-window-handle - message that can be posted from the plugin with the - gst_video_overlay_got_window_handle method. - - - As you probably guessed already active mode just means sending a video - window to the plugin so that video output goes there. This is done using - the gst_video_overlay_set_window_handle method. - - - It is possible to switch from one mode to another at any moment, so the - plugin implementing this interface has to handle all cases. There are only - 2 methods that plugins writers have to implement and they most probably - look like that : - - window) - gst_my_filter_destroy_window (my_filter->window); - - my_filter->window = handle; -} - -static void -gst_my_filter_xoverlay_init (GstVideoOverlayClass *iface) -{ - iface->set_window_handle = gst_my_filter_set_window_handle; -} - ]]> - - You will also need to use the interface methods to post messages when - needed such as when receiving a CAPS event where you will know the video - geometry and maybe create the window. - - win); -} - -/* called from the event handler for CAPS events */ -static gboolean -gst_my_filter_sink_set_caps (GstMyFilter *my_filter, GstCaps *caps) -{ - gint width, height; - gboolean ret; - ... 
- ret = gst_structure_get_int (structure, "width", &width); - ret &= gst_structure_get_int (structure, "height", &height); - if (!ret) return FALSE; - - gst_video_overlay_prepare_window_handle (GST_VIDEO_OVERLAY (my_filter)); - - if (!my_filter->window) - my_filter->window = gst_my_filter_create_window (my_filter, width, height); - - ... -} - ]]> - - - - Navigation Interface - - WRITEME - - - diff --git a/docs/pwg/advanced-negotiation.xml b/docs/pwg/advanced-negotiation.xml deleted file mode 100644 index 16591d3013..0000000000 --- a/docs/pwg/advanced-negotiation.xml +++ /dev/null @@ -1,601 +0,0 @@ - - Caps negotiation - - Caps negotiation is the act of finding a media format (GstCaps) between - elements that they can handle. This process in &GStreamer; can in most - cases find an optimal solution for the complete pipeline. In this section - we explain how this works. - - - - Caps negotiation basics - - In &GStreamer;, negotiation of the media format always follows the - following simple rules: - - - - - A downstream element suggest a format on its sinkpad and places the - suggestion in the result of the CAPS query performed on the sinkpad. - See also . - - - - - An upstream element decides on a format. It sends the selected media - format downstream on its source pad with a CAPS event. Downstream - elements reconfigure themselves to handle the media type in the CAPS - event on the sinkpad. - - - - - A downstream element can inform upstream that it would like to - suggest a new format by sending a RECONFIGURE event upstream. The - RECONFIGURE event simply instructs an upstream element to restart - the negotiation phase. Because the element that sent out the - RECONFIGURE event is now suggesting another format, the format - in the pipeline might change. - - - - - In addition to the CAPS and RECONFIGURE event and the CAPS query, there - is an ACCEPT_CAPS query to quickly check if a certain caps can - be accepted by an element. 
- - - All negotiation follows these simple rules. Let's take a look at some - typical uses cases and how negotiation happens. - - - - - Caps negotiation use cases - - In what follows we will look at some use cases for push-mode scheduling. - The pull-mode scheduling negotiation phase is discussed in - and is actually similar as we - will see. - - - Since the sink pads only suggest formats and the source pads need to - decide, the most complicated work is done in the source pads. - We can identify 3 caps negotiation use cases for the source pads: - - - - - Fixed negotiation. An element can output one format only. - See . - - - - - Transform negotiation. There is a (fixed) transform between the - input and output format of the element, usually based on some - element property. The caps that the element will produce depend - on the upstream caps and the caps that the element can accept - depend on the downstream caps. - See . - - - - - Dynamic negotiation. An element can output many formats. - See . - - - - - - Fixed negotiation - - In this case, the source pad can only produce a fixed format. Usually - this format is encoded inside the media. No downstream element can - ask for a different format, the only way that the source pad will - renegotiate is when the element decides to change the caps itself. - - - Elements that could implement fixed caps (on their source pads) are, - in general, all elements that are not renegotiable. Examples include: - - - - - A typefinder, since the type found is part of the actual data stream - and can thus not be re-negotiated. The typefinder will look at the - stream of bytes, figure out the type, send a CAPS event with the - caps and then push buffers of the type. - - - - - Pretty much all demuxers, since the contained elementary data - streams are defined in the file headers, and thus not - renegotiable. 
- - - - - Some decoders, where the format is embedded in the data stream - and not part of the peercaps and where the - decoder itself is not reconfigurable, too. - - - - - Some sources that produce a fixed format. - - - - - gst_pad_use_fixed_caps() is used on the source - pad with fixed caps. As long as the pad is not negotiated, the default - CAPS query will return the caps presented in the padtemplate. As soon - as the pad is negotiated, the CAPS query will return the negotiated - caps (and nothing else). These are the relevant code snippets for fixed - caps source pads. - - - - - - The fixed caps can then be set on the pad by calling - gst_pad_set_caps (). - - -, - "channels", G_TYPE_INT, , NULL); - if (!gst_pad_set_caps (pad, caps)) { - GST_ELEMENT_ERROR (element, CORE, NEGOTIATION, (NULL), - ("Some debug information here")); - return GST_FLOW_ERROR; - } -[..] -]]> - - - These types of elements also don't have a relation between the input - format and the output format, the input caps simply don't contain the - information needed to produce the output caps. - - - All other elements that need to be configured for the format should - implement full caps negotiation, which will be explained in the next - few sections. - - - - - Transform negotiation - - In this negotiation technique, there is a fixed transform between - the element input caps and the output caps. This transformation - could be parameterized by element properties but not by the - content of the stream (see - for that use-case). - - - The caps that the element can accept depend on the (fixed - transformation) downstream caps. The caps that the element can - produce depend on the (fixed transformation of) the upstream - caps. - - - This type of element can usually set caps on its source pad from - the _event() function on the sink pad when - it received the CAPS event. This means that the caps transform - function transforms a fixed caps into another fixed caps. 
- Examples of elements include: - - - - - Videobox. It adds configurable border around a video frame - depending on object properties. - - - - - Identity elements. All elements that don't change the format - of the data, only the content. Video and audio effects are an - example. Other examples include elements that inspect the - stream. - - - - - Some decoders and encoders, where the output format is defined - by input format, like mulawdec and mulawenc. These decoders - usually have no headers that define the content of the stream. - They are usually more like conversion elements. - - - - - Below is an example of a negotiation steps of a typical transform - element. In the sink pad CAPS event handler, we compute the caps - for the source pad and set those. - - -srcpad, outcaps); - gst_caps_unref (outcaps); - - return ret; -} - -static gboolean -gst_my_filter_sink_event (GstPad *pad, - GstObject *parent, - GstEvent *event) -{ - gboolean ret; - GstMyFilter *filter = GST_MY_FILTER (parent); - - switch (GST_EVENT_TYPE (event)) { - case GST_EVENT_CAPS: - { - GstCaps *caps; - - gst_event_parse_caps (event, &caps); - ret = gst_my_filter_setcaps (filter, caps); - break; - } - default: - ret = gst_pad_event_default (pad, parent, event); - break; - } - return ret; -} - - [...] -]]> - - - - - Dynamic negotiation - - A last negotiation method is the most complex and powerful dynamic - negotiation. - - - Like with the transform negotiation in - , dynamic negotiation will - perform a transformation on the downstream/upstream caps. Unlike the - transform negotiation, this transform will convert fixed caps to - unfixed caps. This means that the sink pad input caps can be converted - into unfixed (multiple) formats. The source pad will have to choose a - format from all the possibilities. It would usually like to choose a - format that requires the least amount of effort to produce but it does - not have to be. 
The selection of the format should also depend on the - caps that can be accepted downstream (see a QUERY_CAPS function in - ). - - - A typical flow goes like this: - - - - - Caps are received on the sink pad of the element. - - - - - If the element prefers to operate in passthrough mode, check - if downstream accepts the caps with the ACCEPT_CAPS query. If it - does, we can complete negotiation and we can operate in - passthrough mode. - - - - - Calculate the possible caps for the source pad. - - - - - Query the downstream peer pad for the list of possible - caps. - - - - - Select from the downstream list the first caps that you can - transform to and set this as the output caps. You might have to - fixate the caps to some reasonable defaults to construct - fixed caps. - - - - - Examples of this type of elements include: - - - - - Converter elements such as videoconvert, audioconvert, audioresample, - videoscale, ... - - - - - Source elements such as audiotestsrc, videotestsrc, v4l2src, - pulsesrc, ... 
- - - - - Let's look at the example of an element that can convert between - samplerates, so where input and output samplerate don't have to be - the same: - - -srcpad, caps)) { - filter->passthrough = TRUE; - } else { - GstCaps *othercaps, *newcaps; - GstStructure *s = gst_caps_get_structure (caps, 0), *others; - - /* no passthrough, setup internal conversion */ - gst_structure_get_int (s, "channels", &filter->channels); - othercaps = gst_pad_get_allowed_caps (filter->srcpad); - others = gst_caps_get_structure (othercaps, 0); - gst_structure_set (others, - "channels", G_TYPE_INT, filter->channels, NULL); - - /* now, the samplerate value can optionally have multiple values, so - * we "fixate" it, which means that one fixed value is chosen */ - newcaps = gst_caps_copy_nth (othercaps, 0); - gst_caps_unref (othercaps); - gst_pad_fixate_caps (filter->srcpad, newcaps); - if (!gst_pad_set_caps (filter->srcpad, newcaps)) - return FALSE; - - /* we are now set up, configure internally */ - filter->passthrough = FALSE; - gst_structure_get_int (s, "rate", &filter->from_samplerate); - others = gst_caps_get_structure (newcaps, 0); - gst_structure_get_int (others, "rate", &filter->to_samplerate); - } - - return TRUE; -} - -static gboolean -gst_my_filter_sink_event (GstPad *pad, - GstObject *parent, - GstEvent *event) -{ - gboolean ret; - GstMyFilter *filter = GST_MY_FILTER (parent); - - switch (GST_EVENT_TYPE (event)) { - case GST_EVENT_CAPS: - { - GstCaps *caps; - - gst_event_parse_caps (event, &caps); - ret = gst_my_filter_setcaps (filter, caps); - break; - } - default: - ret = gst_pad_event_default (pad, parent, event); - break; - } - return ret; -} - -static GstFlowReturn -gst_my_filter_chain (GstPad *pad, - GstObject *parent, - GstBuffer *buf) -{ - GstMyFilter *filter = GST_MY_FILTER (parent); - GstBuffer *out; - - /* push on if in passthrough mode */ - if (filter->passthrough) - return gst_pad_push (filter->srcpad, buf); - - /* convert, push */ - out = 
gst_my_filter_convert (filter, buf); - gst_buffer_unref (buf); - - return gst_pad_push (filter->srcpad, out); -} -]]> - - - - - - Upstream caps (re)negotiation - - Upstream negotiation's primary use is to renegotiate (part of) an - already-negotiated pipeline to a new format. Some practical examples - include to select a different video size because the size of the video - window changed, and the video output itself is not capable of rescaling, - or because the audio channel configuration changed. - - - Upstream caps renegotiation is requested by sending a GST_EVENT_RECONFIGURE - event upstream. The idea is that it will instruct the upstream element - to reconfigure its caps by doing a new query for the allowed caps and then - choosing a new caps. The element that sends out the RECONFIGURE event - would influence the selection of the new caps by returning the new - preferred caps from its GST_QUERY_CAPS query function. The RECONFIGURE - event will set the GST_PAD_FLAG_NEED_RECONFIGURE on all pads that it - travels over. - - - It is important to note here that different elements actually have - different responsibilities here: - - - - - Elements that want to propose a new format upstream need to first - check if the new caps are acceptable upstream with an ACCEPT_CAPS - query. Then they would send a RECONFIGURE event and be prepared to - answer the CAPS query with the new preferred format. It should be - noted that when there is no upstream element that can (or wants) - to renegotiate, the element needs to deal with the currently - configured format. - - - - - Elements that operate in transform negotiation according to - pass the RECONFIGURE - event upstream. Because these elements simply do a fixed transform - based on the upstream caps, they need to send the event upstream - so that it can select a new format. - - - - - Elements that operate in fixed negotiation - () drop the RECONFIGURE event. 
- These elements can't reconfigure and their output caps don't depend - on the upstream caps so the event can be dropped. - - - - - Elements that can be reconfigured on the source pad (source pads - implementing dynamic negotiation in - ) should check its - NEED_RECONFIGURE flag with - gst_pad_check_reconfigure () and it should - start renegotiation when the function returns TRUE. - - - - - - - Implementing a CAPS query function - - A _query ()-function with the GST_QUERY_CAPS query - type is called when a peer element would like to know which formats - this pad supports, and in what order of preference. The return value - should be all formats that this elements supports, taking into account - limitations of peer elements further downstream or upstream, sorted by - order of preference, highest preference first. - - - - -srcpad) ? filter->sinkpad : - filter->srcpad; - caps = gst_pad_get_allowed_caps (otherpad); - - gst_query_parse_caps (query, &filt); - - /* We support *any* samplerate, indifferent from the samplerate - * supported by the linked elements on both sides. */ - for (i = 0; i < gst_caps_get_size (caps); i++) { - GstStructure *structure = gst_caps_get_structure (caps, i); - - gst_structure_remove_field (structure, "rate"); - } - - /* make sure we only return results that intersect our - * padtemplate */ - tcaps = gst_pad_get_pad_template_caps (pad); - if (tcaps) { - temp = gst_caps_intersect (caps, tcaps); - gst_caps_unref (caps); - gst_caps_unref (tcaps); - caps = temp; - } - /* filter against the query filter when needed */ - if (filt) { - temp = gst_caps_intersect (caps, filt); - gst_caps_unref (caps); - caps = temp; - } - gst_query_set_caps_result (query, caps); - gst_caps_unref (caps); - ret = TRUE; - break; - } - default: - ret = gst_pad_query_default (pad, parent, query); - break; - } - return ret; -} -]]> - - - - - Pull-mode Caps negotiation - - WRITEME, the mechanism of pull-mode negotiation is not yet fully - understood. 
- - - - Using all the knowledge you've acquired by reading this chapter, you - should be able to write an element that does correct caps negotiation. - If in doubt, look at other elements of the same type in our git - repository to get an idea of how they do what you want to do. - - - diff --git a/docs/pwg/advanced-qos.xml b/docs/pwg/advanced-qos.xml deleted file mode 100644 index b87bf1e812..0000000000 --- a/docs/pwg/advanced-qos.xml +++ /dev/null @@ -1,283 +0,0 @@ - - Quality Of Service (QoS) - - - Quality of Service in &GStreamer; is about measuring and adjusting - the real-time performance of a pipeline. The real-time performance is - always measured relative to the pipeline clock and typically happens in - the sinks when they synchronize buffers against the clock. - - - When buffers arrive late in the sink, i.e. when their running-time is - smaller than that of the clock, we say that the pipeline is having a - quality of service problem. These are a few possible reasons: - - - - - High CPU load, there is not enough CPU power to handle the stream, - causing buffers to arrive late in the sink. - - - - - Network problems - - - - - Other resource problems such as disk load, memory bottlenecks etc - - - - - The measurements result in QOS events that aim to adjust the datarate - in one or more upstream elements. Two types of adjustments can be - made: - - - - - Short time "emergency" corrections based on latest observation in - the sinks. - - - Long term rate corrections based on trends observed in the sinks. - - - - - It is also possible for the application to artificially introduce delay - between synchronized buffers, this is called throttling. It can be used - to limit or reduce the framerate, for example. - - - - Measuring QoS - - Elements that synchronize buffers on the pipeline clock will usually - measure the current QoS. They will also need to keep some statistics - in order to generate the QOS event. 
- - - For each buffer that arrives in the sink, the element needs to calculate - how late or how early it was. This is called the jitter. Negative jitter - values mean that the buffer was early, positive values mean that the - buffer was late. the jitter value gives an indication of how early/late - a buffer was. - - - A synchronizing element will also need to calculate how much time - elapsed between receiving two consecutive buffers. We call this the - processing time because that is the amount of time it takes for the - upstream element to produce/process the buffer. We can compare this - processing time to the duration of the buffer to have a measurement - of how fast upstream can produce data, called the proportion. - If, for example, upstream can produce a buffer in 0.5 seconds of 1 - second long, it is operating at twice the required speed. If, on the - other hand, it takes 2 seconds to produce a buffer with 1 seconds worth - of data, upstream is producing buffers too slow and we won't be able to - keep synchronization. Usually, a running average is kept of the - proportion. - - - A synchronizing element also needs to measure its own performance in - order to figure out if the performance problem is upstream of itself. - - - These measurements are used to construct a QOS event that is sent - upstream. Note that a QoS event is sent for each buffer that arrives - in the sink. - - - - - Handling QoS - - An element will have to install an event function on its source pads - in order to receive QOS events. Usually, the element will need to - store the value of the QOS event and use them in the data processing - function. The element will need to use a lock to protect these QoS - values as shown in the example below. Also make sure to pass the - QoS event upstream. 
- - -qos_proportion = proportion; - priv->qos_timestamp = timestamp; - priv->qos_diff = diff; - GST_OBJECT_UNLOCK (decoder); - - res = gst_pad_push_event (decoder->sinkpad, event); - break; - } - - [...] -]]> - - - With the QoS values, there are two types of corrections that an element - can do: - - - - Short term correction - - The timestamp and the jitter value in the QOS event can be used to - perform a short term correction. If the jitter is positive, the - previous buffer arrived late and we can be sure that a buffer with - a timestamp < timestamp + jitter is also going to be late. We - can thus drop all buffers with a timestamp less than timestamp + - jitter. - - - If the buffer duration is known, a better estimation for the next - likely timestamp as: timestamp + 2 * jitter + duration. - - - A possible algorithm typically looks like this: - - -qos_proportion; - qos_timestamp = priv->qos_timestamp; - qos_diff = priv->qos_diff; - GST_OBJECT_UNLOCK (dec); - - /* calculate the earliest valid timestamp */ - if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (qos_timestamp))) { - if (G_UNLIKELY (qos_diff > 0)) { - earliest_time = qos_timestamp + 2 * qos_diff + frame_duration; - } else { - earliest_time = qos_timestamp + qos_diff; - } - } else { - earliest_time = GST_CLOCK_TIME_NONE; - } - - /* compare earliest_time to running-time of next buffer */ - if (earliest_time > timestamp) - goto drop_buffer; - - [...] -]]> - - - - - Long term correction - - Long term corrections are a bit more difficult to perform. They - rely on the value of the proportion in the QOS event. Elements should - reduce the amount of resources they consume by the proportion - field in the QoS message. - - - Here are some possible strategies to achieve this: - - - - - Permanently dropping frames or reducing the CPU or bandwidth - requirements of the element. Some decoders might be able to - skip decoding of B frames. - - - - - Switch to lower quality processing or reduce the algorithmic - complexity. 
Care should be taken that this doesn't introduce - disturbing visual or audible glitches. - - - - - Switch to a lower quality source to reduce network bandwidth. - - - - - Assign more CPU cycles to critical parts of the pipeline. This - could, for example, be done by increasing the thread priority. - - - - - In all cases, elements should be prepared to go back to their normal - processing rate when the proportion member in the QOS event approaches - the ideal proportion of 1.0 again. - - - - - - Throttling - - Elements synchronizing to the clock should expose a property to configure - them in throttle mode. In throttle mode, the time distance between buffers - is kept to a configurable throttle interval. This means that effectively - the buffer rate is limited to 1 buffer per throttle interval. This can be - used to limit the framerate, for example. - - - When an element is configured in throttling mode (this is usually only - implemented on sinks) it should produce QoS events upstream with the jitter - field set to the throttle interval. This should instruct upstream elements to - skip or drop the remaining buffers in the configured throttle interval. - - - The proportion field is set to the desired slowdown needed to get the - desired throttle interval. Implementations can use the QoS Throttle type, - the proportion and the jitter member to tune their implementations. - - - The default sink base class, has the throttle-time - property for this feature. You can test this with: - gst-launch-1.0 videotestsrc ! - xvimagesink throttle-time=500000000 - - - - - QoS Messages - - In addition to the QOS events that are sent between elements in the - pipeline, there are also QOS messages posted on the pipeline bus to - inform the application of QoS decisions. The QOS message contains - the timestamps of when something was dropped along with the amount - of dropped vs processed items. 
Elements must post a QOS - message under these conditions: - - - - - The element dropped a buffer because of QoS reasons. - - - - - An element changes its processing strategy because of QoS reasons - (quality). This could include a decoder that decides to drop every - B frame to increase its processing speed or an effect element - switching to a lower quality algorithm. - - - - - - diff --git a/docs/pwg/advanced-request.xml b/docs/pwg/advanced-request.xml deleted file mode 100644 index 8ef98977c6..0000000000 --- a/docs/pwg/advanced-request.xml +++ /dev/null @@ -1,290 +0,0 @@ - - Request and Sometimes pads - - Until now, we've only dealt with pads that are always available. However, - there's also pads that are only being created in some cases, or only if - the application requests the pad. The first is called a - sometimes; the second is called a - request pad. The availability of a pad (always, - sometimes or request) can be seen in a pad's template. This chapter will - discuss when each of the two is useful, how they are created and when - they should be disposed. - - - - Sometimes pads - - A sometimes pad is a pad that is created under certain - conditions, but not in all cases. This mostly depends on stream content: - demuxers will generally parse the stream header, decide what elementary - (video, audio, subtitle, etc.) streams are embedded inside the system - stream, and will then create a sometimes pad for each of those elementary - streams. At its own choice, it can also create more than one instance of - each of those per element instance. The only limitation is that each - newly created pad should have a unique name. Sometimes pads are disposed - when the stream data is disposed, too (i.e. when going from PAUSED to the - READY state). You should not dispose the pad on EOS, - because someone might re-activate the pipeline and seek back to before - the end-of-stream point. 
The stream should still stay valid after EOS, at - least until the stream data is disposed. In any case, the element is - always the owner of such a pad. - - - The example code below will parse a text file, where the first line is - a number (n). The next lines all start with a number (0 to n-1), which - is the number of the source pad over which the data should be sent. - - -3 -0: foo -1: bar -0: boo -2: bye - - - The code to parse this file and create the dynamic sometimes - pads, looks like this: - - -firstrun = TRUE; - filter->srcpadlist = NULL; -} - -/* - * Get one line of data - without newline. - */ - -static GstBuffer * -gst_my_filter_getline (GstMyFilter *filter) -{ - guint8 *data; - gint n, num; - - /* max. line length is 512 characters - for safety */ - for (n = 0; n < 512; n++) { - num = gst_bytestream_peek_bytes (filter->bs, &data, n + 1); - if (num != n + 1) - return NULL; - - /* newline? */ - if (data[n] == '\n') { - GstBuffer *buf = gst_buffer_new_allocate (NULL, n + 1, NULL); - - gst_bytestream_peek_bytes (filter->bs, &data, n); - gst_buffer_fill (buf, 0, data, n); - gst_buffer_memset (buf, n, '\0', 1); - gst_bytestream_flush_fast (filter->bs, n + 1); - - return buf; - } - } -} - -static void -gst_my_filter_loopfunc (GstElement *element) -{ - GstMyFilter *filter = GST_MY_FILTER (element); - GstBuffer *buf; - GstPad *pad; - GstMapInfo map; - gint num, n; - - /* parse header */ - if (filter->firstrun) { - gchar *padname; - guint8 id; - - if (!(buf = gst_my_filter_getline (filter))) { - gst_element_error (element, STREAM, READ, (NULL), - ("Stream contains no header")); - return; - } - gst_buffer_extract (buf, 0, &id, 1); - num = atoi (id); - gst_buffer_unref (buf); - - /* for each of the streams, create a pad */ - for (n = 0; n < num; n++) { - padname = g_strdup_printf ("src_%u", n); - pad = gst_pad_new_from_static_template (src_factory, padname); - g_free (padname); - - /* here, you would set _event () and _query () functions */ - - /* need to 
activate the pad before adding */ - gst_pad_set_active (pad, TRUE); - - gst_element_add_pad (element, pad); - filter->srcpadlist = g_list_append (filter->srcpadlist, pad); - } - } - - /* and now, simply parse each line and push over */ - if (!(buf = gst_my_filter_getline (filter))) { - GstEvent *event = gst_event_new (GST_EVENT_EOS); - GList *padlist; - - for (padlist = srcpadlist; - padlist != NULL; padlist = g_list_next (padlist)) { - pad = GST_PAD (padlist->data); - gst_pad_push_event (pad, gst_event_ref (event)); - } - gst_event_unref (event); - /* pause the task here */ - return; - } - - /* parse stream number and go beyond the ':' in the data */ - gst_buffer_map (buf, &map, GST_MAP_READ); - num = atoi (map.data[0]); - if (num >= 0 && num < g_list_length (filter->srcpadlist)) { - pad = GST_PAD (g_list_nth_data (filter->srcpadlist, num); - - /* magic buffer parsing foo */ - for (n = 0; map.data[n] != ':' && - map.data[n] != '\0'; n++) ; - if (map.data[n] != '\0') { - GstBuffer *sub; - - /* create region copy that starts right past the space. The reason - * that we don't just forward the data pointer is because the - * pointer is no longer the start of an allocated block of memory, - * but just a pointer to a position somewhere in the middle of it. - * That cannot be freed upon disposal, so we'd either crash or have - * a memleak. Creating a region copy is a simple way to solve that. */ - sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, - n + 1, map.size - n - 1); - gst_pad_push (pad, sub); - } - } - gst_buffer_unmap (buf, &map); - gst_buffer_unref (buf); -} -]]> - - - Note that we use a lot of checks everywhere to make sure that the content - in the file is valid. This has two purposes: first, the file could be - erroneous, in which case we prevent a crash. The second and most important - reason is that - in extreme cases - the file could be used maliciously to - cause undefined behaviour in the plugin, which might lead to security - issues. 
Always assume that the file could be used to - do bad things. - - - - - Request pads - - Request pads are similar to sometimes pads, except that - request are created on demand of something outside of the element rather - than something inside the element. This concept is often used in muxers, - where - for each elementary stream that is to be placed in the output - system stream - one sink pad will be requested. It can also be used in - elements with a variable number of input or outputs pads, such as the - tee (multi-output) or - input-selector (multi-input) elements. - - - To implement request pads, you need to provide a padtemplate with a - GST_PAD_REQUEST presence and implement the - request_new_pad virtual method in - GstElement. - To clean up, you will need to implement the - release_pad virtual method. - - -request_new_pad = gst_my_filter_request_new_pad; - element_class->release_pad = gst_my_filter_release_pad; -} - -static GstPad * -gst_my_filter_request_new_pad (GstElement *element, - GstPadTemplate *templ, - const gchar *name, - const GstCaps *caps) -{ - GstPad *pad; - GstMyFilterInputContext *context; - - context = g_new0 (GstMyFilterInputContext, 1); - pad = gst_pad_new_from_template (templ, name); - gst_pad_set_element_private (pad, context); - - /* normally, you would set _chain () and _event () functions here */ - - gst_element_add_pad (element, pad); - - return pad; -} - -static void -gst_my_filter_release_pad (GstElement *element, - GstPad *pad) -{ - GstMyFilterInputContext *context; - - context = gst_pad_get_element_private (pad); - g_free (context); - - gst_element_remove_pad (element, pad); -} - -]]> - - - diff --git a/docs/pwg/advanced-scheduling.xml b/docs/pwg/advanced-scheduling.xml deleted file mode 100644 index 4639d63687..0000000000 --- a/docs/pwg/advanced-scheduling.xml +++ /dev/null @@ -1,445 +0,0 @@ - - Different scheduling modes - - The scheduling mode of a pad defines how data is retrieved from (source) - or given to (sink) pads. 
&GStreamer; can operate in two scheduling - mode, called push- and pull-mode. &GStreamer; supports elements with pads - in any of the scheduling modes where not all pads need to be operating - in the same mode. - - - So far, we have only discussed _chain ()-operating - elements, i.e. elements that have a chain-function set on their sink pad - and push buffers on their source pad(s). We call this the push-mode - because a peer element will use gst_pad_push () on - a srcpad, which will cause our _chain ()-function - to be called, which in turn causes our element to push out a buffer on - the source pad. The initiative to start the dataflow happens somewhere - upstream when it pushes out a buffer and all downstream elements get - scheduled when their _chain ()-functions are - called in turn. - - - Before we explain pull-mode scheduling, let's first understand how the - different scheduling modes are selected and activated on a pad. - - - - The pad activation stage - - During the element state change of READY->PAUSED, the pads of an - element will be activated. This happens first on the source pads and - then on the sink pads of the element. &GStreamer; calls the - _activate () of a pad. By default this function - will activate the pad in push-mode by calling - gst_pad_activate_mode () with the GST_PAD_MODE_PUSH - scheduling mode. - It is possible to override the _activate () of a pad - and decide on a different scheduling mode. You can know in what - scheduling mode a pad is activated by overriding the - _activate_mode ()-function. - - - &GStreamer; allows the different pads of an element to operate in - different scheduling modes. This allows for many different possible - use-cases. What follows is an overview of some typical use-cases. - - - - - If all pads of an element are activated in push-mode scheduling, - the element as a whole is operating in push-mode. 
- For source elements this means that they will have to start a - task that pushes out buffers on the source pad to the downstream - elements. - Downstream elements will have data pushed to them by upstream elements - using the sinkpads _chain ()-function which will - push out buffers on the source pads. - Prerequisites for this scheduling mode are that a chain-function was - set for each sinkpad using gst_pad_set_chain_function () - and that all downstream elements operate in the same mode. - - - - - Alternatively, sinkpads can be the driving force behind a pipeline - by operating in pull-mode, while the sourcepads - of the element still operate in push-mode. In order to be the - driving force, those pads start a GstTask - when they are activated. This task is a thread, which - will call a function specified by the element. When called, this - function will have random data access (through - gst_pad_pull_range ()) over all sinkpads, and - can push data over the sourcepads, which effectively means that - this element controls data flow in the pipeline. Prerequisites for - this mode are that all downstream elements can act in push - mode, and that all upstream elements operate in pull-mode (see below). - - - Source pads can be activated in PULL mode by a downstream element - when they return GST_PAD_MODE_PULL from the GST_QUERY_SCHEDULING - query. Prerequisites for this scheduling mode are that a - getrange-function was set for the source pad using - gst_pad_set_getrange_function (). - - - - - Lastly, all pads in an element can be activated in PULL-mode. - However, contrary to the above, this does not mean that they - start a task on their own. Rather, it means that they are pull - slave for the downstream element, and have to provide random data - access to it from their _get_range ()-function. - Requirements are that the a _get_range - ()-function was set on this pad using the function - gst_pad_set_getrange_function (). 
Also, if - the element has any sinkpads, all those pads (and thereby their - peers) need to operate in PULL access mode, too. - - - When a sink element is activated in PULL mode, it should start a - task that calls gst_pad_pull_range () on its - sinkpad. It can only do this when the upstream SCHEDULING query - returns support for the GST_PAD_MODE_PULL scheduling mode. - - - - - In the next two sections, we will go closer into pull-mode scheduling - (elements/pads driving the pipeline, and elements/pads providing random - access), and some specific use cases will be given. - - - - - Pads driving the pipeline - - Sinkpads operating in pull-mode, with the sourcepads operating in - push-mode (or it has no sourcepads when it is a sink), can start a task - that will drive the pipeline data flow. - Within this task function, you have random access over all of the sinkpads, - and push data over the sourcepads. - This can come in useful for several different kinds of elements: - - - - - Demuxers, parsers and certain kinds of decoders where data comes - in unparsed (such as MPEG-audio or video streams), since those will - prefer byte-exact (random) access from their input. If possible, - however, such elements should be prepared to operate in push-mode - mode, too. - - - - - Certain kind of audio outputs, which require control over their - input data flow, such as the Jack sound server. - - - - - First you need to perform a SCHEDULING query to check if the upstream - element(s) support pull-mode scheduling. If that is possible, you - can activate the sinkpad in pull-mode. Inside the activate_mode - function you can then start the task. 
- - -#include "filter.h" -#include <string.h> - -static gboolean gst_my_filter_activate (GstPad * pad, - GstObject * parent); -static gboolean gst_my_filter_activate_mode (GstPad * pad, - GstObject * parent, - GstPadMode mode, - gboolean active); -static void gst_my_filter_loop (GstMyFilter * filter); - -G_DEFINE_TYPE (GstMyFilter, gst_my_filter, GST_TYPE_ELEMENT); - - -static void -gst_my_filter_init (GstMyFilter * filter) -{ - -[..] - - gst_pad_set_activate_function (filter->sinkpad, gst_my_filter_activate); - gst_pad_set_activatemode_function (filter->sinkpad, - gst_my_filter_activate_mode); - - -[..] -} - -[..] - -static gboolean -gst_my_filter_activate (GstPad * pad, GstObject * parent) -{ - GstQuery *query; - gboolean pull_mode; - - /* first check what upstream scheduling is supported */ - query = gst_query_new_scheduling (); - - if (!gst_pad_peer_query (pad, query)) { - gst_query_unref (query); - goto activate_push; - } - - /* see if pull-mode is supported */ - pull_mode = gst_query_has_scheduling_mode_with_flags (query, - GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE); - gst_query_unref (query); - - if (!pull_mode) - goto activate_push; - - /* now we can activate in pull-mode. 
GStreamer will also - * activate the upstream peer in pull-mode */ - return gst_pad_activate_mode (pad, GST_PAD_MODE_PULL, TRUE); - -activate_push: - { - /* something not right, we fallback to push-mode */ - return gst_pad_activate_mode (pad, GST_PAD_MODE_PUSH, TRUE); - } -} - -static gboolean -gst_my_filter_activate_mode (GstPad * pad, - GstObject * parent, - GstPadMode mode, - gboolean active) -{ - gboolean res; - GstMyFilter *filter = GST_MY_FILTER (parent); - - switch (mode) { - case GST_PAD_MODE_PUSH: - res = TRUE; - break; - case GST_PAD_MODE_PULL: - if (active) { - filter->offset = 0; - res = gst_pad_start_task (pad, - (GstTaskFunction) gst_my_filter_loop, filter, NULL); - } else { - res = gst_pad_stop_task (pad); - } - break; - default: - /* unknown scheduling mode */ - res = FALSE; - break; - } - return res; -} - - - Once started, your task has full control over input and output. The - most simple case of a task function is one that reads input and pushes - that over its source pad. It's not all that useful, but provides some - more flexibility than the old push-mode case that we've been looking - at so far.
- - -#define BLOCKSIZE 2048 - -static void -gst_my_filter_loop (GstMyFilter * filter) -{ - GstFlowReturn ret; - guint64 len; - GstFormat fmt = GST_FORMAT_BYTES; - GstBuffer *buf = NULL; - - if (!gst_pad_query_duration (filter->sinkpad, fmt, &len)) { - GST_DEBUG_OBJECT (filter, "failed to query duration, pausing"); - goto stop; - } - - if (filter->offset >= len) { - GST_DEBUG_OBJECT (filter, "at end of input, sending EOS, pausing"); - gst_pad_push_event (filter->srcpad, gst_event_new_eos ()); - goto stop; - } - - /* now, read BLOCKSIZE bytes from byte offset filter->offset */ - ret = gst_pad_pull_range (filter->sinkpad, filter->offset, - BLOCKSIZE, &buf); - - if (ret != GST_FLOW_OK) { - GST_DEBUG_OBJECT (filter, "pull_range failed: %s", gst_flow_get_name (ret)); - goto stop; - } - - /* now push buffer downstream */ - ret = gst_pad_push (filter->srcpad, buf); - - buf = NULL; /* gst_pad_push() took ownership of buffer */ - - if (ret != GST_FLOW_OK) { - GST_DEBUG_OBJECT (filter, "pad_push failed: %s", gst_flow_get_name (ret)); - goto stop; - } - - /* everything is fine, increase offset and wait for us to be called again */ - filter->offset += BLOCKSIZE; - return; - -stop: - GST_DEBUG_OBJECT (filter, "pausing task"); - gst_pad_pause_task (filter->sinkpad); -} - - - - - - Providing random access - - In the previous section, we have talked about how elements (or pads) - that are activated to drive the pipeline using their own task, must use - pull-mode scheduling on their sinkpads. This means that all pads linked - to those pads need to be activated in pull-mode. - Source pads activated in pull-mode must implement a - _get_range ()-function set using - gst_pad_set_getrange_function (), and - that function will be called when the peer pad requests some data with - gst_pad_pull_range (). - The element is then responsible for seeking to the right offset and - providing the requested data. 
Several elements can implement random - access: - - - - - Data sources, such as a file source, that can provide data from any - offset with reasonable low latency. - - - - - Filters that would like to provide a pull-mode scheduling - over the whole pipeline. - - - - - Parsers who can easily provide this by skipping a small part of - their input and are thus essentially "forwarding" getrange - requests literally without any own processing involved. Examples - include tag readers (e.g. ID3) or single output parsers, such as - a WAVE parser. - - - - - The following example will show how a _get_range - ()-function can be implemented in a source element: - - -#include "filter.h" -static GstFlowReturn - gst_my_filter_get_range (GstPad * pad, - GstObject * parent, - guint64 offset, - guint length, - GstBuffer ** buf); - -G_DEFINE_TYPE (GstMyFilter, gst_my_filter, GST_TYPE_ELEMENT); - - - -static void -gst_my_filter_init (GstMyFilter * filter) -{ - -[..] - - gst_pad_set_getrange_function (filter->srcpad, - gst_my_filter_get_range); - -[..] -} - -static GstFlowReturn -gst_my_filter_get_range (GstPad * pad, - GstObject * parent, - guint64 offset, - guint length, - GstBuffer ** buf) -{ - - GstMyFilter *filter = GST_MY_FILTER (parent); - - [.. here, you would fill *buf ..] - - return GST_FLOW_OK; -} - - - - In practice, many elements that could theoretically do random access, - may in practice often be activated in push-mode scheduling anyway, - since there is no downstream element able to start its own task. - Therefore, in practice, those elements should implement both a - _get_range ()-function and a _chain - ()-function (for filters and parsers) or a _get_range - ()-function and be prepared to start their own task by - providing _activate_* ()-functions (for - source elements). 
- - - diff --git a/docs/pwg/advanced-tagging.xml b/docs/pwg/advanced-tagging.xml deleted file mode 100644 index d1e5393017..0000000000 --- a/docs/pwg/advanced-tagging.xml +++ /dev/null @@ -1,241 +0,0 @@ - - Tagging (Metadata and Streaminfo) - - - Overview - - Tags are pieces of information stored in a stream that are not the content - itself, but they rather describe the content. Most - media container formats support tagging in one way or another. Ogg uses - VorbisComment for this, MP3 uses ID3, AVI and WAV use RIFF's INFO list - chunk, etc. GStreamer provides a general way for elements to read tags from - the stream and expose this to the user. The tags (at least the metadata) - will be part of the stream inside the pipeline. The consequence of this is - that transcoding of files from one format to another will automatically - preserve tags, as long as the input and output format elements both support - tagging. - - - Tags are separated in two categories in GStreamer, even though applications - won't notice anything of this. The first are called metadata, - the second are called streaminfo. Metadata are tags - that describe the non-technical parts of stream content. They can be - changed without needing to re-encode the stream completely. Examples are - author, title or album. The - container format might still need to be re-written for the tags to fit in, - though. Streaminfo, on the other hand, are tags that describe the stream - contents technically. To change them, the stream needs to be re-encoded. - Examples are codec or bitrate. Note that some - container formats (like ID3) store various streaminfo tags as metadata in - the file container, which means that they can be changed so that they don't - match the content in the file any more. Still, they are called metadata - because technically, they can be changed without - re-encoding the whole stream, even though that makes them invalid. 
Files - with such metadata tags will have the same tag twice: once as metadata, - once as streaminfo. - - - There is no special name for tag reading elements in &GStreamer;. There are - specialised elements (e.g. id3demux) that do nothing besides tag reading, - but any &GStreamer; element may extract tags while processing data, and - most decoders, demuxers and parsers do. - - - A tag writer is called TagSetter. - An element supporting both can be used in a tag editor for quick tag - changing (note: in-place tag editing is still poorly supported at the time - of writing and usually requires tag extraction/stripping and remuxing of - the stream with new tags). - - - - - Reading Tags from Streams - - The basic object for tags is a GstTagList - . An element that is reading tags from a stream should - create an empty taglist and fill this with individual tags. Empty tag - lists can be created with gst_tag_list_new (). Then, - the element can fill the list using gst_tag_list_add () - or gst_tag_list_add_values (). - Note that elements often read metadata as strings, but the - values in the taglist might not necessarily be strings - they need to be - of the type the tag was registered as (the API documentation for each - predefined tag should contain the type). Be sure to use functions like - gst_value_transform () - to make sure that your data is of the right type. - After data reading, you can send the tags downstream with the TAG event. - When the TAG event reaches the sink, it will post the TAG message on - the pipeline's GstBus for the application to pick up. - - - We currently require the core to know the GType of tags before they are - being used, so all tags must be registered first. You can add new tags - to the list of known tags using gst_tag_register (). - If you think the tag will be useful in more cases than just your own - element, it might be a good idea to add it to gsttag.c - instead. That's up to you to decide. 
If you want to do it in your own - element, it's easiest to register the tag in one of your class init - functions, preferably _class_init (). - - - - - - - - Writing Tags to Streams - - Tag writers are the opposite of tag readers. Tag writers only take - metadata tags into account, since that's the only type of tags that have - to be written into a stream. Tag writers can receive tags in three ways: - internal, application and pipeline. Internal tags are tags read by the - element itself, which means that the tag writer is - in that case - a tag - reader, too. Application tags are tags provided to the element via the - TagSetter interface (which is just a layer). Pipeline tags are tags - provided to the element from within the pipeline. The element receives - such tags via the GST_EVENT_TAG event, which means - that tags writers should implement an event handler. The tag writer is - responsible for combining all these three into one list and writing them - to the output stream. - - - The example below will receive tags from both application and pipeline, - combine them and write them to the output stream. It implements the tag - setter so applications can set tags, and retrieves pipeline tags from - incoming events. - - - Warning, this example is outdated and doesn't work with the 1.0 version - of &GStreamer; anymore. 
srcpad, buf); - } - - g_value_unset (&to); -} - -static void -gst_my_filter_task_func (GstElement *element) -{ - GstMyFilter *filter = GST_MY_FILTER (element); - GstTagSetter *tagsetter = GST_TAG_SETTER (element); - GstData *data; - GstEvent *event; - gboolean eos = FALSE; - GstTagList *taglist = gst_tag_list_new (); - - while (!eos) { - data = gst_pad_pull (filter->sinkpad); - - /* We're not very much interested in data right now */ - if (GST_IS_BUFFER (data)) - gst_buffer_unref (GST_BUFFER (data)); - event = GST_EVENT (data); - - switch (GST_EVENT_TYPE (event)) { - case GST_EVENT_TAG: - gst_tag_list_insert (taglist, gst_event_tag_get_list (event), - GST_TAG_MERGE_PREPEND); - gst_event_unref (event); - break; - case GST_EVENT_EOS: - eos = TRUE; - gst_event_unref (event); - break; - default: - gst_pad_event_default (filter->sinkpad, event); - break; - } - } - - /* merge tags with the ones retrieved from the application */ - if (gst_tag_setter_get_tag_list (tagsetter)) { - gst_tag_list_insert (taglist, - gst_tag_setter_get_tag_list (tagsetter), - gst_tag_setter_get_tag_merge_mode (tagsetter)); - } - - /* write tags */ - gst_tag_list_foreach (taglist, gst_my_filter_write_tag, filter); - - /* signal EOS */ - gst_pad_push (filter->srcpad, gst_event_new (GST_EVENT_EOS)); -} -]]> - - - Note that normally, elements would not read the full stream before - processing tags. Rather, they would read from each sinkpad until they've - received data (since tags usually come in before the first data buffer) - and process that. - - - diff --git a/docs/pwg/advanced-types.xml b/docs/pwg/advanced-types.xml deleted file mode 100644 index bb1d149ce3..0000000000 --- a/docs/pwg/advanced-types.xml +++ /dev/null @@ -1,1431 +0,0 @@ - - - - Types and Properties - - There is a very large set of possible types that may be used to pass data - between elements.
Indeed, each new element that is defined may use a new - data format (though unless at least one other element recognises that - format, it will be most likely be useless since nothing will be able to - link with it). - - - In order for types to be useful, and for systems like autopluggers to - work, it is necessary that all elements agree on the type definitions, - and which properties are required for each type. The &GStreamer; framework - itself simply provides the ability to define types and parameters, but - does not fix the meaning of types and parameters, and does not enforce - standards on the creation of new types. This is a matter for a policy to - decide, not technical systems to enforce. - - - For now, the policy is simple: - - - - Do not create a new type if you could use one which already exists. - - - - - If creating a new type, discuss it first with the other &GStreamer; - developers, on at least one of: IRC, mailing lists. - - - - - Try to ensure that the name for a new format is as unlikely to - conflict with anything else created already, and is not a more - generalised name than it should be. For example: "audio/compressed" - would be too generalised a name to represent audio data compressed - with an mp3 codec. Instead "audio/mp3" might be an appropriate name, - or "audio/compressed" could exist and have a property indicating the - type of compression used. - - - - - Ensure that, when you do create a new type, you specify it clearly, - and get it added to the list of known types so that other developers - can use the type correctly when writing their elements. - - - - - - - - - Building a Simple Format for Testing - - If you need a new format that has not yet been defined in our , you will want to have some general - guidelines on media type naming, properties and such. 
A media type would - ideally be equivalent to the Mime-type defined by IANA; else, it should - be in the form type/x-name, where type is the sort of data this media type - handles (audio, video, ...) and name should be something specific for - this specific type. Audio and video media types should try to support the - general audio/video properties (see the list), and can use their own - properties, too. To get an idea of what properties we think are useful, - see (again) the list. - - - Take your time to find the right set of properties for your type. There - is no reason to hurry. Also, experimenting with this is generally a good - idea. Experience learns that theoretically thought-out types are good, - but they still need practical use to assure that they serve their needs. - Make sure that your property names do not clash with similar properties - used in other types. If they match, make sure they mean the same thing; - properties with different types but the same names are - not allowed. - - - - - - - Typefind Functions and Autoplugging - - With only defining the types, we're not yet there. - In order for a random data file to be recognized and played back as - such, we need a way of recognizing their type out of the blue. For this - purpose, typefinding was introduced. Typefinding is the - process of detecting the type of a data stream. Typefinding consists of - two separate parts: first, there's an unlimited number of functions - that we call typefind functions, which are each - able to recognize one or more types from an input stream. Then, - secondly, there's a small engine which registers and calls each of - those functions. This is the typefind core. On top of this typefind - core, you would normally write an autoplugger, which is able to use - this type detection system to dynamically build a pipeline around an - input stream. Here, we will focus only on typefind functions. 
- - - A typefind function usually lives in - gst-plugins-base/gst/typefind/gsttypefindfunctions.c, - unless there's a good reason (like library dependencies) to put it - elsewhere. The reason for this centralization is to reduce the - number of plugins that need to be loaded in order to detect a stream's - type. Below is an example that will recognize AVI files, which start - with a RIFF tag, then the size of the file and then an - AVI tag: - - -static void -gst_my_typefind_function (GstTypeFind *tf, - gpointer data) -{ - guint8 *data = gst_type_find_peek (tf, 0, 12); - - if (data && - GUINT32_FROM_LE (&((guint32 *) data)[0]) == GST_MAKE_FOURCC ('R','I','F','F') && - GUINT32_FROM_LE (&((guint32 *) data)[2]) == GST_MAKE_FOURCC ('A','V','I',' ')) { - gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, - gst_caps_new_simple ("video/x-msvideo", NULL)); - } -} - -static gboolean -plugin_init (GstPlugin *plugin) -{ - if (!gst_type_find_register (plugin, "", GST_RANK_PRIMARY, - gst_my_typefind_function, "avi", - gst_caps_new_simple ("video/x-msvideo", - NULL), NULL)) - return FALSE; -} - - - Note that - gst-plugins/gst/typefind/gsttypefindfunctions.c - has some simplification macros to decrease the amount of code. Make - good use of those if you want to submit typefinding patches with new - typefind functions. - - - Autoplugging has been discussed in great detail in the Application - Development Manual. - - - - - - - List of Defined Types - - Below is a list of all the defined types in &GStreamer;. They are split - up in separate tables for audio, video, container, subtitle and other - types, for the sake of readability. Below each table might follow a - list of notes that apply to that table. In the definition of each type, - we try to follow the types and rules as defined by - IANA for as far as possible. - - - Jump directly to a specific table: - - - - - - - - - - - - - - - - - - - - Note that many of the properties are not required, - but rather optional properties. 
This means that - most of these properties can be extracted from the container header, - but that - in case the container header does not provide these - they - can also be extracted by parsing the stream header or the stream - content. The policy is that your element should provide the data that - it knows about by only parsing its own content, not another element's - content. Example: the AVI header provides samplerate of the contained - audio stream in the header. MPEG system streams don't. This means that - an AVI stream demuxer would provide samplerate as a property for MPEG - audio streams, whereas an MPEG demuxer would not. A decoder needing - this data would require a stream parser in between two extract this - from the header or calculate it from the stream. - - - - Table of Audio Types - - - - - - - - Media Type - Description - Property - Property Type - Property Values - Property Description - - - - - - - - - - All audio types. - - - - - - - audio/* - - All audio types - - rate - integer - greater than 0 - - The sample rate of the data, in samples (per channel) per second. - - - - channels - integer - greater than 0 - - The number of channels of audio data. - - - - channel-mask - bitmask - - - Channel positions present. See GstAudioChannelPosition. - 0 means unpositioned. - - - - format - string - - S8 U8 S16LE S16BE U16LE U16BE S24_32LE S24_32BE U24_32LE U24_32BE S32LE S32BE U32LE U32BE - S24LE S24BE U24LE U24BE S20LE S20BE U20LE U20BE S18LE S18BE U18LE U18BE F32LE F32BE F64LE F64BE - - - The format of the sample data. - - - - layout - string - "interleaved" or "non-interleaved" - - Layout of channels within a buffer. - - - - - - - - All raw audio types. - - - - - - - audio/x-raw - - Unstructured and uncompressed raw audio data. - - - - - - All properties (except channel-mask, in the mono and stereo cases) are mandatory. - - - - - - - - All encoded audio types. - - - - - - - audio/x-ac3 - AC-3 or A52 audio streams. 
- - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-adpcm - ADPCM Audio streams. - layout - string - - quicktime, dvi, - microsoft or 4xm. - - - The layout defines the packing of the samples in the stream. In - ADPCM, most formats store multiple samples per channel together. - This number of samples differs per format, hence the different - layouts. On the long term, we probably want this variable to die - and use something more descriptive, but this will do for now. - - - - block_align - integer - - Any - - - Chunk buffer size. - - - - - - - audio/x-cinepak - Audio as provided in a Cinepak (Quicktime) stream. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-dv - Audio as provided in a Digital Video stream. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-flac - Free Lossless Audio codec (FLAC). - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-gsm - Data encoded by the GSM codec. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-alaw - A-Law Audio. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-mulaw - Mu-Law Audio. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-mace - MACE Audio (used in Quicktime). - maceversion - integer - 3 or 6 - - The version of the MACE audio codec used to encode the stream. - - - - - - - audio/mpeg - - Audio data compressed using the MPEG audio encoding scheme. - - mpegversion - integer - 1, 2 or 4 - - The MPEG-version used for encoding the data. The value 1 refers - to MPEG-1, -2 and -2.5 layer 1, 2 or 3. The values 2 and 4 refer - to the MPEG-AAC audio encoding schemes. 
- - - - framed - boolean - 0 or 1 - - A true value indicates that each buffer contains exactly one - frame. A false value indicates that frames and buffers do not - necessarily match up. - - - - layer - integer - 1, 2, or 3 - - The compression scheme layer used to compress the data - (only if mpegversion=1). - - - - bitrate - integer - greater than 0 - - The bitrate, in bits per second. For VBR (variable bitrate) - MPEG data, this is the average bitrate. - - - - - - - audio/x-qdm2 - Data encoded by the QDM version 2 codec. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-pn-realaudio - Realmedia Audio data. - raversion - integer - 1 or 2 - - The version of the Real Audio codec used to encode the stream. - 1 stands for a 14k4 stream, 2 stands for a 28k8 stream. - - - - - - - audio/x-speex - Data encoded by the Speex audio codec - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-vorbis - Vorbis audio data - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-wma - Windows Media Audio - wmaversion - integer - 1,2 or 3 - - The version of the WMA codec used to encode the stream. - - - - - - - audio/x-paris - Ensoniq PARIS audio - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-svx - Amiga IFF / SVX8 / SV16 audio - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-nist - Sphere NIST audio - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-voc - Sound Blaster VOC audio - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-ircam - Berkeley/IRCAM/CARL audio - - - - - There are currently no specific properties defined or needed for - this type. 
- - - - - - - audio/x-w64 - Sonic Foundry's 64 bit RIFF/WAV - - - - - There are currently no specific properties defined or needed for - this type. - - - - - -
- - - Table of Video Types - - - - - - - - Media Type - Description - Property - Property Type - Property Values - Property Description - - - - - - - - - - All video types. - - - - - - - video/* - - All video types - - width - integer - greater than 0 - The width of the video image - - - height - integer - greater than 0 - The height of the video image - - - framerate - fraction - greater or equal 0; default 0/1 - - The (average) framerate in frames per second. Note that this - property does not guarantee in any way that - it will actually come close to this value. If you need a fixed - framerate, please use an element that provides that (such as - videorate). 0/1 means a variable framerate. - - - - max-framerate - fraction - greater or equal 0; default as framerate - - For variable framerates, the maximum framerate that is expected. - Only valid when framerate is 0/1. - - - - views - integer - greater than 0; default 1 - - The number of views for multiview video. Each buffer contains - multiple GstVideoMeta buffers that describe each view. Use the - frame ID to get access to the different views. - - - - interlace-mode - string - progressive, interleaved, mixed, fields; default progressive - - The interlace mode. Extra buffer flags describe the frame and fields. - - - - chroma-site - string - jpeg, mpeg2, dv; default UNKNOWN - - The chroma siting of the video frames. - - - - colorimetry - string - bt601, bt709, smpte240m; default UNKNOWN - - The colorimetry of the video frames. - - - - pixel-aspect-ratio - fraction - greater than 0; default 1/1 - - The pixel aspect ratio of the video. - - - - format - string - - I420 YV12 YUY2 UYVY AYUV RGBx BGRx xRGB xBGR RGBA BGRA ARGB ABGR RGB BGR Y41B Y42B - YVYU Y444 v210 v216 NV12 NV21 GRAY8 GRAY16_BE GRAY16_LE - v308 RGB16 BGR16 RGB15 BGR15 UYVP A420 RGB8P YUV9 YVU9 - IYU1 ARGB64 AYUV64 r210 I420_10LE I420_10BE I422_10LE I422_10BE - - - The format of the video. See FourCC definition site - for references and definitions. 
YUY2, YVYU and UYVY are 4:2:2 - packed-pixel, Y41P is 4:1:1 packed-pixel and IYU2 is 4:4:4 - packed-pixel. Y42B is 4:2:2 planar, YV12 and I420 are 4:2:0 - planar, Y41B is 4:1:1 planar and YUV9 and YVU9 are 4:1:0 planar. - Y800 contains Y-samples only (black/white). - - - - - - - - All raw video types. - - - - - - - video/x-raw - Unstructured and uncompressed raw video data. - - - - - The properties width, height and format are mandatory. - - - - - - - - All encoded video types. - - - - - - - video/x-3ivx - 3ivx video. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - video/x-divx - DivX video. - divxversion - integer - 3, 4 or 5 - - Version of the DivX codec used to encode the stream. - - - - - - - video/x-dv - Digital Video. - systemstream - boolean - FALSE - - Indicates that this stream is not a system - container stream. - - - - - - - video/x-ffv - FFMpeg video. - ffvversion - integer - 1 - - Version of the FFMpeg video codec used to encode the stream. - - - - - - - video/x-h263 - H-263 video. - variant - string - itu, lead, microsoft, vdolive, vivo, xirlink - - Vendor specific variant of the format. 'itu' is the standard. - - - - h263version - string - h263, h263p, h263pp - - Enhanced versions of the h263 codec. - - - - - - - video/x-h264 - H-264 video. - variant - string - itu, videosoft - - Vendor specific variant of the format. 'itu' is the standard. - - - - - - - video/x-huffyuv - Huffyuv video. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - video/x-indeo - Indeo video. - indeoversion - integer - 3 - - Version of the Indeo codec used to encode this stream. - - - - - - - video/x-intel-h263 - H-263 video. - variant - string - intel - - Vendor specific variant of the format. - - - - - - - video/x-jpeg - Motion-JPEG video. - - - - - There are currently no specific properties defined or needed for - this type. 
Note that video/x-jpeg only applies to Motion-JPEG - pictures (YUY2 colourspace). RGB colourspace JPEG images are - referred to as image/jpeg (JPEG image). - - - - - - - video/mpeg - MPEG video. - mpegversion - integer - 1, 2 or 4 - - Version of the MPEG codec that this stream was encoded with. - Note that we have different media types for 3ivx, XviD, DivX and - "standard" ISO MPEG-4. This is not a good - thing and we're fully aware of this. However, we do not have a - solution yet. - - - - systemstream - boolean - FALSE - - Indicates that this stream is not a system - container stream. - - - - - - - video/x-msmpeg - Microsoft MPEG-4 video deviations. - msmpegversion - integer - 41, 42 or 43 - - Version of the MS-MPEG-4-like codec that was used to encode this - version. A value of 41 refers to MS MPEG 4.1, 42 to 4.2 and 43 - to version 4.3. - - - - - - - video/x-msvideocodec - Microsoft Video 1 (oldish codec). - msvideoversion - integer - 1 - - Version of the codec - always 1. - - - - - - - video/x-pn-realvideo - Realmedia video. - rmversion - integer - 1, 2 or 3 - - Version of the Real Video codec that this stream was encoded - with. - - - - - - - video/x-rle - RLE animation format. - layout - string - "microsoft" or "quicktime" - - The RLE format inside the Microsoft AVI container has a - different byte layout than the RLE format inside Apple's - Quicktime container; this property keeps track of the - layout. - - - - depth - integer - 1 to 64 - - Bit depth of the used palette. This means that the palette - that belongs to this format defines 2^depth colors. - - - - palette_data - GstBuffer - - - Buffer containing a color palette (in native-endian RGBA) used - by this format. The buffer is of size 4*2^depth. - - - - - - - video/x-svq - Sorensen Video. - svqversion - integer - 1 or 3 - - Version of the Sorensen codec that the stream was encoded with. - - - - - - - video/x-tarkin - Tarkin video. 
- - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - video/x-theora - Theora video. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - video/x-vp3 - VP-3 video. - - - - - There are currently no specific properties defined or needed for - this type. Note that we have different media types for VP-3 and - Theora, which is not necessarily a good idea. This could probably - be improved. - - - - - - - video/x-wmv - Windows Media Video - wmvversion - integer - 1,2 or 3 - - Version of the WMV codec that the stream was encoded with. - - - - - - - video/x-xvid - XviD video. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - - All image types. - - - - - - - image/gif - Graphics Interchange Format. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - image/jpeg - Joint Picture Expert Group Image. - - - - - There are currently no specific properties defined or needed for - this type. Note that image/jpeg only applies to RGB-colourspace - JPEG images; YUY2-colourspace JPEG pictures are referred to as - video/x-jpeg ("Motion JPEG"). - - - - - - - image/png - Portable Network Graphics Image. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - image/tiff - Tagged Image File Format. - - - - - There are currently no specific properties defined or needed for - this type. - - - - -
- - - Table of Container Types - - - - - - - - Media Type - Description - Property - Property Type - Property Values - Property Description - - - - - - - - - video/x-ms-asf - Advanced Streaming Format (ASF). - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - video/x-msvideo - AVI. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - video/x-dv - Digital Video. - systemstream - boolean - TRUE - - Indicates that this is a container system stream rather than an - elementary video stream. - - - - - - - video/x-matroska - Matroska. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - video/mpeg - Motion Pictures Expert Group System Stream. - systemstream - boolean - TRUE - - Indicates that this is a container system stream rather than an - elementary video stream. - - - - - - - application/ogg - Ogg. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - video/quicktime - Quicktime. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - application/vnd.rn-realmedia - RealMedia. - - - - - There are currently no specific properties defined or needed for - this type. - - - - - - - audio/x-wav - WAV. - - - - - There are currently no specific properties defined or needed for - this type. - - - - -
- - - Table of Subtitle Types - - - - - - - - Media Type - Description - Property - Property Type - Property Values - Property Description - - - - - - - - - - - - - - - None defined yet. - - - - -
- - - Table of Other Types - - - - - - - - Media Type - Description - Property - Property Type - Property Values - Property Description - - - - - - - - - - - - - - - None defined yet. - - - - -
-
-
diff --git a/docs/pwg/appendix-checklist.xml b/docs/pwg/appendix-checklist.xml deleted file mode 100644 index bd118c4038..0000000000 --- a/docs/pwg/appendix-checklist.xml +++ /dev/null @@ -1,195 +0,0 @@ - - Things to check when writing an element - - This chapter contains a fairly random selection of things to take care - of when writing an element. It's up to you how far you're going to stick - to those guidelines. However, keep in mind that when you're writing an - element and hope for it to be included in the mainstream &GStreamer; - distribution, it has to meet those requirements. - As far as possible, we will try to explain why those requirements are - set. - - - - About states - - - - - Make sure the state of an element gets reset when going to - NULL. Ideally, this should set all - object properties to their original state. This function - should also be called from _init. - - - - - Make sure an element forgets everything - about its contained stream when going from - PAUSED to READY. In - READY, all stream states are reset. An - element that goes from PAUSED to - READY and back to - PAUSED should start reading the - stream from the start again. - - - - - People that use gst-launch for testing have - the tendency to not care about cleaning up. This is - wrong. An element should be tested using - various applications, where testing not only means to make - sure it doesn't crash, but also to test for memory leaks - using tools such as valgrind. Elements have to - be reusable in a pipeline after having been reset. - - - - - - - Debugging - - - - - Elements should never use their standard - output for debugging (using functions such as printf - () or g_print ()). Instead, - elements should use the logging functions provided by &GStreamer;, - named GST_DEBUG (), - GST_LOG (), GST_INFO (), - GST_WARNING () and - GST_ERROR (). The various logging levels can - be turned on and off at runtime and can thus be used for solving - issues as they turn up. 
Instead of GST_LOG () - (as an example), you can also use GST_LOG_OBJECT - () to print the object that you're logging output for. - - - - - Ideally, elements should use their own debugging category. Most - elements use the following code to do that: - - -GST_DEBUG_CATEGORY_STATIC (myelement_debug); -#define GST_CAT_DEFAULT myelement_debug - -[..] - -static void -gst_myelement_class_init (GstMyelementClass *klass) -{ -[..] - GST_DEBUG_CATEGORY_INIT (myelement_debug, "myelement", - 0, "My own element"); -} - - - At runtime, you can turn on debugging using the commandline - option --gst-debug=myelement:5. - - - - - Elements should use GST_DEBUG_FUNCPTR when setting pad functions or - overriding element class methods, for example: - -gst_pad_set_event_func (myelement->srcpad, - GST_DEBUG_FUNCPTR (my_element_src_event)); - - This makes debug output much easier to read later on. - - - - - Elements that are aimed for inclusion into one of the GStreamer - modules should ensure consistent naming of the element name, - structures and function names. For example, if the element type is - GstYellowFooDec, functions should be prefixed with - gst_yellow_foo_dec_ and the element should be registered - as 'yellowfoodec'. Separate words should be separate in this scheme, - so it should be GstFooDec and gst_foo_dec, and not GstFoodec and - gst_foodec. - - - - - - - Querying, events and the like - - - - - All elements to which it applies (sources, sinks, demuxers) - should implement query functions on their pads, so that - applications and neighbour elements can request the current - position, the stream length (if known) and so on. - - - - - Elements should make sure they forward events they do not - handle with gst_pad_event_default (pad, parent, event) instead of - just dropping them. Events should never be dropped unless - specifically intended. 
- - - - - Elements should make sure they forward queries they do not - handle with gst_pad_query_default (pad, parent, query) instead of - just dropping them. - - - - - - - Testing your element - - - - - gst-launch is not a good - tool to show that your element is finished. Applications such as - Rhythmbox and Totem (for GNOME) or AmaroK (for KDE) - are. gst-launch will not - test various things such as proper clean-up on reset, event - handling, querying and so on. - - - - - Parsers and demuxers should make sure to check their input. Input - cannot be trusted. Prevent possible buffer overflows and the like. - Feel free to error out on unrecoverable stream errors. Test your - demuxer using stream corruption elements such as - breakmydata (included in gst-plugins). It - will randomly insert, delete and modify bytes in a stream, and is - therefore a good test for robustness. If your element crashes - when adding this element, your element needs fixing. If it errors - out properly, it's good enough. Ideally, it'd just continue to - work and forward data as much as possible. - - - - - Demuxers should not assume that seeking works. Be prepared to - work with unseekable input streams (e.g. network sources) as - well. - - - - - Sources and sinks should be prepared to be assigned another clock - then the one they expose themselves. Always use the provided clock - for synchronization, else you'll get A/V sync issues. - - - - - diff --git a/docs/pwg/appendix-licensing.xml b/docs/pwg/appendix-licensing.xml deleted file mode 100644 index ab1d192c57..0000000000 --- a/docs/pwg/appendix-licensing.xml +++ /dev/null @@ -1,38 +0,0 @@ - - GStreamer licensing - - How to license the code you write for <application>GStreamer</application> - -GStreamer is a plugin-based framework licensed under the LGPL. The reason -for this choice in licensing is to ensure that everyone can use GStreamer -to build applications using licenses of their choice. 
- - -To keep this policy viable, the GStreamer community has made a few -licensing rules for code to be included in GStreamer's core or GStreamer's -official modules, like our plugin packages. We require that all code going -into our core package is LGPL. For the plugin code, we require the use of -the LGPL for all plugins written from scratch or linking to external -libraries. The only exception to this is when plugins contain older code -under more liberal licenses (like the MPL or BSD). They can use those -licenses instead and will still be considered for inclusion. We do not -accept GPL code to be added to our plugins module, but we do accept -LGPL-licensed plugins using an external GPL library. The reason for -demanding plugins be licensed under the LGPL, even when using a GPL -library, is that other developers might want to use the plugin code as a -template for plugins linking to non-GPL libraries. - - -We also plan on splitting out the plugins using GPL libraries into a -separate package eventually and implement a system which makes sure an -application will not be able to access these plugins unless it uses some -special code to do so. The point of this is not to block GPL-licensed -plugins from being used and developed, but to make sure people are not -unintentionally violating the GPL license of said plugins. - - -This advisory is part of a bigger advisory with a FAQ which you can find -on the GStreamer website - - - diff --git a/docs/pwg/appendix-porting.xml b/docs/pwg/appendix-porting.xml deleted file mode 100644 index ab457a8ce7..0000000000 --- a/docs/pwg/appendix-porting.xml +++ /dev/null @@ -1,196 +0,0 @@ - - Porting 0.8 plug-ins to 0.10 - - This section of the appendix will discuss shortly what changes to - plugins will be needed to quickly and conveniently port most - applications from &GStreamer;-0.8 to &GStreamer;-0.10, with references - to the relevant sections in this Plugin Writer's Guide where needed. 
- With this list, it should be possible to port most plugins to - &GStreamer;-0.10 in less than a day. Exceptions are elements that will - require a base class in 0.10 (sources, sinks), in which case it may take - a lot longer, depending on the coder's skills (however, when using the - GstBaseSink and GstBaseSrc - base-classes, it shouldn't be all too bad), and elements requiring - the deprecated bytestream interface, which should take 1-2 days with - random access. The scheduling parts of muxers will also need a rewrite, - which will take about the same amount of time. - - - - List of changes - - - - Discont events have been replaced by newsegment events. In 0.10, it is - essential that you send a newsegment event downstream before you send - your first buffer (in 0.8 the scheduler would invent discont events if - you forgot them, in 0.10 this is no longer the case). - - - - - In 0.10, buffers have caps attached to them. Elements should allocate - new buffers with gst_pad_alloc_buffer (). See - for more details. - - - - - Most functions returning an object or an object property have - been changed to return its own reference rather than a constant - reference of the one owned by the object itself. The reason for - this change is primarily thread-safety. This means effectively - that return values of functions such as - gst_element_get_pad (), - gst_pad_get_name (), - gst_pad_get_parent (), - gst_object_get_parent (), - and many more like these - have to be free'ed or unreferenced after use. Check the API - references of each function to know for sure whether return - values should be free'ed or not. - - - - - In 0.8, scheduling could happen in any way. Source elements could - be _get ()-based or _loop - ()-based, and any other element could be _chain - ()-based or _loop ()-based, with - no limitations. Scheduling in 0.10 is simpler for the scheduler, - and the element is expected to do some more work. 
Pads get - assigned a scheduling mode, based on which they can either - operate in random access-mode, in pipeline driving mode or in - push-mode. all this is documented in detail in . As a result of this, the bytestream - object no longer exists. Elements requiring byte-level access should - now use random access on their sinkpads. - - - - - Negotiation is asynchronous. This means that downstream negotiation - is done as data comes in and upstream negotiation is done whenever - renegotiation is required. All details are described in - . - - - - - For as far as possible, elements should try to use existing base - classes in 0.10. Sink and source elements, for example, could derive - from GstBaseSrc and - GstBaseSink. Audio sinks or sources could even - derive from audio-specific base classes. All existing base classes - have been discussed in and the - next few chapters. - - - - - In 0.10, event handling and buffers are separated once again. This - means that in order to receive events, one no longer has to set the - GST_FLAG_EVENT_AWARE flag, but can simply - set an event handling function on the element's sinkpad(s), using - the function gst_pad_set_event_function (). The - _chain ()-function will only receive buffers. - - - - - Although core will wrap most threading-related locking for you (e.g. - it takes the stream lock before calling your data handling - functions), you are still responsible for locking around certain - functions, e.g. object properties. Be sure to lock properly here, - since applications will change those properties in a different thread - than the thread which does the actual data passing! You can use the - GST_OBJECT_LOCK () and GST_OBJECT_UNLOCK - () helpers in most cases, fortunately, which grabs the - default property lock of the element. - - - - - GstValueFixedList and all - *_fixed_list_* () functions were renamed to - GstValueArray and *_array_* - (). 
- - - - - The semantics of GST_STATE_PAUSED and - GST_STATE_PLAYING have changed for elements that - are not sink elements. Non-sink elements need to be able to accept - and process data already in the GST_STATE_PAUSED - state now (i.e. when prerolling the pipeline). More details can be - found in . - - - - - If your plugin's state change function hasn't been superseded by - virtual start() and stop() methods of one of the new base classes, - then your plugin's state change functions may need to be changed in - order to safely handle concurrent access by multiple threads. Your - typical state change function will now first handle upwards state - changes, then chain up to the state change function of the parent - class (usually GstElementClass in these cases), and only then handle - downwards state changes. See the vorbis decoder plugin in - gst-plugins-base for an example. - - - The reason for this is that in the case of downwards state changes - you don't want to destroy allocated resources while your plugin's - chain function (for example) is still accessing those resources in - another thread. Whether your chain function might be running or not - depends on the state of your plugin's pads, and the state of those - pads is closely linked to the state of the element. Pad states are - handled in the GstElement class's state change function, including - proper locking, that's why it is essential to chain up before - destroying allocated resources. - - - As already mentioned above, you should really rewrite your plugin - to derive from one of the new base classes though, so you don't have - to worry about these things, as the base class will handle it for you. - There are no base classes for decoders and encoders yet, so the above - paragraphs about state changes definitively apply if your plugin is a - decoder or an encoder. 
- - - - - gst_pad_set_link_function (), which used to set - a function that would be called when a format was negotiated between - two GstPads, now sets a function that is - called when two elements are linked together in an application. For - all practical purposes, you most likely want to use the function - gst_pad_set_setcaps_function (), nowadays, which - sets a function that is called when the format streaming over a pad - changes (so similar to _set_link_function () in - &GStreamer;-0.8). - - - If the element is derived from a GstBase class, - then override the set_caps (). - - - - - gst_pad_use_explicit_caps () has been replaced by - gst_pad_use_fixed_caps (). You can then set the - fixed caps to use on a pad with gst_pad_set_caps (). - - - - - - - - Porting 0.10 plug-ins to 1.0 - - You can find the list of changes in the - Porting to 1.0 document. - - diff --git a/docs/pwg/appendix-python.xml b/docs/pwg/appendix-python.xml deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/docs/pwg/base.css b/docs/pwg/base.css deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/docs/pwg/building-boiler.xml b/docs/pwg/building-boiler.xml deleted file mode 100644 index 251bf33173..0000000000 --- a/docs/pwg/building-boiler.xml +++ /dev/null @@ -1,466 +0,0 @@ - - - - Constructing the Boilerplate - - In this chapter you will learn how to construct the bare minimum code for a - new plugin. Starting from ground zero, you will see how to get the - &GStreamer; template source. Then you will learn how to use a few basic - tools to copy and modify a template plugin to create a new plugin. If you - follow the examples here, then by the end of this chapter you will have a - functional audio filter plugin that you can compile and use in &GStreamer; - applications. 
- - - - - - Getting the GStreamer Plugin Templates - - There are currently two ways to develop a new plugin for &GStreamer;: You - can write the entire plugin by hand, or you can copy an existing plugin - template and write the plugin code you need. The second method is by far - the simpler of the two, so the first method will not even be described - here. (Errm, that is, it is left as an exercise to the - reader.) - - - The first step is to check out a copy of the - gst-template git module to get an important tool and - the source code template for a basic &GStreamer; plugin. To check out the - gst-template module, make sure you are connected to - the internet, and type the following commands at a command console: - - -shell $ git clone git://anongit.freedesktop.org/gstreamer/gst-template.git -Initialized empty Git repository in /some/path/gst-template/.git/ -remote: Counting objects: 373, done. -remote: Compressing objects: 100% (114/114), done. -remote: Total 373 (delta 240), reused 373 (delta 240) -Receiving objects: 100% (373/373), 75.16 KiB | 78 KiB/s, done. -Resolving deltas: 100% (240/240), done. - - - This command will check out a series of files and directories into - gst-template. The template you - will be using is in the - gst-template/gst-plugin/ - directory. You should look over the files in that directory to get a - general idea of the structure of a source tree for a plugin. - - - If for some reason you can't access the git repository, you can also - - download a snapshot of the latest revision via the cgit web - interface. - - - - - - - Using the Project Stamp - - The first thing to do when making a new element is to specify some basic - details about it: what its name is, who wrote it, what version number it - is, etc. We also need to define an object to represent the element and to - store the data the element needs. These details are collectively known as - the boilerplate. 
- - - The standard way of defining the boilerplate is simply to write some code, - and fill in some structures. As mentioned in the previous section, the - easiest way to do this is to copy a template and add functionality - according to your needs. To help you do so, there is a tool in the - ./gst-plugin/tools/ directory. - This tool, make_element, is a command line utility - that creates the boilerplate code for you. - - - To use make_element, first open up a terminal window. - Change to the gst-template/gst-plugin/src - directory, and then run the make_element command. The - arguments to the make_element are: - - - - the name of the plugin, and - - - - the source file that the tool will use. By default, - gstplugin is used. - - - - - For example, the following commands create the MyFilter plugin based on - the plugin template and put the output files in the - gst-template/gst-plugin/src - directory: - - -shell $ cd gst-template/gst-plugin/src -shell $ ../tools/make_element MyFilter - - - - Capitalization is important for the name of the plugin. Keep in mind - that under some operating systems, capitalization is also important - when specifying directory and file names in general. - - - - The last command creates two files: - gstmyfilter.c and - gstmyfilter.h. - - - - It is recommended that you create a copy of the gst-plugin - directory before continuing. - - - - Now one needs to adjust the Makefile.am to use the - new filenames and run autogen.sh from the parent - directory to bootstrap the build environment. After that, the project - can be built and installed using the well known - make && sudo make install commands. - - - - Be aware that by default autogen.sh and - configure would choose /usr/local - as a default location. One would need to add - /usr/local/lib/gstreamer-1.0 - to GST_PLUGIN_PATH in order to make the new plugin - show up in a gstreamer that's been installed from packages. - - - - - FIXME: this section is slightly outdated. 
gst-template is still useful - as an example for a minimal plugin build system skeleton. However, for - creating elements the tool gst-element-maker from gst-plugins-bad is - recommended these days. - - - - - - - - Examining the Basic Code - - First we will examine the code you would be likely to place in a header - file (although since the interface to the code is entirely defined by the - plugin system, and doesn't depend on reading a header file, this is not - crucial.) - - - - Example Plugin Header File - -#include <gst/gst.h> - -/* Definition of structure storing data for this element. */ -typedef struct _GstMyFilter { - GstElement element; - - GstPad *sinkpad, *srcpad; - - gboolean silent; - - - -} GstMyFilter; - -/* Standard definition defining a class for this element. */ -typedef struct _GstMyFilterClass { - GstElementClass parent_class; -} GstMyFilterClass; - -/* Standard macros for defining types for this element. */ -#define GST_TYPE_MY_FILTER (gst_my_filter_get_type()) -#define GST_MY_FILTER(obj) \ - (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MY_FILTER,GstMyFilter)) -#define GST_MY_FILTER_CLASS(klass) \ - (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MY_FILTER,GstMyFilterClass)) -#define GST_IS_MY_FILTER(obj) \ - (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MY_FILTER)) -#define GST_IS_MY_FILTER_CLASS(klass) \ - (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MY_FILTER)) - -/* Standard function returning type information. */ -GType gst_my_filter_get_type (void); - - - - Using this header file, you can use the following macro to setup - the GObject basics in your source file so - that all functions will be called appropriately: - - -#include "filter.h" - -G_DEFINE_TYPE (GstMyFilter, gst_my_filter, GST_TYPE_ELEMENT); - - - - - - - Element metadata - - The Element metadata provides extra element information. 
It is configured - with gst_element_class_set_metadata or - gst_element_class_set_static_metadata which takes the - following parameters: - - - - A long, English, name for the element. - - The type of the element, see the docs/design/draft-klass.txt document - in the GStreamer core source tree for details and examples. - - A brief description of the purpose of the element. - - The name of the author of the element, optionally followed by a contact - email address in angle brackets. - - - - For example: - - -gst_element_class_set_static_metadata (klass, - "An example plugin", - "Example/FirstExample", - "Shows the basic structure of a plugin", - "your name <your.name@your.isp>"); - - - The element details are registered with the plugin during - the _class_init () function, which is part of - the GObject system. The _class_init () function - should be set for this GObject in the function where you register - the type with GLib. - - -static void -gst_my_filter_class_init (GstMyFilterClass * klass) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (klass); - -[..] - gst_element_class_set_static_metadata (element_klass, - "An example plugin", - "Example/FirstExample", - "Shows the basic structure of a plugin", - "your name <your.name@your.isp>"); - -} - - - - - - - GstStaticPadTemplate - - A GstStaticPadTemplate is a description of a pad that the element will - (or might) create and use. It contains: - - - - A short name for the pad. - - - Pad direction. - - - - Existence property. This indicates whether the pad exists always (an - always pad), only in some cases (a - sometimes pad) or only if the application requested - such a pad (a request pad). - - - - Supported types by this element (capabilities). 
- - - - For example: - - -static GstStaticPadTemplate sink_factory = -GST_STATIC_PAD_TEMPLATE ( - "sink", - GST_PAD_SINK, - GST_PAD_ALWAYS, - GST_STATIC_CAPS ("ANY") -); - - - - - Those pad templates are registered during the - _class_init () function with the - gst_element_class_add_pad_template (). For this - function you need a handle the GstPadTemplate - which you can create from the static pad template with - gst_static_pad_template_get (). See below for more - details on this. - - - Pads are created from these static templates in the element's - _init () function using - gst_pad_new_from_static_template (). - In order to create a new pad from this - template using gst_pad_new_from_static_template (), you - will need to declare the pad template as a global variable. More on - this subject in . - - -static GstStaticPadTemplate sink_factory = [..], - src_factory = [..]; - -static void -gst_my_filter_class_init (GstMyFilterClass * klass) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (klass); -[..] - - gst_element_class_add_pad_template (element_class, - gst_static_pad_template_get (&src_factory)); - gst_element_class_add_pad_template (element_class, - gst_static_pad_template_get (&sink_factory)); -} - - - - The last argument in a template is its type - or list of supported types. In this example, we use 'ANY', which means - that this element will accept all input. In real-life situations, you - would set a media type and optionally a set of properties to make sure - that only supported input will come in. This representation should be - a string that starts with a media type, then a set of comma-separates - properties with their supported values. In case of an audio filter that - supports raw integer 16-bit audio, mono or stereo at any samplerate, the - correct template would look like this: - - - - - - Values surrounded by curly brackets ({ and - }) are lists, values surrounded by square brackets - ([ and ]) are ranges. 
- Multiple sets of types are supported too, and should be separated by - a semicolon (;). Later, in the chapter on pads, we will - see how to use types to know the exact format of a stream: - . - - - - - - - Constructor Functions - - Each element has two functions which are used for construction of an - element. The _class_init() function, - which is used to initialise the class only once (specifying what signals, - arguments and virtual functions the class has and setting up global - state); and the _init() function, which is used to - initialise a specific instance of this type. - - - - - - - The plugin_init function - - Once we have written code defining all the parts of the plugin, we need to - write the plugin_init() function. This is a special function, which is - called as soon as the plugin is loaded, and should return TRUE or FALSE - depending on whether it loaded initialized any dependencies correctly. - Also, in this function, any supported element type in the plugin should - be registered. - - - - - - - - Note that the information returned by the plugin_init() function will be - cached in a central registry. For this reason, it is important that the - same information is always returned by the function: for example, it - must not make element factories available based on runtime conditions. - If an element can only work in certain conditions (for example, if the - soundcard is not being used by some other process) this must be reflected - by the element being unable to enter the READY state if unavailable, - rather than the plugin attempting to deny existence of the plugin. - - - diff --git a/docs/pwg/building-chainfn.xml b/docs/pwg/building-chainfn.xml deleted file mode 100644 index 137ca49632..0000000000 --- a/docs/pwg/building-chainfn.xml +++ /dev/null @@ -1,158 +0,0 @@ - - - - - The chain function - - The chain function is the function in which all data processing takes - place. 
In the case of a simple filter, _chain () - functions are mostly linear functions - so for each incoming buffer, - one buffer will go out, too. Below is a very simple implementation of - a chain function: - - - -static GstFlowReturn gst_my_filter_chain (GstPad *pad, - GstObject *parent, - GstBuffer *buf); - -[..] - -static void -gst_my_filter_init (GstMyFilter * filter) -{ -[..] - /* configure chain function on the pad before adding - * the pad to the element */ - gst_pad_set_chain_function (filter->sinkpad, - gst_my_filter_chain); -[..] -} - -static GstFlowReturn -gst_my_filter_chain (GstPad *pad, - GstObject *parent, - GstBuffer *buf) -{ - GstMyFilter *filter = GST_MY_FILTER (parent); - - if (!filter->silent) - g_print ("Have data of size %" G_GSIZE_FORMAT" bytes!\n", - gst_buffer_get_size (buf)); - - return gst_pad_push (filter->srcpad, buf); -} - - - - Obviously, the above doesn't do much useful. Instead of printing that the - data is in, you would normally process the data there. Remember, however, - that buffers are not always writeable. - - - In more advanced elements (the ones that do event processing), you may want - to additionally specify an event handling function, which will be called - when stream-events are sent (such as caps, end-of-stream, newsegment, tags, etc.). - - -static void -gst_my_filter_init (GstMyFilter * filter) -{ -[..] - gst_pad_set_event_function (filter->sinkpad, - gst_my_filter_sink_event); -[..] 
-} - - - -static gboolean -gst_my_filter_sink_event (GstPad *pad, - GstObject *parent, - GstEvent *event) -{ - GstMyFilter *filter = GST_MY_FILTER (parent); - - switch (GST_EVENT_TYPE (event)) { - case GST_EVENT_CAPS: - /* we should handle the format here */ - break; - case GST_EVENT_EOS: - /* end-of-stream, we should close down all stream leftovers here */ - gst_my_filter_stop_processing (filter); - break; - default: - break; - } - - return gst_pad_event_default (pad, parent, event); -} - -static GstFlowReturn -gst_my_filter_chain (GstPad *pad, - GstObject *parent, - GstBuffer *buf) -{ - GstMyFilter *filter = GST_MY_FILTER (parent); - GstBuffer *outbuf; - - outbuf = gst_my_filter_process_data (filter, buf); - gst_buffer_unref (buf); - if (!outbuf) { - /* something went wrong - signal an error */ - GST_ELEMENT_ERROR (GST_ELEMENT (filter), STREAM, FAILED, (NULL), (NULL)); - return GST_FLOW_ERROR; - } - - return gst_pad_push (filter->srcpad, outbuf); -} - - - - In some cases, it might be useful for an element to have control over the - input data rate, too. In that case, you probably want to write a so-called - loop-based element. Source elements (with only source - pads) can also be get-based elements. These concepts - will be explained in the advanced section of this guide, and in the section - that specifically discusses source pads. - - diff --git a/docs/pwg/building-eventfn.xml b/docs/pwg/building-eventfn.xml deleted file mode 100644 index b72e376bf3..0000000000 --- a/docs/pwg/building-eventfn.xml +++ /dev/null @@ -1,72 +0,0 @@ - - - - - The event function - - The event function notifies you of special events that happen in - the datastream (such as caps, end-of-stream, newsegment, tags, etc.). - Events can travel both upstream and downstream, so you can receive them - on sink pads as well as source pads. - - - Below follows a very simple event function that we install on the sink - pad of our element. - - -sinkpad, - gst_my_filter_sink_event); -[..] 
-} - -static gboolean -gst_my_filter_sink_event (GstPad *pad, - GstObject *parent, - GstEvent *event) -{ - gboolean ret; - GstMyFilter *filter = GST_MY_FILTER (parent); - - switch (GST_EVENT_TYPE (event)) { - case GST_EVENT_CAPS: - /* we should handle the format here */ - - /* push the event downstream */ - ret = gst_pad_push_event (filter->srcpad, event); - break; - case GST_EVENT_EOS: - /* end-of-stream, we should close down all stream leftovers here */ - gst_my_filter_stop_processing (filter); - - ret = gst_pad_event_default (pad, parent, event); - break; - default: - /* just call the default handler */ - ret = gst_pad_event_default (pad, parent, event); - break; - } - return ret; -} -]]> - - - It is a good idea to call the default event handler - gst_pad_event_default () for unknown events. - Depending on the event type, the default handler will forward - the event or simply unref it. The CAPS event is by default not - forwarded so we need to do this in the event handler ourselves. - - diff --git a/docs/pwg/building-pads.xml b/docs/pwg/building-pads.xml deleted file mode 100644 index 5e31648596..0000000000 --- a/docs/pwg/building-pads.xml +++ /dev/null @@ -1,146 +0,0 @@ - - - - - Specifying the pads - - As explained before, pads are the port through which data goes in and out - of your element, and that makes them a very important item in the process - of element creation. In the boilerplate code, we have seen how static pad - templates take care of registering pad templates with the element class. - Here, we will see how to create actual elements, use an _event - ()-function to configure for a particular format and how to - register functions to let data flow through the element. - - - In the element _init () function, you create the pad - from the pad template that has been registered with the element class in - the _class_init () function. 
After creating the pad, - you have to set a _chain () function pointer that will - receive and process the input data on the sinkpad. - You can optionally also set an _event () function - pointer and a _query () function pointer. - Alternatively, pads can also operate in looping mode, which means that they - can pull data themselves. More on this topic later. After that, you have - to register the pad with the element. This happens like this: - - - - - -static void -gst_my_filter_init (GstMyFilter *filter) -{ - /* pad through which data comes in to the element */ - filter->sinkpad = gst_pad_new_from_static_template ( - &sink_template, "sink"); - /* pads are configured here with gst_pad_set_*_function () */ - - - - gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad); - - /* pad through which data goes out of the element */ - filter->srcpad = gst_pad_new_from_static_template ( - &src_template, "src"); - /* pads are configured here with gst_pad_set_*_function () */ - - - - gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad); - - /* properties initial value */ - filter->silent = FALSE; -} - - - - - diff --git a/docs/pwg/building-props.xml b/docs/pwg/building-props.xml deleted file mode 100644 index 10f7d77b4c..0000000000 --- a/docs/pwg/building-props.xml +++ /dev/null @@ -1,169 +0,0 @@ - - - - Adding Properties - - The primary and most important way of controlling how an element behaves, - is through GObject properties. GObject properties are defined in the - _class_init () function. The element optionally - implements a _get_property () and a - _set_property () function. These functions will be - notified if an application changes or requests the value of a property, - and can then fill in the value or take action required for that property - to change value internally. - - - You probably also want to keep an instance variable around - with the currently configured value of the property that you use in the - get and set functions. 
- Note that GObject will not automatically set your - instance variable to the default value, you will have to do that in the - _init () function of your element. - - - -/* properties */ -enum { - PROP_0, - PROP_SILENT - /* FILL ME */ -}; - -static void gst_my_filter_set_property (GObject *object, - guint prop_id, - const GValue *value, - GParamSpec *pspec); -static void gst_my_filter_get_property (GObject *object, - guint prop_id, - GValue *value, - GParamSpec *pspec); - -static void -gst_my_filter_class_init (GstMyFilterClass *klass) -{ - GObjectClass *object_class = G_OBJECT_CLASS (klass); - - /* define virtual function pointers */ - object_class->set_property = gst_my_filter_set_property; - object_class->get_property = gst_my_filter_get_property; - - /* define properties */ - g_object_class_install_property (object_class, PROP_SILENT, - g_param_spec_boolean ("silent", "Silent", - "Whether to be very verbose or not", - FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); -} - -static void -gst_my_filter_set_property (GObject *object, - guint prop_id, - const GValue *value, - GParamSpec *pspec) -{ - GstMyFilter *filter = GST_MY_FILTER (object); - - switch (prop_id) { - case PROP_SILENT: - filter->silent = g_value_get_boolean (value); - g_print ("Silent argument was changed to %s\n", - filter->silent ? "true" : "false"); - break; - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; - } -} - -static void -gst_my_filter_get_property (GObject *object, - guint prop_id, - GValue *value, - GParamSpec *pspec) -{ - GstMyFilter *filter = GST_MY_FILTER (object); - - switch (prop_id) { - case PROP_SILENT: - g_value_set_boolean (value, filter->silent); - break; - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; - } -} - - - - The above is a very simple example of how properties are used. 
Graphical - applications will use these properties and will display a - user-controllable widget with which these properties can be changed. - This means that - for the property to be as user-friendly - as possible - you should be as exact as possible in the definition of the - property. Not only in defining ranges in between which valid properties - can be located (for integers, floats, etc.), but also in using very - descriptive (better yet: internationalized) strings in the definition of - the property, and if possible using enums and flags instead of integers. - The GObject documentation describes these in a very complete way, but - below, we'll give a short example of where this is useful. Note that using - integers here would probably completely confuse the user, because they - make no sense in this context. The example is stolen from videotestsrc. - - -typedef enum { - GST_VIDEOTESTSRC_SMPTE, - GST_VIDEOTESTSRC_SNOW, - GST_VIDEOTESTSRC_BLACK -} GstVideotestsrcPattern; - -[..] - -#define GST_TYPE_VIDEOTESTSRC_PATTERN (gst_videotestsrc_pattern_get_type ()) -static GType -gst_videotestsrc_pattern_get_type (void) -{ - static GType videotestsrc_pattern_type = 0; - - if (!videotestsrc_pattern_type) { - static GEnumValue pattern_types[] = { - { GST_VIDEOTESTSRC_SMPTE, "SMPTE 100% color bars", "smpte" }, - { GST_VIDEOTESTSRC_SNOW, "Random (television snow)", "snow" }, - { GST_VIDEOTESTSRC_BLACK, "0% Black", "black" }, - { 0, NULL, NULL }, - }; - - videotestsrc_pattern_type = - g_enum_register_static ("GstVideotestsrcPattern", - pattern_types); - } - - return videotestsrc_pattern_type; -} - -[..] - -static void -gst_videotestsrc_class_init (GstvideotestsrcClass *klass) -{ -[..] - g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_PATTERN, - g_param_spec_enum ("pattern", "Pattern", - "Type of test pattern to generate", - GST_TYPE_VIDEOTESTSRC_PATTERN, GST_VIDEOTESTSRC_SMPTE, - G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); -[..] 
-} - - diff --git a/docs/pwg/building-queryfn.xml b/docs/pwg/building-queryfn.xml deleted file mode 100644 index 2f8816318a..0000000000 --- a/docs/pwg/building-queryfn.xml +++ /dev/null @@ -1,72 +0,0 @@ - - - - - The query function - - Through the query function, your element will receive queries that it - has to reply to. These are queries like position, duration but also - about the supported formats and scheduling modes your element supports. - Queries can travel both upstream and downstream, so you can receive them - on sink pads as well as source pads. - - - Below follows a very simple query function that we install on the source - pad of our element. - - -srcpad, - gst_my_filter_src_query); -[..] -} - -static gboolean -gst_my_filter_src_query (GstPad *pad, - GstObject *parent, - GstQuery *query) -{ - gboolean ret; - GstMyFilter *filter = GST_MY_FILTER (parent); - - switch (GST_QUERY_TYPE (query)) { - case GST_QUERY_POSITION: - /* we should report the current position */ - [...] - break; - case GST_QUERY_DURATION: - /* we should report the duration here */ - [...] - break; - case GST_QUERY_CAPS: - /* we should report the supported caps here */ - [...] - break; - default: - /* just call the default handler */ - ret = gst_pad_query_default (pad, parent, query); - break; - } - return ret; -} -]]> - - - It is a good idea to call the default query handler - gst_pad_query_default () for unknown queries. - Depending on the query type, the default handler will forward - the query or simply unref it. - - diff --git a/docs/pwg/building-signals.xml b/docs/pwg/building-signals.xml deleted file mode 100644 index 73be168e9c..0000000000 --- a/docs/pwg/building-signals.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - - - Signals - - GObject signals can be used to notify applications of events specific - to this object. 
Note, however, that the application needs to be aware - of signals and their meaning, so if you're looking for a generic way - for application-element interaction, signals are probably not what - you're looking for. In many cases, however, signals can be very useful. - See the GObject - documentation for all internals about signals. - - diff --git a/docs/pwg/building-state.xml b/docs/pwg/building-state.xml deleted file mode 100644 index f07ccefbbc..0000000000 --- a/docs/pwg/building-state.xml +++ /dev/null @@ -1,184 +0,0 @@ - - What are states? - - A state describes whether the element instance is initialized, whether it - is ready to transfer data and whether it is currently handling data. There - are four states defined in &GStreamer;: - - - - - GST_STATE_NULL - - - - - GST_STATE_READY - - - - - GST_STATE_PAUSED - - - - - GST_STATE_PLAYING - - - - - which will from now on be referred to simply as NULL, - READY, PAUSED and PLAYING. - - - GST_STATE_NULL is the default state of an element. In this state, it - has not allocated any runtime resources, it has not loaded any runtime - libraries and it can obviously not handle data. - - - GST_STATE_READY is the next state that an element can be in. In the - READY state, an element has all default resources (runtime-libraries, - runtime-memory) allocated. However, it has not yet allocated or defined - anything that is stream-specific. When going from NULL to READY state - (GST_STATE_CHANGE_NULL_TO_READY), an element should - allocate any non-stream-specific resources and should load runtime-loadable - libraries (if any). When going the other way around (from READY to NULL, - GST_STATE_CHANGE_READY_TO_NULL), an element should unload - these libraries and free all allocated resources. Examples of such - resources are hardware devices. Note that files are generally streams, - and these should thus be considered as stream-specific resources; therefore, - they should not be allocated in this state. 
- - - GST_STATE_PAUSED is the state in which an element is - ready to accept and handle data. For most elements this state is the same - as PLAYING. The only exception to this rule are sink elements. Sink - elements only accept one single buffer of data and then block. At this - point the pipeline is 'prerolled' and ready to render data immediately. - - - GST_STATE_PLAYING is the highest state that an element - can be in. For most elements this state is exactly the same as PAUSED, - they accept and process events and buffers with data. Only sink elements - need to differentiate between PAUSED and PLAYING state. In PLAYING state, - sink elements actually render incoming data, e.g. output audio to a sound - card or render video pictures to an image sink. - - - - Managing filter state - - If at all possible, your element should derive from one of the new base - classes (). There are ready-made - general purpose base classes for different types of sources, sinks and - filter/transformation elements. In addition to those, specialised base - classes exist for audio and video elements and others. - - - If you use a base class, you will rarely have to handle state changes - yourself. All you have to do is override the base class's start() and - stop() virtual functions (might be called differently depending on the - base class) and the base class will take care of everything for you. - - - If, however, you do not derive from a ready-made base class, but from - GstElement or some other class not built on top of a base class, you - will most likely have to implement your own state change function to - be notified of state changes. This is definitively necessary if your - plugin is a demuxer or a muxer, as there are no base classes for - muxers or demuxers yet. - - - An element can be notified of state changes through a virtual function - pointer. 
Inside this function, the element can initialize any sort of - specific data needed by the element, and it can optionally fail to - go from one state to another. - - - Do not g_assert for unhandled state changes; this is taken care of by - the GstElement base class. - - -static GstStateChangeReturn -gst_my_filter_change_state (GstElement *element, GstStateChange transition); - -static void -gst_my_filter_class_init (GstMyFilterClass *klass) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (klass); - - element_class->change_state = gst_my_filter_change_state; -} - - - -static GstStateChangeReturn -gst_my_filter_change_state (GstElement *element, GstStateChange transition) -{ - GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS; - GstMyFilter *filter = GST_MY_FILTER (element); - - switch (transition) { - case GST_STATE_CHANGE_NULL_TO_READY: - if (!gst_my_filter_allocate_memory (filter)) - return GST_STATE_CHANGE_FAILURE; - break; - default: - break; - } - - ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); - if (ret == GST_STATE_CHANGE_FAILURE) - return ret; - - switch (transition) { - case GST_STATE_CHANGE_READY_TO_NULL: - gst_my_filter_free_memory (filter); - break; - default: - break; - } - - return ret; -} - - - - Note that upwards (NULL=>READY, READY=>PAUSED, PAUSED=>PLAYING) - and downwards (PLAYING=>PAUSED, PAUSED=>READY, READY=>NULL) state - changes are handled in two separate blocks with the downwards state change - handled only after we have chained up to the parent class's state - change function. This is necessary in order to safely handle concurrent - access by multiple threads. - - - The reason for this is that in the case of downwards state changes - you don't want to destroy allocated resources while your plugin's - chain function (for example) is still accessing those resources in - another thread. 
Whether your chain function might be running or not - depends on the state of your plugin's pads, and the state of those - pads is closely linked to the state of the element. Pad states are - handled in the GstElement class's state change function, including - proper locking, that's why it is essential to chain up before - destroying allocated resources. - - - diff --git a/docs/pwg/building-testapp.xml b/docs/pwg/building-testapp.xml deleted file mode 100644 index 43a16f9575..0000000000 --- a/docs/pwg/building-testapp.xml +++ /dev/null @@ -1,216 +0,0 @@ - - - - Building a Test Application - - Often, you will want to test your newly written plugin in an as small - setting as possible. Usually, gst-launch-1.0 is a - good first step at testing a plugin. If you have not installed your - plugin in a directory that GStreamer searches, then you will need to - set the plugin path. Either set GST_PLUGIN_PATH to the directory - containing your plugin, or use the command-line option --gst-plugin-path. - If you based your plugin off of the gst-plugin template, then this - will look something like - - gst-launch-1.0 --gst-plugin-path=$HOME/gst-template/gst-plugin/src/.libs TESTPIPELINE - - However, you will often need more - testing features than gst-launch-1.0 can provide, such as seeking, events, - interactivity and more. Writing your own small testing program is the - easiest way to accomplish this. This section explains - in a few words - - how to do that. For a complete application development guide, see the - Application Development - Manual. - - - - At the start, you need to initialize the &GStreamer; core library by - calling gst_init (). You can alternatively call - gst_init_get_option_group (), which will return - a pointer to GOptionGroup. You can then use GOption to handle the - initialization, and this will finish the &GStreamer; initialization. 
- - - - You can create elements using gst_element_factory_make (), - where the first argument is the element type that you want to create, - and the second argument is a free-form name. The example at the end uses - a simple filesource - decoder - soundcard output pipeline, but you can - use specific debugging elements if that's necessary. For example, an - identity element can be used in the middle of - the pipeline to act as a data-to-application transmitter. This can be - used to check the data for misbehaviours or correctness in your test - application. Also, you can use a fakesink - element at the end of the pipeline to dump your data to the stdout - (in order to do this, set the dump property to - TRUE). Lastly, you can use valgrind to check for memory errors. - - - - During linking, your test application can use filtered caps - as a way to drive a specific type of data to or from your element. This - is a very simple and effective way of checking multiple types of input - and output in your element. - - - - Note that during running, you should listen for at least the - error and eos messages on the bus - and/or your plugin/element to check for correct handling of this. Also, - you should add events into the pipeline and make sure your plugin handles - these correctly (with respect to clocking, internal caching, etc.). - - - - Never forget to clean up memory in your plugin or your test application. - When going to the NULL state, your element should clean up allocated - memory and caches. Also, it should close down any references held to - possible support libraries. Your application should unref () - the pipeline and make sure it doesn't crash. 
- - - -#include <gst/gst.h> - -static gboolean -bus_call (GstBus *bus, - GstMessage *msg, - gpointer data) -{ - GMainLoop *loop = data; - - switch (GST_MESSAGE_TYPE (msg)) { - case GST_MESSAGE_EOS: - g_print ("End-of-stream\n"); - g_main_loop_quit (loop); - break; - case GST_MESSAGE_ERROR: { - gchar *debug = NULL; - GError *err = NULL; - - gst_message_parse_error (msg, &err, &debug); - - g_print ("Error: %s\n", err->message); - g_error_free (err); - - if (debug) { - g_print ("Debug details: %s\n", debug); - g_free (debug); - } - - g_main_loop_quit (loop); - break; - } - default: - break; - } - - return TRUE; -} - -gint -main (gint argc, - gchar *argv[]) -{ - GstStateChangeReturn ret; - GstElement *pipeline, *filesrc, *decoder, *filter, *sink; - GstElement *convert1, *convert2, *resample; - GMainLoop *loop; - GstBus *bus; - guint watch_id; - - /* initialization */ - gst_init (&argc, &argv); - loop = g_main_loop_new (NULL, FALSE); - if (argc != 2) { - g_print ("Usage: %s <mp3 filename>\n", argv[0]); - return 01; - } - - /* create elements */ - pipeline = gst_pipeline_new ("my_pipeline"); - - /* watch for messages on the pipeline's bus (note that this will only - * work like this when a GLib main loop is running) */ - bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); - watch_id = gst_bus_add_watch (bus, bus_call, loop); - gst_object_unref (bus); - - filesrc = gst_element_factory_make ("filesrc", "my_filesource"); - decoder = gst_element_factory_make ("mad", "my_decoder"); - - /* putting an audioconvert element here to convert the output of the - * decoder into a format that my_filter can handle (we are assuming it - * will handle any sample rate here though) */ - convert1 = gst_element_factory_make ("audioconvert", "audioconvert1"); - - /* use "identity" here for a filter that does nothing */ - filter = gst_element_factory_make ("my_filter", "my_filter"); - - /* there should always be audioconvert and audioresample elements before - * the audio sink, since the 
capabilities of the audio sink usually vary - * depending on the environment (output used, sound card, driver etc.) */ - convert2 = gst_element_factory_make ("audioconvert", "audioconvert2"); - resample = gst_element_factory_make ("audioresample", "audioresample"); - sink = gst_element_factory_make ("pulsesink", "audiosink"); - - if (!sink || !decoder) { - g_print ("Decoder or output could not be found - check your install\n"); - return -1; - } else if (!convert1 || !convert2 || !resample) { - g_print ("Could not create audioconvert or audioresample element, " - "check your installation\n"); - return -1; - } else if (!filter) { - g_print ("Your self-written filter could not be found. Make sure it " - "is installed correctly in $(libdir)/gstreamer-1.0/ or " - "~/.gstreamer-1.0/plugins/ and that gst-inspect-1.0 lists it. " - "If it doesn't, check with 'GST_DEBUG=*:2 gst-inspect-1.0' for " - "the reason why it is not being loaded."); - return -1; - } - - g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL); - - gst_bin_add_many (GST_BIN (pipeline), filesrc, decoder, convert1, filter, - convert2, resample, sink, NULL); - - /* link everything together */ - if (!gst_element_link_many (filesrc, decoder, convert1, filter, convert2, - resample, sink, NULL)) { - g_print ("Failed to link one or more elements!\n"); - return -1; - } - - /* run */ - ret = gst_element_set_state (pipeline, GST_STATE_PLAYING); - if (ret == GST_STATE_CHANGE_FAILURE) { - GstMessage *msg; - - g_print ("Failed to start up pipeline!\n"); - - /* check if there is an error message with details on the bus */ - msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0); - if (msg) { - GError *err = NULL; - - gst_message_parse_error (msg, &err, NULL); - g_print ("ERROR: %s\n", err->message); - g_error_free (err); - gst_message_unref (msg); - } - return -1; - } - - g_main_loop_run (loop); - - /* clean up */ - gst_element_set_state (pipeline, GST_STATE_NULL); - gst_object_unref (pipeline); - g_source_remove 
(watch_id); - g_main_loop_unref (loop); - - return 0; -} - - diff --git a/docs/pwg/intro-basics.xml b/docs/pwg/intro-basics.xml deleted file mode 100644 index de065d27d3..0000000000 --- a/docs/pwg/intro-basics.xml +++ /dev/null @@ -1,395 +0,0 @@ - - - - Foundations - - This chapter of the guide introduces the basic concepts of &GStreamer;. - Understanding these concepts will help you grok the issues involved in - extending &GStreamer;. Many of these concepts are explained in greater - detail in the &GstAppDevMan;; the basic concepts presented here serve mainly - to refresh your memory. - - - - - - Elements and Plugins - - Elements are at the core of &GStreamer;. In the context of plugin - development, an element is an object derived from the - - GstElement class. Elements provide some sort of - functionality when linked with other elements: For example, a source - element provides data to a stream, and a filter element acts on the data - in a stream. Without elements, &GStreamer; is just a bunch of conceptual - pipe fittings with nothing to link. A large number of elements ship - with &GStreamer;, but extra elements can also be written. - - - Just writing a new element is not entirely enough, however: You will need - to encapsulate your element in a plugin to enable - &GStreamer; to use it. A plugin is essentially a loadable block of code, - usually called a shared object file or a dynamically linked library. A - single plugin may contain the implementation of several elements, or just - a single one. For simplicity, this guide concentrates primarily on plugins - containing one element. - - - A filter is an important type of element that - processes a stream of data. Producers and consumers of data are called - source and sink elements, - respectively. Bin elements contain other elements. - One type of bin is responsible for synchronization of the elements that they - contain so that data flows smoothly. 
Another type of bin, called - autoplugger elements, automatically add other - elements to the bin and links them together so that they act as a - filter between two arbitrary stream types. - - - The plugin mechanism is used everywhere in &GStreamer;, even if only the - standard packages are being used. A few very basic functions reside in the - core library, and all others are implemented in plugins. A plugin registry - is used to store the details of the plugins in an binary registry file. - This way, a program using &GStreamer; does not have to load all plugins to - determine which are needed. Plugins are only loaded when their provided - elements are requested. - - - See the &GstLibRef; for the current implementation details of GstElement - and GstPlugin. - - - - - - - Pads - - Pads are used to negotiate links and data flow - between elements in &GStreamer;. A pad can be viewed as a - place or port on an element where - links may be made with other elements, and through which data can - flow to or from those elements. Pads have specific data handling - capabilities: A pad can restrict the type of data that flows - through it. Links are only allowed between two pads when the - allowed data types of the two pads are compatible. - - - An analogy may be helpful here. A pad is similar to a plug or jack on a - physical device. Consider, for example, a home theater system consisting - of an amplifier, a DVD player, and a (silent) video projector. Linking - the DVD player to the amplifier is allowed because both devices have audio - jacks, and linking the projector to the DVD player is allowed because - both devices have compatible video jacks. Links between the - projector and the amplifier may not be made because the projector and - amplifier have different types of jacks. Pads in &GStreamer; serve the - same purpose as the jacks in the home theater system. - - - For the most part, all data in &GStreamer; flows one way through a link - between elements. 
Data flows out of one element through one or more - source pads, and elements accept incoming data through - one or more sink pads. Source and sink elements have - only source and sink pads, respectively. - - - See the &GstLibRef; for the current implementation details of a GstPad. - - - - - - - GstMiniObject, Buffers and Events - - All streams of data in &GStreamer; are chopped up into chunks that are - passed from a source pad on one element to a sink pad on another element. - GstMiniObject is the structure used to hold these - chunks of data. - - - GstMiniObject contains the following important types: - - - - An exact type indicating what type of data (event, buffer, ...) - this GstMiniObject is. - - - - - A reference count indicating the number of elements currently - holding a reference to the miniobject. When the reference count - falls to zero, the miniobject will be disposed, and its memory will be - freed in some sense (see below for more details). - - - - - - For data transport, there are two types of GstMiniObject defined: - events (control) and buffers (content). - - - Buffers may contain any sort of data that the two linked pads - know how to handle. Normally, a buffer contains a chunk of some sort of - audio or video data that flows from one element to another. - - - Buffers also contain metadata describing the buffer's contents. Some of - the important types of metadata are: - - - - Pointers to one or more GstMemory objects. GstMemory objects are - refcounted objects that encapsulate a region of memory. - - - - - A timestamp indicating the preferred display timestamp of the - content in the buffer. - - - - - - Events - contain information on the state of the stream flowing between the two - linked pads. Events will only be sent if the element explicitly supports - them, else the core will (try to) handle the events automatically. Events - are used to indicate, for example, a media type, the end of a - media stream or that the cache should be flushed. 
- - - Events may contain several of the following items: - - - - A subtype indicating the type of the contained event. - - - - - The other contents of the event depend on the specific event type. - - - - - - Events will be discussed extensively in . - Until then, the only event that will be used is the EOS - event, which is used to indicate the end-of-stream (usually end-of-file). - - - See the &GstLibRef; for the current implementation details of a GstMiniObject, GstBuffer and GstEvent. - - - - Buffer Allocation - - Buffers are able to store chunks of memory of several different - types. The most generic type of buffer contains memory allocated - by malloc(). Such buffers, although convenient, are not always - very fast, since data often needs to be specifically copied into - the buffer. - - - Many specialized elements create buffers that point to special - memory. For example, the filesrc element usually - maps a file into the address space of the application (using mmap()), - and creates buffers that point into that address range. These - buffers created by filesrc act exactly like generic buffers, except - that they are read-only. The buffer freeing code automatically - determines the correct method of freeing the underlying memory. - Downstream elements that receive these kinds of buffers do not - need to do anything special to handle or unreference it. - - - Another way an element might get specialized buffers is to - request them from a downstream peer through a GstBufferPool or - GstAllocator. Elements can ask a GstBufferPool or GstAllocator - from the downstream peer element. If downstream is able to provide - these objects, upstream can use them to allocate buffers. - See more in . - - - Many sink elements have accelerated methods for copying data - to hardware, or have direct access to hardware. It is common - for these elements to be able to create a GstBufferPool or - GstAllocator for their upstream peers. One such example is - ximagesink. 
It creates buffers that contain XImages. Thus, - when an upstream peer copies data into the buffer, it is copying - directly into the XImage, enabling ximagesink to draw the - image directly to the screen instead of having to copy data - into an XImage first. - - - Filter elements often have the opportunity to either work on - a buffer in-place, or work while copying from a source buffer - to a destination buffer. It is optimal to implement both - algorithms, since the &GStreamer; framework can choose the - fastest algorithm as appropriate. Naturally, this only makes - sense for strict filters -- elements that have exactly the - same format on source and sink pads. - - - - - - - - Media types and Properties - - &GStreamer; uses a type system to ensure that the data passed between - elements is in a recognized format. The type system is also important - for ensuring that the parameters required to fully specify a format match - up correctly when linking pads between elements. Each link that is - made between elements has a specified type and optionally a set of - properties. See more about caps negotiation in - . - - - - - - The Basic Types - - &GStreamer; already supports many basic media types. Following is a - table of a few of the basic types used for buffers in - &GStreamer;. The table contains the name ("media type") and a - description of the type, the properties associated with the type, and - the meaning of each property. A full list of supported types is - included in . - - - - Table of Example Types - - - - - Media Type - Description - Property - Property Type - Property Values - Property Description - - - - - - - - - audio/* - - All audio types - - rate - integer - greater than 0 - - The sample rate of the data, in samples (per channel) per second. - - - - channels - integer - greater than 0 - - The number of channels of audio data. - - - - - - - audio/x-raw - - Unstructured and uncompressed raw integer audio data. 
- - format - string - - S8 U8 S16LE S16BE U16LE U16BE S24_32LE S24_32BE U24_32LE U24_32BE S32LE S32BE U32LE U32BE - S24LE S24BE U24LE U24BE S20LE S20BE U20LE U20BE S18LE S18BE U18LE U18BE F32LE F32BE F64LE F64BE - - - The format of the sample data. - - - - - - - audio/mpeg - - Audio data compressed using the MPEG audio encoding scheme. - - mpegversion - integer - 1, 2 or 4 - - The MPEG-version used for encoding the data. The value 1 refers - to MPEG-1, -2 and -2.5 layer 1, 2 or 3. The values 2 and 4 refer - to the MPEG-AAC audio encoding schemes. - - - - framed - boolean - 0 or 1 - - A true value indicates that each buffer contains exactly one - frame. A false value indicates that frames and buffers do not - necessarily match up. - - - - layer - integer - 1, 2, or 3 - - The compression scheme layer used to compress the data - (only if mpegversion=1). - - - - bitrate - integer - greater than 0 - - The bitrate, in bits per second. For VBR (variable bitrate) - MPEG data, this is the average bitrate. - - - - - - - audio/x-vorbis - Vorbis audio data - - - - - There are currently no specific properties defined for this type. - - - - -
-
-
-
diff --git a/docs/pwg/intro-preface.xml b/docs/pwg/intro-preface.xml deleted file mode 100644 index 40e6541ee1..0000000000 --- a/docs/pwg/intro-preface.xml +++ /dev/null @@ -1,296 +0,0 @@ - - - - - Preface - - - - - What is &GStreamer;? - - &GStreamer; is a framework for creating streaming media applications. - The fundamental design comes from the video pipeline at Oregon Graduate - Institute, as well as some ideas from DirectShow. - - - - &GStreamer;'s development framework makes it possible to write any - type of streaming multimedia application. The &GStreamer; framework - is designed to make it easy to write applications that handle audio - or video or both. It isn't restricted to audio and video, and can - process any kind of data flow. - The pipeline design is made to have little overhead above what the - applied filters induce. This makes &GStreamer; a good framework for - designing even high-end audio applications which put high demands on - latency or performance. - - - - One of the most obvious uses of &GStreamer; is using it to build - a media player. &GStreamer; already includes components for building a - media player that can support a very wide variety of formats, including - MP3, Ogg/Vorbis, MPEG-1/2, AVI, Quicktime, mod, and more. &GStreamer;, - however, is much more than just another media player. Its main advantages - are that the pluggable components can be mixed and matched into arbitrary - pipelines so that it's possible to write a full-fledged video or audio - editing application. - - - - The framework is based on plugins that will provide the various codec - and other functionality. The plugins can be linked and arranged in - a pipeline. This pipeline defines the flow of the data. - - - - The &GStreamer; core function is to provide a framework for plugins, - data flow, synchronization and media type handling/negotiation. It - also provides an API to write applications using the various plugins. - - - - - - - Who Should Read This Guide? 
- - This guide explains how to write new modules for &GStreamer;. The guide is - relevant to several groups of people: - - - - - Anyone who wants to add support for new ways of processing data in - &GStreamer;. For example, a person in this group might want to create - a new data format converter, a new visualization tool, or a new - decoder or encoder. - - - - - Anyone who wants to add support for new input and output devices. For - example, people in this group might want to add the ability to write - to a new video output system or read data from a digital camera or - special microphone. - - - - - Anyone who wants to extend &GStreamer; in any way. You need to have an - understanding of how the plugin system works before you can understand - the constraints that the plugin system places on the rest of the code. - Also, you might be surprised after reading this at how much can be - done with plugins. - - - - - This guide is not relevant to you if you only want to use the existing - functionality of &GStreamer;, or if you just want to use an application - that uses &GStreamer;. If you are only interested in using existing - plugins to write a new application - and there are quite a lot of - plugins already - you might want to check the &GstAppDevMan;. If you - are just trying to get help with a &GStreamer; application, then you - should check with the user manual for that particular application. - - - - - - - Preliminary Reading - - This guide assumes that you are somewhat familiar with the basic workings - of &GStreamer;. For a gentle introduction to programming concepts in - &GStreamer;, you may wish to read the &GstAppDevMan; first. - Also check out the other documentation available on the &GStreamer; web site. - - - In order to understand this manual, you will need to have a basic - understanding of the C language. - Since &GStreamer; adheres to the GObject programming model, this guide - also assumes that you understand the basics of GObject - programming. 
- You may also want to have a look - at Eric Harlow's book Developing Linux Applications with - GTK+ and GDK. - - - - - - - Structure of This Guide - - To help you navigate through this guide, it is divided into several large - parts. Each part addresses a particular broad topic concerning &GStreamer; - plugin development. The parts of this guide are laid out in the following - order: - - - - - - - Introduction to the structure of a plugin, using an example audio - filter for illustration. - - - This part covers all the basic steps you generally need to perform - to build a plugin, such as registering the element with &GStreamer; - and setting up the basics so it can receive data from and send data - to neighbour elements. The discussion begins by giving examples of - generating the basic structures and registering an element in - . Then, you will learn how - to write the code to get a basic filter plugin working in , and . - - - After that, we will show some of the GObject concepts on how to - make an element configurable for applications and how to do - application-element interaction in - and . Next, you will learn to build - a quick test application to test all that you've just learned in - . We will just touch upon - basics here. For full-blown application development, you should - look at the - Application Development Manual. - - - - - - - Information on advanced features of &GStreamer; plugin development. - - - After learning about the basic steps, you should be able to create a - functional audio or video filter plugin with some nice features. - However, &GStreamer; offers more for plugin writers. This part of the - guide includes chapters on more advanced topics, such as scheduling, - media type definitions in &GStreamer;, clocks, interfaces and - tagging. Since these features are purpose-specific, you can read them - in any order, most of them don't require knowledge from other - sections. 
- - - The first chapter, named , - will explain some of the basics of element scheduling. It is not - very in-depth, but is mostly some sort of an introduction on why - other things work as they do. Read this chapter if you're interested - in &GStreamer; internals. Next, we will apply this knowledge and - discuss another type of data transmission than what you learned in - : . Loop-based elements will give - you more control over input rate. This is useful when writing, for - example, muxers or demuxers. - - - Next, we will discuss media identification in &GStreamer; in . You will learn how to define - new media types and get to know a list of standard media types - defined in &GStreamer;. - - - In the next chapter, you will learn the concept of request- and - sometimes-pads, which are pads that are created dynamically, either - because the application asked for it (request) or because the media - stream requires it (sometimes). This will be in . - - - The next chapter, , will - explain the concept of clocks in &GStreamer;. You need this - information when you want to know how elements should achieve - audio/video synchronization. - - - The next few chapters will discuss advanced ways of doing - application-element interaction. Previously, we learned on the - GObject-ways of doing this in - and . We will discuss - dynamic parameters, which are a way of defining element behaviour - over time in advance, in . Next, - you will learn about interfaces in . Interfaces are very target- - specific ways of application-element interaction, based on GObject's - GInterface. Lastly, you will learn about how metadata is handled in - &GStreamer; in . - - - The last chapter, , will - discuss the concept of events in &GStreamer;. Events are, on the - one hand, another way of doing application-element interaction. It - takes care of seeking, for example. 
On the other hand, it is also - a way in which elements interact with each other, such as letting - each other know about media stream discontinuities, forwarding tags - inside a pipeline and so on. - - - - - - Explanation - of writing other plugin types. - - - Because the first two parts of the guide use an audio filter as an - example, the concepts introduced apply to filter plugins. But many of - the concepts apply equally to other plugin types, including sources, - sinks, and autopluggers. This part of the guide presents the issues - that arise when working on these more specialized plugin types. The - chapter starts with a special focus on elements that can be written - using a base-class (), and - later also goes into writing special types of elements in - , and . - - - - - - Further - information for plugin developers. - - - The appendices contain some information that stubbornly refuses - to fit cleanly in other sections of the guide. Most of this section - is not yet finished. - - - - - - The remainder of this introductory part of the guide presents a short - overview of the basic concepts involved in &GStreamer; plugin development. - Topics covered include , , and - . If you are already familiar with - this information, you can use this short overview to refresh your memory, - or you can skip to . - - - As you can see, there a lot to learn, so let's get started! - - - - - - Creating compound and complex elements by extending from a GstBin. - This will allow you to create plugins that have other plugins embedded - in them. - - - - - Adding new media types to the registry along with typedetect functions. - This will allow your plugin to operate on a completely new media type. 
- - - - - diff --git a/docs/pwg/other-base.xml b/docs/pwg/other-base.xml deleted file mode 100644 index 0c00f98117..0000000000 --- a/docs/pwg/other-base.xml +++ /dev/null @@ -1,310 +0,0 @@ - - Pre-made base classes - - So far, we've been looking at low-level concepts of creating any type of - &GStreamer; element. Now, let's assume that all you want is to create an - simple audiosink that works exactly the same as, say, - esdsink, or a filter that simply normalizes audio volume. - Such elements are very general in concept and since they do nothing - special, they should be easier to code than to provide your own scheduler - activation functions and doing complex caps negotiation. For this purpose, - &GStreamer; provides base classes that simplify some types of elements. - Those base classes will be discussed in this chapter. - - - - Writing a sink - - Sinks are special elements in &GStreamer;. This is because sink elements - have to take care of preroll, which is the process - that takes care that elements going into the - GST_STATE_PAUSED state will have buffers ready - after the state change. The result of this is that such elements can - start processing data immediately after going into the - GST_STATE_PLAYING state, without requiring to - take some time to initialize outputs or set up decoders; all that is done - already before the state-change to GST_STATE_PAUSED - successfully completes. - - - Preroll, however, is a complex process that would require the same - code in many elements. Therefore, sink elements can derive from the - GstBaseSink base-class, which does preroll and - a few other utility functions automatically. The derived class only - needs to implement a bunch of virtual functions and will work - automatically. - - - The base class implement much of the synchronization logic that a - sink has to perform. - - - The GstBaseSink base-class specifies some - limitations on elements, though: - - - - - It requires that the sink only has one sinkpad. 
Sink elements that - need more than one sinkpad, must make a manager element with - multiple GstBaseSink elements inside. - - - - - Sink elements can derive from GstBaseSink using - the usual GObject convenience macro - G_DEFINE_TYPE (): - - -G_DEFINE_TYPE (GstMySink, gst_my_sink, GST_TYPE_BASE_SINK); - -[..] - -static void -gst_my_sink_class_init (GstMySinkClass * klass) -{ - klass->set_caps = [..]; - klass->render = [..]; -[..] -} - - - The advantages of deriving from GstBaseSink are - numerous: - - - - - Derived implementations barely need to be aware of preroll, and do - not need to know anything about the technical implementation - requirements of preroll. The base-class does all the hard work. - - - Less code to write in the derived class, shared code (and thus - shared bugfixes). - - - - - There are also specialized base classes for audio and video, let's look - at those a bit. - - - - Writing an audio sink - - Essentially, audio sink implementations are just a special case of a - general sink. An audio sink has the added complexity that it needs to - schedule playback of samples. It must match the clock selected in the - pipeline against the clock of the audio device and calculate and - compensate for drift and jitter. - - - There are two audio base classes that you can choose to - derive from, depending on your needs: - GstAudioBasesink and - GstAudioSink. The audiobasesink provides full - control over how synchronization and scheduling is handled, by using - a ringbuffer that the derived class controls and provides. The - audiosink base-class is a derived class of the audiobasesink, - implementing a standard ringbuffer implementing default - synchronization and providing a standard audio-sample clock. Derived - classes of this base class merely need to provide a _open - (), _close () and a _write - () function implementation, and some optional functions. - This should suffice for many sound-server output elements and even - most interfaces. 
More demanding audio systems, such as Jack, would - want to implement the GstAudioBaseSink - base-class. - - - The GstAudioBaseSink has little to no - limitations and should fit virtually every implementation, but is - hard to implement. The GstAudioSink, on the - other hand, only fits those systems with a simple open - () / close () / write - () API (which practically means pretty much all of them), - but has the advantage that it is a lot easier to implement. The - benefits of this second base class are large: - - - - - Automatic synchronization, without any code in the derived class. - - - - - Also automatically provides a clock, so that other sinks (e.g. in - case of audio/video playback) are synchronized. - - - - - Features can be added to all audiosinks by making a change in the - base class, which makes maintenance easy. - - - - - Derived classes require only three small functions, plus some - GObject boilerplate code. - - - - - In addition to implementing the audio base-class virtual functions, - derived classes can (should) also implement the - GstBaseSink set_caps () and - get_caps () virtual functions for negotiation. - - - - - Writing a video sink - - Writing a videosink can be done using the - GstVideoSink base-class, which derives from - GstBaseSink internally. Currently, it does - nothing yet but add another compile dependency, so derived classes - will need to implement all base-sink virtual functions. When they do - this correctly, this will have some positive effects on the end user - experience with the videosink: - - - - - Because of preroll (and the preroll () virtual - function), it is possible to display a video frame already when - going into the GST_STATE_PAUSED state. - - - - - By adding new features to GstVideoSink, it - will be possible to add extensions to videosinks that affect all of - them, but only need to be coded once, which is a huge maintenance - benefit. 
- - - - - - - - Writing a source - - In the previous part, particularly , we have learned that some types - of elements can provide random access. This applies most definitely to - source elements reading from a randomly seekable location, such as file - sources. However, other source elements may be better described as a - live source element, such as a camera source, an audio card source and - such; those are not seekable and do not provide byte-exact access. For - all such use cases, &GStreamer; provides two base classes: - GstBaseSrc for the basic source functionality, and - GstPushSrc, which is a non-byte exact source - base-class. The pushsource base class itself derives from basesource as - well, and thus all statements about the basesource apply to the - pushsource, too. - - - The basesrc class does several things automatically for derived classes, - so they no longer have to worry about it: - - - - - Fixes to GstBaseSrc apply to all derived - classes automatically. - - - - - Automatic pad activation handling, and task-wrapping in case we get - assigned to start a task ourselves. - - - - - The GstBaseSrc may not be suitable for all cases, - though; it has limitations: - - - - - There is one and only one sourcepad. Source elements requiring - multiple sourcepads must implement a manager bin and use multiple - source elements internally or make a manager element that uses - a source element and a demuxer inside. - - - - - It is possible to use special memory, such as X server memory pointers - or mmap ()'ed memory areas, as data pointers in - buffers returned from the create() virtual function. - - - - Writing an audio source - - An audio source is nothing more but a special case of a pushsource. - Audio sources would be anything that reads audio, such as a source - reading from a soundserver, a kernel interface (such as ALSA) or a - test sound / signal generator. 
&GStreamer; provides two base classes, - similar to the two audiosinks described in ; one is ringbuffer-based, and - requires the derived class to take care of its own scheduling, - synchronization and such. The other is based on this - GstAudioBaseSrc and is called - GstAudioSrc, and provides a simple - open (), close () and - read () interface, which is rather simple to - implement and will suffice for most soundserver sources and audio - interfaces (e.g. ALSA or OSS) out there. - - - The GstAudioSrc base-class has several benefits - for derived classes, on top of the benefits of the - GstPushSrc base-class that it is based on: - - - - - Does syncronization and provides a clock. - - - - - New features can be added to it and will apply to all derived - classes automatically. - - - - - - - - Writing a transformation element - - A third base-class that &GStreamer; provides is the - GstBaseTransform. This is a base class for - elements with one sourcepad and one sinkpad which act as a filter - of some sort, such as volume changing, audio resampling, audio format - conversion, and so on and so on. There is quite a lot of bookkeeping - that such elements need to do in order for things such as buffer - allocation forwarding, passthrough, in-place processing and such to all - work correctly. This base class does all that for you, so that you just - need to do the actual processing. - - - Since the GstBaseTransform is based on the 1-to-1 - model for filters, it may not apply well to elements such as decoders, - which may have to parse properties from the stream. Also, it will not - work for elements requiring more than one sourcepad or sinkpad. - - - diff --git a/docs/pwg/other-manager.xml b/docs/pwg/other-manager.xml deleted file mode 100644 index 53381146bf..0000000000 --- a/docs/pwg/other-manager.xml +++ /dev/null @@ -1,46 +0,0 @@ - - Writing a Manager - - Managers are elements that add a function or unify the function of - another (series of) element(s). 
Managers are generally a - GstBin with one or more ghostpads. Inside them - is/are the actual element(s) that matters. There is several cases where - this is useful. For example: - - - - - To add support for private events with custom event handling to - another element. - - - - - To add support for custom pad _query () - or _convert () handling to another element. - - - - - To add custom data handling before or after another element's data - handler function (generally its _chain () - function). - - - - - To embed an element, or a series of elements, into something that - looks and works like a simple element to the outside world. This - is particular handy for implementing sources and sink elements with - multiple pads. - - - - - Making a manager is about as simple as it gets. You can derive from a - GstBin, and in most cases, you can embed the - required elements in the _init () already, including - setup of ghostpads. If you need any custom data handlers, you can connect - signals or embed a second element which you control. - - diff --git a/docs/pwg/other-ntoone.xml b/docs/pwg/other-ntoone.xml deleted file mode 100644 index 9efcff63af..0000000000 --- a/docs/pwg/other-ntoone.xml +++ /dev/null @@ -1,155 +0,0 @@ - - Writing a N-to-1 Element or Muxer - - N-to-1 elements have been previously mentioned and discussed in both - and in - . The main noteworthy thing - about N-to-1 elements is that each pad is push-based in its own thread, - and the N-to-1 element synchronizes those streams by - expected-timestamp-based logic. This means it lets all streams wait - except for the one that provides the earliest next-expected timestamp. - When that stream has passed one buffer, the next - earliest-expected-timestamp is calculated, and we start back where we - were, until all streams have reached EOS. There is a helper base class, - called GstCollectPads, that will help you to do - this. 
- - - Note, however, that this helper class will only help you with grabbing - a buffer from each input and giving you the one with earliest timestamp. - If you need anything more difficult, such as "don't-grab-a-new-buffer - until a given timestamp" or something like that, you'll need to do this - yourself. - - - - diff --git a/docs/pwg/other-oneton.xml b/docs/pwg/other-oneton.xml deleted file mode 100644 index bc61bac4ff..0000000000 --- a/docs/pwg/other-oneton.xml +++ /dev/null @@ -1,45 +0,0 @@ - - Writing a Demuxer or Parser - - Demuxers are the 1-to-N elements that need very special care. - They are responsible for timestamping raw, unparsed data into - elementary video or audio streams, and there are many things that you - can optimize or do wrong. Here, several culprits will be mentioned - and common solutions will be offered. Parsers are demuxers with only - one source pad. Also, they only cut the stream into buffers, they - don't touch the data otherwise. - - - As mentioned previously in , - demuxers should use fixed caps, since their data type will not change. - - - As discussed in , demuxer elements - can be written in multiple ways: - - - - - They can be the driving force of the pipeline, by running their own - task. This works particularly well for elements that need random - access, for example an AVI demuxer. - - - - - They can also run in push-based mode, which means that an upstream - element drives the pipeline. This works particularly well for streams - that may come from network, such as Ogg. - - - - - In addition, audio parsers with one output can, in theory, also be written - in random access mode. Although simple playback will mostly work if your - element only accepts one mode, it may be required to implement multiple - modes to work in combination with all sorts of applications, such as - editing. Also, performance may become better if you implement multiple - modes. See to see how an element - can accept multiple scheduling modes. 
- - diff --git a/docs/pwg/other-sink.xml b/docs/pwg/other-sink.xml deleted file mode 100644 index 3222106838..0000000000 --- a/docs/pwg/other-sink.xml +++ /dev/null @@ -1,167 +0,0 @@ - - - - - Writing a Sink - - Sinks are output elements that, opposite to sources, have no source - pads and one or more (usually one) sink pad. They can be sound card - outputs, disk writers, etc. This chapter will discuss the basic - implementation of sink elements. - - - - Data processing, events, synchronization and clocks - - Except for corner cases, sink elements will be _chain - ()-based elements. The concept of such elements has - been discussed before in detail, so that will be skipped here. What - is very important in sink elements, specifically in real-time audio - and video sources (such as osssink or - ximagesink), is event handling in the - _chain ()-function, because most elements rely - on EOS-handling of the sink element, and because A/V synchronization - can only be perfect if the element takes this into account. - - - How to achieve synchronization between streams depends on whether - you're a clock-providing or a clock-receiving element. If you're - the clock provider, you can do with time whatever you want. Correct - handling would mean that you check whether the end of the previous - buffer (if any) and the start of the current buffer are the same. - If so, there's no gap between the two and you can continue playing - right away. If there is a gap, then you'll need to wait for your - clock to reach that time. How to do that depends on the element - type. In the case of audio output elements, you would output silence - for a while. In the case of video, you would show background color. - In case of subtitles, show no subtitles at all. 
- - - In the case that the provided clock and the received clock are not - the same (or in the case where your element provides no clock, which - is the same), you simply wait for the clock to reach the timestamp of - the current buffer and then you handle the data in it. - - - A simple data handling function would look like this: - - -static void -gst_my_sink_chain (GstPad *pad, - GstData *data) -{ - GstMySink *sink = GST_MY_SINK (gst_pad_get_parent (pad)); - GstBuffer *buf; - GstClockTime time; - - /* only needed if the element is GST_EVENT_AWARE */ - if (GST_IS_EVENT (data)) { - GstEvent *event = GST_EVENT (data); - - switch (GST_EVENT_TYPE (event)) { - case GST_EVENT_EOS: - [ if your element provides a clock, disable (inactivate) it here ] - /* pass-through */ - - default: - /* the default handler handles discontinuities, even if your - * element provides a clock! */ - gst_pad_event_default (pad, event); - break; - } - - return; - } - - buf = GST_BUFFER (data); - if (GST_BUFFER_TIME_IS_VALID (buf)) - time = GST_BUFFER_TIMESTAMP (buf); - else - time = sink->expected_next_time; - - /* Synchronization - the property is only useful in case the - * element has the option of not syncing. So it is not useful - * for hardware-sync (clock-providing) elements. */ - if (sink->sync) { - /* This check is only needed if you provide a clock. Else, - * you can always execute the 'else' clause. */ - if (sink->provided_clock == sink->received_clock) { - /* GST_SECOND / 10 is 0,1 sec, it's an arbitrary value. The - * casts are needed because else it'll be unsigned and we - * won't detect negative values. */ - if (llabs ((gint64) sink->expected_next_time - (gint64) time) > - (GST_SECOND / 10)) { - /* so are we ahead or behind? */ - if (time > sink->expected_time) { - /* we need to wait a while... In case of audio, output - * silence. In case of video, output background color. - * In case of subtitles, display nothing. */ - [..] - } else { - /* Drop data. */ - [..] 
- } - } - } else { - /* You could do more sophisticated things here, but we'll - * keep it simple for the purpose of the example. */ - gst_element_wait (GST_ELEMENT (sink), time); - } - } - - /* And now handle the data. */ -[..] -} - - - - - Special memory - - Like source elements, sink elements can sometimes provide externally - allocated (such as X-provided or DMA'able) memory to elements earlier - in the pipeline, and thereby prevent the need for - memcpy () for incoming data. We do this by - providing a pad-allocate-buffer function. - - -static GstBuffer * gst_my_sink_buffer_allocate (GstPad *pad, - guint64 offset, - guint size); - -static void -gst_my_sink_init (GstMySink *sink) -{ -[..] - gst_pad_set_bufferalloc_function (sink->sinkpad, - gst_my_sink_buffer_allocate); -} - -static void -gst_my_sink_buffer_free (GstBuffer *buf) -{ - GstMySink *sink = GST_MY_SINK (GST_BUFFER_PRIVATE (buf)); - - /* Do whatever is needed here. */ -[..] -} - -static GstBuffer * -gst_my_sink_buffer_allocate (GstPad *pad, - guint64 offset, - guint size) -{ - GstBuffer *buf = gst_buffer_new (); - - /* So here it's up to you to wrap your private buffers and - * return that. */ - GST_BUFFER_FREE_DATA_FUNC (buf) = gst_my_sink_buffer_free; - GST_BUFFER_PRIVATE (buf) = sink; - GST_BUFFER_FLAG_SET (buf, GST_BUFFER_DONTFREE); -[..] - - return buf; -} - - - diff --git a/docs/pwg/other-source.xml b/docs/pwg/other-source.xml deleted file mode 100644 index 55aaafc7de..0000000000 --- a/docs/pwg/other-source.xml +++ /dev/null @@ -1,475 +0,0 @@ - - - - - Writing a Source - - Source elements are the start of a data streaming pipeline. Source - elements have no sink pads and have one or more source pads. We will - focus on single-sourcepad elements here, but the concepts apply equally - well to multi-sourcepad elements. 
This chapter will explain the essentials - of source elements, which features it should implement and which it - doesn't have to, and how source elements will interact with other - elements in a pipeline. - - - - The get()-function - - Source elements have the special option of having a - _get ()-function rather than a - _loop ()- or _chain - ()-function. A _get ()-function is - called by the scheduler every time the next elements needs data. Apart - from corner cases, every source element will want to be _get - ()-based. - - -static GstData * gst_my_source_get (GstPad *pad); - -static void -gst_my_source_init (GstMySource *src) -{ -[..] - gst_pad_set_get_function (src->srcpad, gst_my_source_get); -} - -static GstData * -gst_my_source_get (GstPad *pad) -{ - GstBuffer *buffer; - - buffer = gst_buffer_new (); - GST_BUFFER_DATA (buf) = g_strdup ("hello pipeline!"); - GST_BUFFER_SIZE (buf) = strlen (GST_BUFFER_DATA (buf)); - /* terminating '/0' */ - GST_BUFFER_MAZSIZE (buf) = GST_BUFFER_SIZE (buf) + 1; - - return GST_DATA (buffer); -} - - - - - Events, querying and converting - - One of the most important functions of source elements is to - implement correct query, convert and event handling functions. - Those will continuously describe the current state of the stream. - Query functions can be used to get stream properties such as current - position and length. This can be used by fellow elements to convert - this same value into a different unit, or by applications to provide - information about the length/position of the stream to the user. - Conversion functions are used to convert such values from one unit - to another. Lastly, events are mostly used to seek to positions - inside the stream. Any function is essentially optional, but the - element should try to provide as much information as it knows. Note - that elements providing an event function should also list their - supported events in an _get_event_mask () - function. 
Elements supporting query operations should list the - supported operations in a _get_query_types - () function. Elements supporting either conversion - or query operations should also implement a _get_formats - () function. - - - An example source element could, for example, be an element that - continuously generates a wave tone at 44,1 kHz, mono, 16-bit. This - element will generate 44100 audio samples per second or 88,2 kB/s. - This information can be used to implement such functions: - - -static GstFormat * gst_my_source_format_list (GstPad *pad); -static GstQueryType * gst_my_source_query_list (GstPad *pad); - -static gboolean gst_my_source_convert (GstPad *pad, - GstFormat from_fmt, - gint64 from_val, - GstFormat *to_fmt, - gint64 *to_val); -static gboolean gst_my_source_query (GstPad *pad, - GstQueryType type, - GstFormat *to_fmt, - gint64 *to_val); - -static void -gst_my_source_init (GstMySource *src) -{ -[..] - gst_pad_set_convert_function (src->srcpad, gst_my_source_convert); - gst_pad_set_formats_function (src->srcpad, gst_my_source_format_list); - gst_pad_set_query_function (src->srcpad, gst_my_source_query); - gst_pad_set_query_type_function (src->srcpad, gst_my_source_query_list); -} - -/* - * This function returns an enumeration of supported GstFormat - * types in the query() or convert() functions. See gst/gstformat.h - * for a full list. - */ - -static GstFormat * -gst_my_source_format_list (GstPad *pad) -{ - static const GstFormat formats[] = { - GST_FORMAT_TIME, - GST_FORMAT_DEFAULT, /* means "audio samples" */ - GST_FORMAT_BYTES, - 0 - }; - - return formats; -} - -/* - * This function returns an enumeration of the supported query() - * operations. Since we generate audio internally, we only provide - * an indication of how many samples we've played so far. File sources - * or such elements could also provide GST_QUERY_TOTAL for the total - * stream length, or other things. See gst/gstquery.h for details. 
- */ - -static GstQueryType * -gst_my_source_query_list (GstPad *pad) -{ - static const GstQueryType query_types[] = { - GST_QUERY_POSITION, - 0, - }; - - return query_types; -} - -/* - * And below are the logical implementations. - */ - -static gboolean -gst_my_source_convert (GstPad *pad, - GstFormat from_fmt, - gint64 from_val, - GstFormat *to_fmt, - gint64 *to_val) -{ - gboolean res = TRUE; - GstMySource *src = GST_MY_SOURCE (gst_pad_get_parent (pad)); - - switch (from_fmt) { - case GST_FORMAT_TIME: - switch (*to_fmt) { - case GST_FORMAT_TIME: - /* nothing */ - break; - - case GST_FORMAT_BYTES: - *to_val = from_val / (GST_SECOND / (44100 * 2)); - break; - - case GST_FORMAT_DEFAULT: - *to_val = from_val / (GST_SECOND / 44100); - break; - - default: - res = FALSE; - break; - } - break; - - case GST_FORMAT_BYTES: - switch (*to_fmt) { - case GST_FORMAT_TIME: - *to_val = from_val * (GST_SECOND / (44100 * 2)); - break; - - case GST_FORMAT_BYTES: - /* nothing */ - break; - - case GST_FORMAT_DEFAULT: - *to_val = from_val / 2; - break; - - default: - res = FALSE; - break; - } - break; - - case GST_FORMAT_DEFAULT: - switch (*to_fmt) { - case GST_FORMAT_TIME: - *to_val = from_val * (GST_SECOND / 44100); - break; - - case GST_FORMAT_BYTES: - *to_val = from_val * 2; - break; - - case GST_FORMAT_DEFAULT: - /* nothing */ - break; - - default: - res = FALSE; - break; - } - break; - - default: - res = FALSE; - break; - } - - return res; -} - -static gboolean -gst_my_source_query (GstPad *pad, - GstQueryType type, - GstFormat *to_fmt, - gint64 *to_val) -{ - GstMySource *src = GST_MY_SOURCE (gst_pad_get_parent (pad)); - gboolean res = TRUE; - - switch (type) { - case GST_QUERY_POSITION: - res = gst_pad_convert (pad, GST_FORMAT_BYTES, src->total_bytes, - to_fmt, to_val); - break; - - default: - res = FALSE; - break; - } - - return res; -} - - - Be sure to increase src->total_bytes after each call to your - _get () function. 
- - - Event handling has already been explained previously in the events - chapter. - - - - - Time, clocking and synchronization - - The above example does not provide any timing info, but will suffice - for elementary data sources such as a file source or network data - source element. Things become slightly more complicated, but still - very simple, if we create artificial video or audio data sources, - such as a video test image source or an artificial audio source (e.g. - audiotestsrc). - It will become more complicated if we want the element to be a - realtime capture source, such as a video4linux source (for reading - video frames from a TV card) or an ALSA source (for reading data - from soundcards supported by an ALSA-driver). Here, we will need to - make the element aware of timing and clocking. - - - Timestamps can essentially be generated from all the information - given above without any difficulty. We could add a very small amount - of code to generate perfectly timestamped buffers from our - _get ()-function: - - -static void -gst_my_source_init (GstMySource *src) -{ -[..] - src->total_bytes = 0; -} - -static GstData * -gst_my_source_get (GstPad *pad) -{ - GstMySource *src = GST_MY_SOURCE (gst_pad_get_parent (pad)); - GstBuffer *buf; - GstFormat fmt = GST_FORMAT_TIME; -[..] 
- GST_BUFFER_DURATION (buf) = GST_BUFFER_SIZE (buf) * (GST_SECOND / (44100 * 2)); - GST_BUFFER_TIMESTAMP (buf) = src->total_bytes * (GST_SECOND / (44100 * 2)); - src->total_bytes += GST_BUFFER_SIZE (buf); - - return GST_DATA (buf); -} - -static GstStateChangeReturn -gst_my_source_change_state (GstElement *element, GstStateChange transition) -{ - GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS; - GstMySource *src = GST_MY_SOURCE (element); - - /* First, handle upwards state changes */ - switch (transition) { - case GST_STATE_READY_TO_PAUSED: - /* do something */ - break; - default: - break; - } - - ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); - if (ret == GST_STATE_CHANGE_FAILURE) - return ret; - - /* Now handle downwards state changes after chaining up */ - switch (transition) { - case GST_STATE_PAUSED_TO_READY: - src->total_bytes = 0; - break; - default: - break; - } - - return ret; -} - - - That wasn't too hard. Now, let's assume real-time elements. Those - can either have hardware-timing, in which case we can rely on backends - to provide sync for us (in which case you probably want to provide a - clock), or we will have to emulate that internally (e.g. to acquire - sync in artificial data elements such as - audiotestsrc). - Let's first look at the second option (software sync). The first option - (hardware sync + providing a clock) does not require any special code - with respect to timing, and the clocking section already explained how - to provide a clock. - - -enum { - ARG_0, -[..] - ARG_SYNC, -[..] -}; - -static void -gst_my_source_class_init (GstMySourceClass *klass) -{ - GObjectClass *object_class = G_OBJECT_CLASS (klass); -[..] - g_object_class_install_property (object_class, ARG_SYNC, - g_param_spec_boolean ("sync", "Sync", "Synchronize to clock", - FALSE, G_PARAM_READWRITE | - G_PARAM_STATIC_STRINGS)); -[..] -} - -static void -gst_my_source_init (GstMySource *src) -{ -[..] 
- src->sync = FALSE; -} - -static GstData * -gst_my_source_get (GstPad *pad) -{ - GstMySource *src = GST_MY_SOURCE (gst_pad_get_parent (pad)); - GstBuffer *buf; -[..] - if (src->sync) { - /* wait on clock */ - gst_element_wait (GST_ELEMENT (src), GST_BUFFER_TIMESTAMP (buf)); - } - - return GST_DATA (buf); -} - -static void -gst_my_source_get_property (GObject *object, - guint prop_id, - GParamSpec *pspec, - GValue *value) -{ - GstMySource *src = GST_MY_SOURCE (gst_pad_get_parent (pad)); - - switch (prop_id) { -[..] - case ARG_SYNC: - g_value_set_boolean (value, src->sync); - break; -[..] - } -} - -static void -gst_my_source_get_property (GObject *object, - guint prop_id, - GParamSpec *pspec, - const GValue *value) -{ - GstMySource *src = GST_MY_SOURCE (gst_pad_get_parent (pad)); - - switch (prop_id) { -[..] - case ARG_SYNC: - src->sync = g_value_get_boolean (value); - break; -[..] - } -} - - - Most of this is GObject wrapping code. The actual code to do - software-sync (in the _get ()-function) - is relatively small. - - - - Using special memory - - In some cases, it might be useful to use specially allocated memory - (e.g. mmap ()'ed DMA'able memory) in - your buffers, and those will require special handling when they are - being dereferenced. For this, &GStreamer; uses the concept of - buffer-free functions. Those are special functions pointers that an - element can set on buffers that it created itself. The given function - will be called when the buffer has been dereferenced, so that the - element can clean up or re-use memory internally rather than using - the default implementation (which simply calls - g_free () on the data pointer). - - -static void -gst_my_source_buffer_free (GstBuffer *buf) -{ - GstMySource *src = GST_MY_SOURCE (GST_BUFFER_PRIVATE (buf)); - - /* do useful things here, like re-queueing the buffer which - * makes it available for DMA again. The default handler will - * not free this buffer because of the GST_BUFFER_DONTFREE - * flag. 
*/ -} - -static GstData * -gst_my_source_get (GstPad *pad) -{ - GstMySource *src = GST_MY_SOURCE (gst_pad_get_parent (pad)); - GstBuffer *buf; -[..] - buf = gst_buffer_new (); - GST_BUFFER_FREE_DATA_FUNC (buf) = gst_my_source_buffer_free; - GST_BUFFER_PRIVATE (buf) = src; - GST_BUFFER_FLAG_SET (buf, GST_BUFFER_READONLY | GST_BUFFER_DONTFREE); -[..] - - return GST_DATA (buf); -} - - - Note that this concept should not be used to - decrease the number of calls made to functions such as - g_malloc () inside your element. We - have better ways of doing that elsewhere (&GStreamer; core, Glib, - Glibc, Linux kernel, etc.). - - - diff --git a/docs/pwg/pwg.xml b/docs/pwg/pwg.xml deleted file mode 100644 index 52d729e088..0000000000 --- a/docs/pwg/pwg.xml +++ /dev/null @@ -1,198 +0,0 @@ - - -%image-entities; - -%version-entities; - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -GStreamer"> -GStreamer Application Development Manual"> -GStreamer Library Reference"> -]> - - - &TITLEPAGE; - - - - - Introduction - - - &GStreamer; is an extremely powerful and versatile framework for creating - streaming media applications. Many of the virtues of the &GStreamer; - framework come from its modularity: &GStreamer; can seamlessly - incorporate new plugin modules. But because modularity and power often - come at a cost of greater complexity (consider, for example, CORBA), writing new - plugins is not always easy. - - - This guide is intended to help you understand the &GStreamer; framework - (version &GST_VERSION;) so you can develop new plugins to extend the - existing functionality. The guide addresses most issues by following the - development of an example plugin - an audio filter plugin - - written in C. However, the later parts of the guide also present some - issues involved in writing other types of plugins, and the end of the - guide describes some of the Python bindings for &GStreamer;. 
- - - - &INTRO_PREFACE; - &INTRO_BASICS; - - - - - - Building a Plugin - - - You are now ready to learn how to build a plugin. In this part of the - guide, you will learn how to apply basic &GStreamer; - programming concepts to write a simple plugin. The previous parts of the - guide have contained no explicit example code, perhaps making things a - bit abstract and difficult to understand. In contrast, this section will - present both applications and code by following the development of an - example audio filter plugin called MyFilter. - - - The example filter element will begin with a single input pad and a - single - output pad. The filter will, at first, simply pass media and event data - from its sink pad to its source pad without modification. But by the end - of this part of the guide, you will learn to add some more interesting - functionality, including properties and signal handlers. And after - reading the next part of the guide, , you - will be able to add even more functionality to your plugins. - - - - &BUILDING_BOILER; - &BUILDING_PADS; - &BUILDING_CHAINFN; - &BUILDING_EVENTFN; - &BUILDING_QUERYFN; - &BUILDING_STATE; - &BUILDING_PROPS; - &BUILDING_SIGNALS; - &BUILDING_TESTAPP; - - - - - - Advanced Filter Concepts - - - By now, you should be able to create basic filter elements that can - receive and send data. This is the simple model that &GStreamer; stands - for. But &GStreamer; can do much more than only this! In this chapter, - various advanced topics will be discussed, such as scheduling, special - pad types, clocking, events, interfaces, tagging and more. These topics - are the sugar that makes &GStreamer; so easy to use for applications. 
- - - - &ADVANCED_REQUEST; - &ADVANCED_SCHEDULING; - &ADVANCED_NEGOTIATION; - &ADVANCED_ALLOCATION; - &ADVANCED_TYPES; - &ADVANCED_EVENTS; - &ADVANCED_CLOCK; - &ADVANCED_QOS; - &ADVANCED_DPARAMS; - &ADVANCED_INTERFACES; - &ADVANCED_TAGGING; - - - - - - - - - Creating special element types - - - By now, we have looked at pretty much any feature that can be embedded - into a &GStreamer; element. Most of this has been fairly low-level and - given deep insights in how &GStreamer; works internally. Fortunately, - &GStreamer; contains some easier-to-use interfaces to create such - elements. In order to do that, we will look closer at the element - types for which &GStreamer; provides base classes (sources, sinks and - transformation elements). We will also look closer at some types of - elements that require no specific coding such as scheduling-interaction - or data passing, but rather require specific pipeline control (e.g. - N-to-1 elements and managers). - - - - &OTHER_BASE; - &OTHER_ONETON; - &OTHER_NTOONE; - &OTHER_MANAGER; - - - - - - Appendices - - - This chapter contains things that don't belong anywhere else. - - - - &APPENDIX_CHECKLIST; - &APPENDIX_PORTING; - &APPENDIX_LICENSING; - &APPENDIX_PYTHON; - - - diff --git a/docs/pwg/titlepage.xml b/docs/pwg/titlepage.xml deleted file mode 100644 index e249d16e86..0000000000 --- a/docs/pwg/titlepage.xml +++ /dev/null @@ -1,98 +0,0 @@ - - - - - Richard - John - Boulton - - - richard-gst@tartarus.org - - - - - - Erik - Walthinsen - - - omega@temple-baptist.com - - - - - - Steve - Baker - - - stevebaker_org@yahoo.co.uk - - - - - - Leif - Johnson - - - leif@ambient.2y.net - - - - - - Ronald - S. - Bultje - - - rbultje@ronald.bitfreak.net - - - - - - Stefan - Kost - - - ensonic@users.sf.net - - - - - - Tim-Philipp - Müller - - - tim centricular . 
net - - - - - Wim - Taymans - - - wim.taymans@gmail.com - - - - - - - - This material may be distributed only subject to the terms and - conditions set forth in the Open Publication License, v1.0 or later (the - latest version is presently available at http://www.opencontent.org/openpub/). - - - - &GStreamer; Plugin Writer's Guide (&GST_VERSION;) - - diff --git a/tests/examples/Makefile.am b/tests/examples/Makefile.am index 6918f90238..ea563c8641 100644 --- a/tests/examples/Makefile.am +++ b/tests/examples/Makefile.am @@ -8,7 +8,6 @@ endif always_dirs = \ controller \ helloworld \ - manual \ memory \ netclock \ ptp \ diff --git a/tests/examples/manual/.gitignore b/tests/examples/manual/.gitignore deleted file mode 100644 index 1f16c7d065..0000000000 --- a/tests/examples/manual/.gitignore +++ /dev/null @@ -1,50 +0,0 @@ -Makefile -Makefile.in -*.c -*.o -*.lo -*.la -.deps -.libs - -appsink -appsrc -blockprobe -dynformat -elementget -elementmake -gnome -helloworld -helloworld2 -init -popt -queue -threads -bin -decodebin -dynamic -elementcreate -elementfactory -elementlink -ghostpad -pad -playbin -playsink -norebuffer -probe -query -fakesrc -typefind -effectswitch -testrtpool - -xml-mp3 -xml -xmlTest.gst -README - -*.bb -*.bbg -*.da - -test-registry.* diff --git a/tests/examples/manual/Makefile.am b/tests/examples/manual/Makefile.am deleted file mode 100644 index 42ae9e5911..0000000000 --- a/tests/examples/manual/Makefile.am +++ /dev/null @@ -1,136 +0,0 @@ -# if HAVE_LIBGNOMEUI -# GNOME = gnome -# else -GNOME = -# endif - -# gnome_LDADD = $(GST_OBJ_LIBS) $(LIBGNOMEUI_LIBS) -# gnome_CFLAGS = $(GST_OBJ_CFLAGS) $(LIBGNOMEUI_CFLAGS) - -CHECK_REGISTRY = $(top_builddir)/tests/examples/manual/test-registry.reg - -REGISTRY_ENVIRONMENT = \ - GST_REGISTRY=$(CHECK_REGISTRY) - -AM_TESTS_ENVIRONMENT = \ - $(REGISTRY_ENVIRONMENT) \ - GST_PLUGIN_SCANNER_1_0=$(top_builddir)/libs/gst/helpers/gst-plugin-scanner \ - GST_PLUGIN_SYSTEM_PATH_1_0= \ - 
GST_PLUGIN_PATH_1_0=$(top_builddir)/plugins - -EXTRA_DIST = extract.pl - -EXAMPLES = \ - $(GNOME) \ - elementcreate \ - elementmake \ - elementfactory \ - elementget \ - elementlink \ - bin \ - pad \ - ghostpad \ - helloworld \ - init \ - query \ - typefind \ - blockprobe \ - probe \ - appsrc \ - appsink \ - dynformat \ - effectswitch \ - norebuffer \ - playbin \ - decodebin \ - playsink - -BUILT_SOURCES = \ - elementmake.c elementcreate.c elementget.c elementlink.c elementfactory.c \ - bin.c \ - pad.c ghostpad.c \ - gnome.c \ - helloworld.c \ - init.c \ - query.c \ - typefind.c \ - blockprobe.c \ - probe.c \ - appsrc.c \ - appsink.c \ - dynformat.c \ - effectswitch.c \ - norebuffer.c \ - playbin.c decodebin.c \ - playsink.c - -if HAVE_PTHREAD -BUILT_SOURCES += testrtpool.c -EXAMPLES += testrtpool -endif - -CLEANFILES = core core.* test-registry.* *.gcno *.gcda $(BUILT_SOURCES) - -AM_CFLAGS = $(GST_OBJ_CFLAGS) -LDADD = $(top_builddir)/libs/gst/base/libgstbase-@GST_API_VERSION@.la \ - $(GST_OBJ_LIBS) - -elementmake.c elementcreate.c elementget.c elementlink.c elementfactory.c: $(top_srcdir)/docs/manual/basics-elements.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -bin.c : $(top_srcdir)/docs/manual/basics-bins.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -pad.c ghostpad.c: $(top_srcdir)/docs/manual/basics-pads.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -gnome.c: $(top_srcdir)/docs/manual/appendix-integration.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -helloworld.c: $(top_srcdir)/docs/manual/basics-helloworld.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -init.c: $(top_srcdir)/docs/manual/basics-init.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -query.c: $(top_srcdir)/docs/manual/advanced-position.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -typefind.c: $(top_srcdir)/docs/manual/advanced-autoplugging.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -blockprobe.c: $(top_srcdir)/docs/manual/advanced-dataaccess.xml - $(PERL_PATH) 
$(srcdir)/extract.pl $@ $< - -probe.c: $(top_srcdir)/docs/manual/advanced-dataaccess.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -appsrc.c: $(top_srcdir)/docs/manual/advanced-dataaccess.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -appsink.c: $(top_srcdir)/docs/manual/advanced-dataaccess.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -dynformat.c: $(top_srcdir)/docs/manual/advanced-dataaccess.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -effectswitch.c: $(top_srcdir)/docs/manual/advanced-dataaccess.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -norebuffer.c: $(top_srcdir)/docs/manual/advanced-buffering.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -playbin.c decodebin.c playsink.c: $(top_srcdir)/docs/manual/highlevel-playback.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -testrtpool.c: $(top_srcdir)/docs/manual/advanced-threads.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ $< - -TESTS = bin \ - elementcreate elementfactory elementget elementlink elementmake \ - ghostpad init - -noinst_PROGRAMS = $(EXAMPLES) - -testrtpool_LDADD = $(GST_OBJ_LIBS) $(PTHREAD_LIBS) -testrtpool_CFLAGS = $(GST_OBJ_CFLAGS) $(PTHREAD_CFLAGS) diff --git a/tests/examples/manual/extract.pl b/tests/examples/manual/extract.pl deleted file mode 100755 index a4b7c1c9a1..0000000000 --- a/tests/examples/manual/extract.pl +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/perl - -# extract code fragments from xml program listings -# first argument: source code file to find -# second argument: xml files to extract code from - -# main - -# decodes xml by translating & < > back to what they should be -# and also ignore -# and -sub -xml_decode ($) -{ - my $input = shift; - - $input =~ s/\&/&/g; - $input =~ s/<//g; - - if ($input =~ //) { $input = ""; } - if ($input =~ //) { $input = ""; } - - #print "Returning line $input"; - return $input; -} - -# main -my $output = shift @ARGV; -my $outputname; - -# strip path parts -if ($output =~ m/.*\/(.*)$/) -{ - $outputname = $1; -} -else -{ - $outputname = 
$output; -} - -$found = 0; -%blocks = (); - -foreach $file (@ARGV) -{ - open FILE, $file or die "Cannot open file $file"; - - while ($line = ) - { - if ($line =~ //) - { - $found = 1; - $block_id = $1; - $block = "\n/*** block $block_id from $file ***/\n"; - - print "Extracting $outputname block $block_id from $file\n"; - - while ($line = ) - { - if ($line =~ //) - { - last; - } - $block .= xml_decode ($line); - } - $blocks{$block_id} = $block; - } - } -} - - -if (!$found) -{ - print "Could not find $outputname example !\n"; - exit(1); -} - -# now output all the blocks in the right order -open OUTPUT, ">$output"; -@block_ids = keys %blocks; -foreach $block_id (sort @block_ids) -{ - print "Writing $output block $block_id\n"; - print OUTPUT $blocks{$block_id}; -} -close OUTPUT;