From 2abdd0bfdafb285b445e031729fc55556d19084f Mon Sep 17 00:00:00 2001 From: "Ronald S. Bultje" Date: Wed, 29 Jun 2005 09:25:51 +0000 Subject: [PATCH] docs/manual/: Update (until threads/scheduling) Application Development Manual; remove GstThread, add GstBus, add sim... Original commit message from CVS: * docs/manual/advanced-clocks.xml: * docs/manual/advanced-interfaces.xml: * docs/manual/advanced-metadata.xml: * docs/manual/advanced-position.xml: * docs/manual/advanced-schedulers.xml: * docs/manual/advanced-threads.xml: * docs/manual/appendix-porting.xml: * docs/manual/basics-bins.xml: * docs/manual/basics-bus.xml: * docs/manual/basics-elements.xml: * docs/manual/basics-helloworld.xml: * docs/manual/basics-pads.xml: * docs/manual/highlevel-components.xml: * docs/manual/manual.xml: * docs/manual/thread.fig: Update (until threads/scheduling) Application Development Manual; remove GstThread, add GstBus, add simple porting checklist, add documentation for tag writing, clocks, make all examples until this part compile and run. * examples/manual/Makefile.am: Update from changes to Application Development Manual; add bus example, remove thread example. --- ChangeLog | 25 +++ docs/manual/advanced-clocks.xml | 60 ++++++- docs/manual/advanced-interfaces.xml | 25 +++ docs/manual/advanced-metadata.xml | 51 ++++-- docs/manual/advanced-position.xml | 181 ++++++++++++-------- docs/manual/advanced-schedulers.xml | 152 ----------------- docs/manual/advanced-threads.xml | 236 +++++--------------------- docs/manual/appendix-porting.xml | 90 ++++++++++ docs/manual/basics-bins.xml | 2 +- docs/manual/basics-bus.xml | 186 ++++++++++++++++++++ docs/manual/basics-elements.xml | 38 +++-- docs/manual/basics-helloworld.xml | 87 +++++++--- docs/manual/basics-pads.xml | 54 ++++-- docs/manual/highlevel-components.xml | 38 ----- docs/manual/manual.xml | 25 ++- docs/manual/thread.fig | 51 ------ examples/manual/Makefile.am | 10 +- tests/old/examples/manual/Makefile.am | 10 +- 18 files changed, 725 insertions(+), 596 deletions(-) delete mode 100644 docs/manual/advanced-schedulers.xml create mode 100644 docs/manual/appendix-porting.xml create mode 100644 docs/manual/basics-bus.xml delete mode 100644 docs/manual/thread.fig diff --git a/ChangeLog b/ChangeLog index 19c4d0690c..27cf1ffe60 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,3 +1,28 @@ +2005-06-29 Ronald S. Bultje + + * docs/manual/advanced-clocks.xml: + * docs/manual/advanced-interfaces.xml: + * docs/manual/advanced-metadata.xml: + * docs/manual/advanced-position.xml: + * docs/manual/advanced-schedulers.xml: + * docs/manual/advanced-threads.xml: + * docs/manual/appendix-porting.xml: + * docs/manual/basics-bins.xml: + * docs/manual/basics-bus.xml: + * docs/manual/basics-elements.xml: + * docs/manual/basics-helloworld.xml: + * docs/manual/basics-pads.xml: + * docs/manual/highlevel-components.xml: + * docs/manual/manual.xml: + * docs/manual/thread.fig: + Update (until threads/scheduling) Application Development Manual; + remove GstThread, add GstBus, add simple porting checklist, add + documentation for tag writing, clocks, make all examples until this + part compile and run. + * examples/manual/Makefile.am: + Update from changes to Application Development Manual; add bus + example, remove thread example. 
+ 2005-06-28 Wim Taymans * gst/gstbus.c: (gst_bus_post), (gst_bus_have_pending), diff --git a/docs/manual/advanced-clocks.xml b/docs/manual/advanced-clocks.xml index 4781155314..33505ef05f 100644 --- a/docs/manual/advanced-clocks.xml +++ b/docs/manual/advanced-clocks.xml @@ -1,7 +1,63 @@ Clocks in GStreamer - - WRITEME + + To maintain sync in pipeline playback (which is the only case where this + really matters), &GStreamer; uses clocks. Clocks + are exposed by some elements, whereas other elements are merely clock + slaves. The primary task of a clock is to represent the time progress + according to the element exposing the clock, based on its own playback + rate. If no clock provider is available in a pipeline, the system clock + is used instead. + + + Clock providers + + + Clock providers exist because they play back media at some rate, and + this rate is not necessarily the same as the system clock rate. For + example, a soundcard may playback at 44,1 kHz, but that doesn't mean + that after exactly 1 second according + to the system clock, the soundcard has played back 44.100 + samples. This is only true by approximation. Therefore, generally, + pipelines with an audio output use the audiosink as clock provider. + This ensures that one second of video will be played back at the same + rate as that the soundcard plays back 1 second of audio. + + + Whenever some part of the pipeline requires to know the current clock + time, it will be requested from the clock through + gst_clock_get_time (). The clock-time does not + need to start at 0. The pipeline, which contains the global clock that + all elements in the pipeline will use, in addition has a base + time, which is the clock time at the the point where media time + is starting from zero. This timestamp is subctracted from the clock + time, and that value is returned by _get_time (). + + + The clock provider is responsible for making sure that the clock time + always represents the current media time as closely as possible; it + has to take care of things such as playback latencies, buffering in + audio-kernel modules, and so on, since all those could affect a/v sync + and thus decrease the user experience. + + + + + Clock slaves + + Clock slaves get assigned a clock by their containing pipeline. Their + task is to make sure that media playback follows the time progress as + represented by this clock as closely as possible. For most elements, + that will simply mean to wait until a certain time is reached before + playing back their current sample; this can be done with the function + gst_clock_id_wait (). Some elements may need to + support dropping samples too, however. + + + For more information on how to write elements that conform to this + required behaviour, see the Plugin Writer's Guide. + + diff --git a/docs/manual/advanced-interfaces.xml b/docs/manual/advanced-interfaces.xml index f107cec7ad..89d5b6e63b 100644 --- a/docs/manual/advanced-interfaces.xml +++ b/docs/manual/advanced-interfaces.xml @@ -16,6 +16,31 @@ scope and purpose of each interface. + + The URI interface + + + In all examples so far, we have only supported local files through the + filesrc element. &GStreamer;, obviously, supports many + more location sources. However, we don't want applications to need to + know any particular element implementation details, such as element + names for particular network source types and so on. Therefore, there + is a URI interface, which can be used to get the source element that + supports a particular URI type. 
There is no strict rule for URI naming, + but in general we follow naming conventions that others use, too. For + example, assuming you have the correct plugins installed, &GStreamer; + supports file:///<path>/<file>, + http://<host>/<path>/<file>, + mms://<host>/<path>/<file>, and so on. + + + In order to get the source or sink element supporting a particular URI, + use gst_element_make_from_uri (), with the URI + type being either GST_URI_SRC for a source + element, or GST_URI_SINK for a sink element. + + + The Mixer interface diff --git a/docs/manual/advanced-metadata.xml b/docs/manual/advanced-metadata.xml index 7855d30d18..31aecb40b0 100644 --- a/docs/manual/advanced-metadata.xml +++ b/docs/manual/advanced-metadata.xml @@ -13,34 +13,34 @@ GstPad. - - Stream information + + Metadata reading Stream information can most easily be read by reading them from a GstPad. This has already been discussed before in . Therefore, we will skip - it here. + it here. Note that this requires access to all pads of which you + want stream information. - - - - Tag reading - Tag reading is remarkably simple in &GStreamer; Every element supports - the found-tag signal, which will be fired each the time - the element reads tags from the stream. A GstBin - will conveniently forward tags found by its childs. Therefore, in most - applications, you will only need to connect to the - found-tag signal on the top-most bin in your pipeline, - and you will automatically retrieve all tags from the stream. + Tag reading is done through a bus in &GStreamer;, which has been + discussed previously in . You can + listen for GST_MESSAGE_TAG messages and handle + them as you wish. - Note, however, that the found-tag might be fired - multiple times and by multiple elements in the pipeline. It is the + Note, however, that the GST_MESSAGE_TAG + message may be fired multiple times in the pipeline. It is the application's responsibility to put all those tags together and - display them to the user in a nice, coherent way. + display them to the user in a nice, coherent way. Usually, using + gst_tag_list_merge () is a good enough way + of doing this; make sure to empty the cache when loading a new song, + or after every few minutes when listening to internet radio. Also, + make sure you use GST_TAG_MERGE_PREPEND as + merging mode, so that a new title (which came in later) has a + preference over the old one for display. @@ -48,7 +48,22 @@ Tag writing - WRITEME + Tag writing is done using the GstTagSetter + interface. All that's required is a tag-set-supporting element in + your pipeline. In order to see if any of the elements in your + pipeline supports tag writing, you can use the function + gst_bin_iterate_all_by_interface (pipeline, + GST_TYPE_TAG_SETTER). On the resulting element, usually + an encoder or muxer, you can use gst_tag_setter_merge + () (with a taglist) or gst_tag_setter_add + () (with individual tags) to set tags on it. + + + A nice extra feature in &GStreamer; tag support is that tags are + preserved in pipelines. This means that if you transcode one file + containing tags into another media type, and that new media type + supports tags too, then the tags will be handled as part of the + data stream and be merged into the newly written media file, too. 
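      As a rough sketch of the tag-reading approach described above, the
      fragment below shows how an application could cache incoming tags.
      It assumes a bus watch as set up in the bus chapter and a
      hypothetical application-global tag_cache variable;
      it is one possible way of doing it, not the only one.

static GstTagList *tag_cache = NULL;

    /* inside the bus callback, next to the error and end-of-stream cases */
    case GST_MESSAGE_TAG: {
      GstTagList *tags, *result;

      /* the parsed tag list becomes ours and must be freed after use */
      gst_message_parse_tag (message, &tags);

      /* merge into the cache; with GST_TAG_MERGE_PREPEND, tags that
       * arrived last take preference when displayed later on */
      result = gst_tag_list_merge (tag_cache, tags, GST_TAG_MERGE_PREPEND);
      if (tag_cache)
        gst_tag_list_free (tag_cache);
      gst_tag_list_free (tags);
      tag_cache = result;

      /* update the title/artist labels in the user interface here, and
       * remember to empty tag_cache when a new song starts */
      break;
    }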
diff --git a/docs/manual/advanced-position.xml b/docs/manual/advanced-position.xml index 4310696aa3..eaea228551 100644 --- a/docs/manual/advanced-position.xml +++ b/docs/manual/advanced-position.xml @@ -3,15 +3,15 @@ So far, we've looked at how to create a pipeline to do media processing - and how to make it run ("iterate"). Most application developers will be - interested in providing feedback to the user on media progress. Media - players, for example, will want to show a slider showing the progress in - the song, and usually also a label indicating stream length. Transcoding - applications will want to show a progress bar on how much % of the task - is done. &GStreamer; has built-in support for doing all this using a - concept known as querying. Since seeking is very - similar, it will be discussed here as well. Seeking is done using the - concept of events. + and how to make it run. Most application developers will be interested + in providing feedback to the user on media progress. Media players, for + example, will want to show a slider showing the progress in the song, + and usually also a label indicating stream length. Transcoding + applications will want to show a progress bar on how much percent of + the task is done. &GStreamer; has built-in support for doing all this + using a concept known as querying. Since seeking + is very similar, it will be discussed here as well. Seeking is done + using the concept of events. @@ -22,42 +22,82 @@ to progress tracking. This includes getting the length of a stream (if available) or getting the current position. Those stream properties can be retrieved in various formats such as time, audio samples, video - frames or bytes. The functions used are gst_element_query - () and gst_pad_query (). + frames or bytes. The function most commonly used for this is + gst_element_query (), although some convenience + wrappers are provided as well (such as + gst_element_query_position ()). You can generally + query the pipeline directly, it'll figure out the internal details + for you, like which element to query. - Obviously, using either of the above-mentioned functions requires the - application to know which element or pad to run - the query on. This is tricky, but there are some good sides to the - story. The good thing is that elements (or, rather, pads - since - gst_element_query () internally calls - gst_pad_query ()) forward (dispatch) - events and queries to peer pads (or elements) if they don't handle it - themselves. The bad side is that some elements (or pads) will handle - events, but not the specific formats that you want, and therefore it - still won't work. - - - - Most queries will, fortunately, work fine. Queries are always - dispatched backwards. This means, effectively, that it's easiest to - run the query on your video or audio output element, and it will take - care of dispatching the query to the element that knows the answer - (such as the current position or the media length; usually the demuxer - or decoder). + Internally, queries will be sent to the sinks, and + dispatched backwards until one element can handle it; + that result will be sent back to the function caller. Usually, that + is the demuxer, although with live sources (from a webcam), it is the + source itself. 
#include <gst/gst.h> + + + +static gboolean +cb_print_position (GstElement *pipeline) +{ + GstFormat fmt = GST_FORMAT_TIME; + gint64 pos, len; + + if (gst_element_query_position (pipeline, &fmt, &pos, &len)) { + g_print ("Time: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r", + GST_TIME_ARGS (pos), GST_TIME_ARGS (len)); + } + + /* call me again */ + return TRUE; +} gint main (gint argc, gchar *argv[]) { - GstElement *sink, *pipeline; - -[..] +[..] - + loop = g_main_loop_new (NULL, FALSE); +--> + /* run pipeline */ - do { - gint64 len, pos; - GstFormat fmt = GST_FORMAT_TIME; - - if (gst_element_query (sink, GST_QUERY_POSITION, &fmt, &pos) && - gst_element_query (sink, GST_QUERY_TOTAL, &fmt, &len)) { - g_print ("Time: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r", - GST_TIME_ARGS (pos), GST_TIME_ARGS (len)); - } - } while (gst_bin_iterate (GST_BIN (pipeline))); - -[..] +[..] - +--> + } - - - If you are having problems with the dispatching behaviour, your best - bet is to manually decide which element to start running the query on. - You can get a list of supported formats and query-types with - gst_element_get_query_types () and - gst_element_get_formats (). - + @@ -118,27 +145,39 @@ main (gint argc, Events work in a very similar way as queries. Dispatching, for example, works exactly the same for events (and also has the same - limitations). Although there are more ways in which applications - and elements can interact using events, we will only focus on seeking - here. This is done using the seek-event. A seek-event contains a - seeking offset, a seek method (which indicates relative to what the - offset was given), a seek format (which is the unit of the offset, - e.g. time, audio samples, video frames or bytes) and optionally a - set of seeking-related flags (e.g. whether internal buffers should be - flushed). The behaviour of a seek is also wrapped in the function - gst_element_seek (). + limitations), and they can similarly be sent to the toplevel pipeline + and it will figure out everything for you. Although there are more + ways in which applications and elements can interact using events, + we will only focus on seeking here. This is done using the seek-event. + A seek-event contains a seeking offset, a seek method (which indicates + relative to what the offset was given), a seek format (which is the + unit of the offset, e.g. time, audio samples, video frames or bytes) + and optionally a set of seeking-related flags (e.g. whether internal + buffers should be flushed). The behaviour of a seek is also wrapped + in the function gst_element_seek (). static void -seek_to_time (GstElement *audiosink, - gint64 time_nanonseconds) +seek_to_time (GstElement *pipeline, + gint64 time_nanoseconds) { - gst_element_seek (audiosink, + gst_element_seek (pipeline, GST_SEEK_METHOD_SET | GST_FORMAT_TIME | GST_SEEK_FLAG_FLUSH, time_nanoseconds); } + + It is possible to do multiple seeks in short time-intervals, such as + a direct response to slider movement. After a seek, internally, the + pipeline will be paused (if it was playing), the position will be + re-set internally, the demuxers and decoders will decode from the new + position onwards and this will continue until all sinks have data + again. If it was playing originally, it will be set to playing again, + too. Since the new position is immediately available in a video output, + you will see the new frame, even if your pipeline is not in the playing + state. 
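      The example above leaves out how the position callback is attached to
      the mainloop. Assuming a GLib mainloop as used throughout this manual,
      the missing piece could look roughly as follows; the 200 millisecond
      interval is just an example value.

  /* have cb_print_position () called from the mainloop every 200 ms;
   * it keeps being called for as long as it returns TRUE */
  g_timeout_add (200, (GSourceFunc) cb_print_position, pipeline);

  /* a seek, for example from a slider callback, can simply reuse the
   * wrapper shown above, e.g. to jump to the one-minute mark */
  seek_to_time (pipeline, 60 * GST_SECOND);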
+ diff --git a/docs/manual/advanced-schedulers.xml b/docs/manual/advanced-schedulers.xml deleted file mode 100644 index fb95960002..0000000000 --- a/docs/manual/advanced-schedulers.xml +++ /dev/null @@ -1,152 +0,0 @@ - - Scheduling - - By now, you've seen several example applications. All of them would set - up a pipeline and call gst_bin_iterate () to start - media processing. You might have started wondering what happens during - pipeline iteration. This whole process of media processing is called - scheduling. Scheduling is considered one of the most complex parts of - &GStreamer;. Here, we will do no more than give a global overview of - scheduling, most of which will be purely informative. It might help in - understanding the underlying parts of &GStreamer;. - - - The scheduler is responsible for managing the plugins at runtime. Its - main responsibilities are: - - - - Managing data throughput between pads and elements in a pipeline. - This might sometimes imply temporary data storage between elements. - - - - - Calling functions in elements that do the actual data processing. - - - - - Monitoring state changes and enabling/disabling elements in the - chain. - - - - - Selecting and distributing the global clock. - - - - - - - The scheduler is a pluggable component; this means that alternative - schedulers can be written and plugged into GStreamer. There is usually - no need for interaction in the process of choosing the scheduler, though. - The default scheduler in &GStreamer; is called opt. Some - of the concepts discussed here are specific to opt. - - - - Managing elements and data throughput - - To understand some specifics of scheduling, it is important to know - how elements work internally. Largely, there are four types of elements: - _chain ()-based elements, _loop - ()-based elements, _get ()-based - elements and decoupled elements. Each of those have a set of features - and limitations that are important for how they are scheduled. - - - - - _chain ()-based elements are elements that - have a _chain ()-function defined for each of - their sinkpads. Those functions will receive data whenever input - data is available. In those functions, the element can - push data over its source pad(s) to peer - elements. _chain ()-based elements cannot - pull additional data from their sinkpad(s). - Most elements in &GStreamer; are _chain - ()-based. - - - - - _loop ()-based elements are elements that have - a _loop ()-function defined for the whole - element. Inside this function, the element can pull buffers from - its sink pad(s) and push data over its source pad(s) as it sees fit. - Such elements usually require specific control over their input. - Muxers and demuxers are usually _loop ()-based. - - - - - _get ()-based elements are elements with only - source pads. For each source pad, a _get - ()-function is defined, which is called whenever the peer - element needs additional input data. Most source elements are, in - fact, _get ()-based. Such an element cannot - actively push data. - - - - - Decoupled elements are elements whose source pads are - _get ()-based and whose sink pads are - _chain ()-based. The _chain - ()-function cannot push data over its source pad(s), - however. One such element is the queue element, - which is a thread boundary element. Since only one side of such - elements are interesting for one particular scheduler, we can - safely handle those elements as if they were either - _get ()- or _chain - ()-based. 
Therefore, we will further omit this type - of elements in the discussion. - - - - - Obviously, the type of elements that are linked together have - implications for how the elements will be scheduled. If a get-based - element is linked to a loop-based element and the loop-based element - requests data from its sinkpad, we can just call the get-function and - be done with it. However, if two loop-based elements are linked to - each other, it's a lot more complicated. Similarly, a loop-based - element linked to a chain-based element is a lot easier than two - loop-based elements linked to each other. - - - The default &GStreamer; scheduler, opt, uses a concept - of chains and groups. A group is a series of elements that - do not require any context switches or intermediate data stores to - be executed. In practice, this implies zero or one loop-based elements, - one get-based element (at the beginning) and an infinite amount of - chain-based elements. If there is a loop-based element, then the - scheduler will simply call this elements loop-function to iterate. - If there is no loop-based element, then data will be pulled from the - get-based element and will be pushed over the chain-based elements. - - - A chain is a series of groups that depend on each other for data. - For example, two linked loop-based elements would end up in different - groups, but in the same chain. Whenever the first loop-based element - pushes data over its source pad, the data will be temporarily stored - inside the scheduler until the loop-function returns. When it's done, - the loop-function of the second element will be called to process this - data. If it pulls data from its sinkpad while no data is available, - the scheduler will emulate a get-function and, in this - function, iterate the first group until data is available. - - - The above is roughly how scheduling works in &GStreamer;. This has - some implications for ideal pipeline design. An pipeline would - ideally contain at most one loop-based element, so that all data - processing is immediate and no data is stored inside the scheduler - during group switches. You would think that this decreases overhead - significantly. In practice, this is not so bad, however. It's something - to keep in the back of your mind, nothing more. - - - diff --git a/docs/manual/advanced-threads.xml b/docs/manual/advanced-threads.xml index d70751f256..52f6b9af63 100644 --- a/docs/manual/advanced-threads.xml +++ b/docs/manual/advanced-threads.xml @@ -1,43 +1,22 @@ Threads - GStreamer has support for multithreading through the use of - the GstThread - object. This object is in fact a special GstBin - that will start a new thread (using Glib's - GThread system) when started. + &GStreamer; is inherently multi-threaded, and is fully thread-safe. + Most threading internals are hidden from the application, which should + make application development easier. However, in some cases, applications + may want to have influence on some parts of those. &GStreamer; allows + applications to force the use of multiple threads over some parts of + a pipeline. - - To create a new thread, you can simply use gst_thread_new - (). From then on, you can use it similar to how you would - use a GstBin. You can add elements to it, - change state and so on. The largest difference between a thread and - other bins is that the thread does not require iteration. Once set to - the GST_STATE_PLAYING state, it will iterate - its contained children elements automatically. 
- - - shows how a thread can be - visualised. - -
- A thread - - - - - -
- When would you want to use a thread? + When would you want to force a thread? - There are several reasons to use threads. However, there's also some - reasons to limit the use of threads as much as possible. We will go - into the drawbacks of threading in &GStreamer; in the next section. - Let's first list some situations where threads can be useful: + There are several reasons to force the use of threads. However, + for performance reasons, you never want to use one thread for every + element out there, since that will create some overhead. + Let's now list some situations where threads can be particularly + useful: @@ -56,15 +35,6 @@ will run independently and their synchronization will be better. - - - Data pre-rolls. You can use threads and queues (thread boundaries) - to cache a few seconds of data before playing. By using this - approach, the whole pipeline will already be setup and data will - already be decoded. When activating the rest of the pipeline, the - switch from PAUSED to PLAYING will be instant. - -
a two-threaded decoder with a queue @@ -76,8 +46,9 @@
Above, we've mentioned the queue element several times - now. A queue is a thread boundary element. It does so by using a - classic provider/receiver model as learned in threading classes at + now. A queue is the thread boundary element through which you can + force the use of threads. It does so by using a classic + provider/receiver model as learned in threading classes at universities all around the world. By doing this, it acts both as a means to make data throughput between threads threadsafe, and it can also act as a buffer. Queues have several GObject @@ -87,164 +58,33 @@ there's more data than the upper treshold, it will block input or (if configured to do so) drop data. -
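      To make this concrete, the fragment below sketches how the audio output
      of the hello world pipeline could be forced into its own thread. The
      pipeline, conv and sink
      variables are assumed to be the ones from that example; the only change
      is that a queue is linked in between the converter and
      the sink instead of linking those two directly.

  GstElement *queue;

  /* the queue creates the thread boundary; everything downstream of it
   * runs in a separate thread, and GStreamer handles the threading
   * details internally */
  queue = gst_element_factory_make ("queue", "buffer");
  gst_bin_add (GST_BIN (pipeline), queue);
  gst_element_link_many (conv, queue, sink, NULL);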
- - - Constraints placed on the pipeline by the GstThread - Within the pipeline, everything is the same as in any other bin. The - difference lies at the thread boundary, at the link between the - thread and the outside world (containing bin). Since &GStreamer; is - fundamentally buffer-oriented rather than byte-oriented, the natural - solution to this problem is an element that can "buffer" the buffers - between the threads, in a thread-safe fashion. This element is the - queue element. A queue should be placed in between any - two elements whose pads are linked together while the elements live in - different threads. It doesn't matter if the queue is placed in the - containing bin or in the thread itself, but it needs to be present - on one side or the other to enable inter-thread communication. - - - If you are writing a GUI application, making the top-level bin a - thread will make your GUI more responsive. If it were a pipeline - instead, it would have to be iterated by your application's event - loop, which increases the latency between events (say, keyboard - presses) and responses from the GUI. In addition, any slight hang - in the GUI would delay iteration of the pipeline, which (for example) - could cause pops in the output of the sound card, if it is an audio - pipeline. - - - A problem with using threads is, however, thread contexts. If you - connect to a signal that is emitted inside a thread, then the signal - handler for this thread will be executed in that same - thread! This is very important to remember, because many - graphical toolkits can not run multi-threaded. Gtk+, for example, - only allows threaded access to UI objects if you explicitely use - mutexes. Not doing so will result in random crashes and X errors. - A solution many people use is to place an idle handler in the signal - handler, and have the actual signal emission code be executed in the - idle handler, which will be executed from the mainloop. - - - Generally, if you use threads, you will encounter some problems. Don't - hesistate to ask us for help in case of problems. + To use a queues (and therefore force the use of two distinct threads + in the pipeline), one can simply create a queue element + and put this in as part of the pipeline. &GStreamer; will take care of + all threading details internally. - - A threaded example application - - As an example we show the helloworld program that we coded in - using a thread. Note that - the whole application lives in a thread (as opposed to half - of the application living in a thread and the other half being - another thread or a pipeline). Therefore, it does not need a - queue element in this specific case. + + Scheduling in &GStreamer; + + + Scheduling of pipelines in &GStreamer; is done by using a thread for + each group, where a group is a set of elements separated + by queue elements. Within such a group, scheduling is + either push-based or pull-based, depending on which mode is supported + by the particular element. If elements support random access to data, + such as file sources, then elements downstream in the pipeline become + the entry point of this group (i.e. the element controlling the + scheduling of other elements). The entry point pulls data from upstream + and pushes data downstream, thereby calling data handling functions on + either type of element. + + + In practice, most elements in &GStreamer;, such as decoders, encoders, + etc. 
only support push-based scheduling, which means that in practice, + &GStreamer; uses a push-based scheduling model. - - -#include <gst/gst.h> - -GstElement *thread, *source, *decodebin, *audiosink; - -static gboolean -idle_eos (gpointer data) -{ - g_print ("Have idle-func in thread %p\n", g_thread_self ()); - gst_main_quit (); - - /* do this function only once */ - return FALSE; -} - -/* - * EOS will be called when the src element has an end of stream. - * Note that this function will be called in the thread context. - * We will place an idle handler to the function that really - * quits the application. - */ -static void -cb_eos (GstElement *thread, - gpointer data) -{ - g_print ("Have eos in thread %p\n", g_thread_self ()); - g_idle_add ((GSourceFunc) idle_eos, NULL); -} - -/* - * On error, too, you'll want to forward signals to the main - * thread, especially when using GUI applications. - */ - -static void -cb_error (GstElement *thread, - GstElement *source, - GError *error, - gchar *debug, - gpointer data) -{ - g_print ("Error in thread %p: %s\n", g_thread_self (), error->message); - g_idle_add ((GSourceFunc) idle_eos, NULL); -} - -/* - * Link new pad from decodebin to audiosink. - * Contains no further error checking. - */ - -static void -cb_newpad (GstElement *decodebin, - GstPad *pad, - gboolean last, - gpointer data) -{ - gst_pad_link (pad, gst_element_get_pad (audiosink, "sink")); - gst_bin_add (GST_BIN (thread), audiosink); - gst_bin_sync_children_state (GST_BIN (thread)); -} - -gint -main (gint argc, - gchar *argv[]) -{ - /* init GStreamer */ - gst_init (&argc, &argv); - - /* make sure we have a filename argument */ - if (argc != 2) { - g_print ("usage: %s <Ogg/Vorbis filename>\n", argv[0]); - return -1; - } - - /* create a new thread to hold the elements */ - thread = gst_thread_new ("thread"); - g_signal_connect (thread, "eos", G_CALLBACK (cb_eos), NULL); - g_signal_connect (thread, "error", G_CALLBACK (cb_error), NULL); - - /* create elements */ - source = gst_element_factory_make ("filesrc", "source"); - g_object_set (G_OBJECT (source), "location", argv[1], NULL); - decodebin = gst_element_factory_make ("decodebin", "decoder"); - g_signal_connect (decodebin, "new-decoded-pad", - G_CALLBACK (cb_newpad), NULL); - audiosink = gst_element_factory_make ("alsasink", "audiosink"); - - /* setup */ - gst_bin_add_many (GST_BIN (thread), source, decodebin, NULL); - gst_element_link (source, decodebin); - gst_element_set_state (audiosink, GST_STATE_PAUSED); - gst_element_set_state (thread, GST_STATE_PLAYING); - - /* no need to iterate. We can now use a mainloop */ - gst_main (); - - /* unset */ - gst_element_set_state (thread, GST_STATE_NULL); - gst_object_unref (GST_OBJECT (thread)); - - return 0; -} -
diff --git a/docs/manual/appendix-porting.xml b/docs/manual/appendix-porting.xml new file mode 100644 index 0000000000..8dda43cee2 --- /dev/null +++ b/docs/manual/appendix-porting.xml @@ -0,0 +1,90 @@ + + Porting 0.8 applications to 0.9 + + This section of the appendix will discuss shortly what changes to + applications will be needed to quickly and conveniently port most + applications from &GStreamer;-0.8 to &GStreamer;-0.9, with references + to the relevant sections in this Application Development Manual + where needed. With this list, it should be possible to port simple + applications to &GStreamer;-0.9 in less than a day. + + + + List of changes + + + + Most functions returning an object or an object property have + been changed to return its own reference rather than a constant + reference of the one owned by the object itself. The reason for + this change is primarily threadsafety. This means, effectively, + that return values of functions such as + gst_element_get_pad (), + gst_pad_get_name () and many more like these + have to be free'ed or unreferenced after use. Check the API + references of each function to know for sure whether return + values should be free'ed or not. + + + + + Applications should no longer use signal handlers to be notified + of errors, end-of-stream and other similar pipeline events. + Instead, they should use the GstBus, which + has been discussed in . The bus will + take care that the messages will be delivered in the context of + mainloop, which is almost certainly the application's main thread. + The big advantage of this is that applications no longer need to + be thread-aware; they don't need to use g_idle_add + () in the signal handler and do the actual real work + in the idle-callback. &GStreamer; now does all that internally. + + + + + Related to this, gst_bin_iterate () has been + removed. Pipelines will iterate in their own thread, and applications + can simply run a GMainLoop (or call the + mainloop of their UI toolkit, such as gtk_main + ()). + + + + + State changes can be delayed; ASYNC. + + + + + In 0.8, events and queries had to manually be sent to sinks in + pipelines (unless you were using playbin). This is no longer + the case in 0.9. In 0.9, queries and events can be sent to + toplevel pipelines, and the pipeline will do the dispatching + internally for you. This means less bookkeeping in your + application. For a short code example, see . Related, seeking is now + threadsafe, and your video output will show the new video + position's frame while seeking, providing a better user + experience. + + + + + The GstThread object has been removed. + Applications can now simply put elements in a pipeline with + optionally some queue elements in between for + buffering, and &GStreamer; will take care of creating threads + internally. It is still possible to have parts of a pipeline + run in different threads than others, by using the + queue element. See + for details. + + + + + Filtered caps -> caps-filter. + + + + + diff --git a/docs/manual/basics-bins.xml b/docs/manual/basics-bins.xml index d7398c8749..3f1772ab1b 100644 --- a/docs/manual/basics-bins.xml +++ b/docs/manual/basics-bins.xml @@ -23,7 +23,7 @@ optimal plan for that data flow. Plan generation is one of the most complicated procedures in &GStreamer;. You will learn more about this process, called scheduling, in . + linkend="section-threads-scheduling"/>.
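      Coming back to the first item of the porting checklist above, the
      fragment below sketches what the new reference-ownership rules mean
      in practice; element stands for any element you
      happen to hold a handle to.

  GstPad *pad;
  gchar *name;

  /* in 0.9, the returned pad is a new reference owned by the caller */
  pad = gst_element_get_pad (element, "src");

  /* the returned name is a copy and must be freed as well */
  name = gst_pad_get_name (pad);
  g_print ("pad name: %s\n", name);

  /* release what we own */
  g_free (name);
  gst_object_unref (GST_OBJECT (pad));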
diff --git a/docs/manual/basics-bus.xml b/docs/manual/basics-bus.xml new file mode 100644 index 0000000000..4a65f377dd --- /dev/null +++ b/docs/manual/basics-bus.xml @@ -0,0 +1,186 @@ + + Bus + + A bus is a simple system that takes care of forwarding messages from + the pipeline threads to an application in its own thread context. The + advantage of a bus is that an application does not need to be + thread-aware in order to use &GStreamer;, even though &GStreamer; + itself is heavily threaded. + + + Every pipeline contains a bus by default, so applications do not need + to create a bus or anything. The only thing applications should do is + set a message handler on a bus, which is similar to a signal handler + to an object. When the mainloop is running, the bus will periodically + be checked for new messages, and the callback will be called when any + message is available. + + + + How to use a bus + + To use a bus, attach a message handler to the default bus of a pipeline + using gst_bus_add_watch (). This handler will be + called whenever the pipeline emits a message to the bus. In this + handler, check the signal type (see next section) and do something + accordingly. The return value of the handler should be TRUE to remove + the message from the bus. + + +#include <gst/gst.h> + +static GMainLoop *loop; + +static gboolean +my_bus_callback (GstBus *bus, + GstMessage *message, + gpointer data) +{ + switch (GST_MESSAGE_TYPE (message)) { + case GST_MESSAGE_ERROR: { + GError *err; + gchar *debug; + + gst_message_parse_error (message, &err, &debug); + g_print ("Error: %s\n", err->message); + g_error_free (err); + g_free (debug); + + g_main_loop_quit (loop); + break; + } + case GST_MESSAGE_EOS: + /* end-of-stream */ + g_main_loop_quit (loop); + break; + default: + /* unhandled message */ + break; + } + + /* remove message from the queue */ + return TRUE; +} + +gint +main (gint argc, + gchar *argv[]) +{ + GMainLoop *loop; + GstElement *pipeline; + + /* init */ + gst_init (&argc, &argv); + + /* create pipeline, add handler */ + pipeline = gst_pipeline_new ("my_pipeline"); + gst_bus_add_watch (gst_pipeline_get_bus (GST_PIPELINE (pipeline)), + my_bus_callback, NULL); + +[..] + + /* in the mainloop, all messages posted to the bus by the pipeline + * will automatically be sent to our callback. */ + loop = g_main_loop_new (NULL, FALSE); + g_main_loop_run (loop); + + return 0; +} + + + + It is important to know that the handler will be called in the thread + context of the mainloop. This means that the interaction between the + pipeline and application over the bus is + asynchronous, and thus not suited for some + real-time purposes, such as cross-fading between audio tracks, doing + (theoretically) gapless playback or video effects. All such things + should be done in the pipeline context, which is easiest by writing + a &GStreamer; plug-in. It is very useful for its primary purpose, + though: passing messages from pipeline to application. + + + + + Message types + + &GStreamer; has a few pre-defined message types that can be passed + over the bus. The messages are extendible, however. Plug-ins can + define additional messages, and applications can decide to either + have specific code for those or ignore them. All applications are + strongly recommended to at least handle error messages by providing + visual feedback to the user. + + + All messages have a message source, type and timestamp. The message + source can be used to see which element emitted the message. 
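      In a handler, the source can be checked with the
      GST_MESSAGE_SRC macro. A small sketch, assuming the
      pipeline variable from the example above:

  /* only act on state-change messages coming from the pipeline itself,
   * not from each of its children */
  if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_STATE_CHANGED &&
      GST_MESSAGE_SRC (message) == GST_OBJECT (pipeline)) {
    /* handle the pipeline's state change here */
  }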
For some + messages, for example, only the ones emitted by the top-level pipeline + will be interesting to most applications (e.g. for state-change + notifications). Below is a list of all messages and a short explanation + of what they do and how to parse message-specific content. + + + + + Error, warning and information notifications: those are used + by elements if a message should be shown to the user about the + state of the pipeline. Error messages are fatal and terminate + the data-passing. The error should be repaired to resume pipeline + acvitity. Warnings are not fatal, but imply a problem nevertheless. + Information messages are for non-problem notifications. All those + messages contain a GError with the main + error type and message, and optionally a debug string. Both + can be extracted using gst_message_parse_error + (), _parse_warning () and + _parse_info (). Both error and debug string + should be free'ed after use. + + + + + End-of-stream notification: this is emitted when the stream has + ended. The state of the pipeline will not change, but further + media handling will stall. Applications can use this to skip to + the next song in their playlist. After end-of-stream, it is also + possible to seek back in the stream. Playback will then continue + automatically. This message has no specific arguments. + + + + + Tags: emitted when metadata was found in the stream. This can be + emitted multiple times for a pipeline (e.g. once for descriptive + metadata such as artist name or song title, and another one for + stream-information, such as samplerate and bitrate). Applications + should cache metadata internally. gst_message_parse_tag + () should be used to parse the taglist, which should + be dereferenced after use. + + + + + State-changes: emitted after a successful state change. + gst_message_parse_state_changed () can be + used to parse the old and new state of this transition. + + + + + Buffering: emitted during caching of network-streams. One can + manually extract the progress (in percent) from the message by + extracting the buffer-percent property from the + structure returned by gst_message_parse_structure + (). + + + + + Other application-specific messages: any information on those can + be extracted by getting a structure (see above) and reading + properties. In most cases, such messages can conveniently be + ignored. + + + + + diff --git a/docs/manual/basics-elements.xml b/docs/manual/basics-elements.xml index 23ef63ddfb..03c7ddd611 100644 --- a/docs/manual/basics-elements.xml +++ b/docs/manual/basics-elements.xml @@ -252,7 +252,7 @@ main (int argc, char *argv[]) { GstElement *element; - const gchar *name; + gchar *name; /* init GStreamer */ gst_init (&argc, &argv); @@ -263,6 +263,7 @@ main (int argc, /* get name */ g_object_get (G_OBJECT (element), "name", &name, NULL); g_print ("The name of the element is '%s'.\n", name); + g_free (name); gst_object_unref (GST_OBJECT (element)); @@ -489,17 +490,30 @@ main (int argc, GST_STATE_PAUSED: in this state, an element has opened the stream, but is not actively processing - it. An element should not modify the stream's position, data or - anything else in this state. When set back to PLAYING, it should - continue processing at the point where it left off as soon as - possible. + it. An element is allowed to modify a stream's position, read + and process data and such to prepare for playback as soon as + state is changed to PLAYING, but it is not + allowed to play the data which would make the clock run. 
+ In summary, PAUSED is the same as PLAYING but without a running + clock. + + + Elements going into the PAUSED state should prepare themselves + for moving over to the PLAYING state as soon as possible. Video + or audio outputs would, for example, wait for data to arrive and + queue it so they can play it right after the state change. Also, + video sinks can already play the first frame (since this does + not affect the clock yet). Autopluggers could use this same + state transition to already plug together a pipeline. Most other + elements, such as codecs or filters, do not need to explicitely + do anything in this state, however. GST_STATE_PLAYING: in the PLAYING state, an element does exactly the same as in the PAUSED state, except - that it actually processes data. + that the clock now runs. @@ -511,12 +525,12 @@ main (int argc, will internally set the element to READY and PAUSED in between. - Even though an element in GST_STATE_PLAYING - is ready for data processing, it will not necessarily do that. If - the element is placed in a thread (see ), it will process data automatically. - In other cases, however, you will need to iterate - the element's container. + When moved to GST_STATE_PLAYING, pipelines + will process data automatically. They do not need to be iterated in + any form. Internally, &GStreamer; will start threads that take this + task on to them. &GStreamer; will also take care of switching + messages from the pipeline's thread into the application's own + thread, by using a . diff --git a/docs/manual/basics-helloworld.xml b/docs/manual/basics-helloworld.xml index e5a2d701c9..6453f62b34 100644 --- a/docs/manual/basics-helloworld.xml +++ b/docs/manual/basics-helloworld.xml @@ -48,9 +48,9 @@ pipeline until we've played the whole song. We've previously learned how to add elements to a container bin in , and we've learned about element states - in . We will use the function - gst_bin_sync_children_state () to synchronize - the state of a bin on all of its contained children. + in . We will also attach + a message handler to the pipeline bus so we can retrieve errors + and detect the end-of-stream. Let's now add all the code together to get our very first audio @@ -65,29 +65,59 @@ * example, we will use them, however. */ -GstElement *pipeline, *source, *parser, *decoder, *sink; +GstElement *pipeline, *source, *parser, *decoder, *conv, *sink; + +static gboolean +bus_call (GstBus *bus, + GstMessage *msg, + gpointer data) +{ + GMainLoop *loop = data; + + switch (GST_MESSAGE_TYPE (msg)) { + case GST_MESSAGE_EOS: + g_print ("End-of-stream\n"); + g_main_loop_quit (loop); + break; + case GST_MESSAGE_ERROR: { + gchar *debug; + GError *err; + + gst_message_parse_error (msg, &err, &debug); + g_free (debug); + + g_print ("Error: %s\n", err->message); + g_error_free (err); + + g_main_loop_quit (loop); + break; + } + default: + break; + } + + return TRUE; +} static void new_pad (GstElement *element, GstPad *pad, gpointer data) { - /* We can now link this pad with the audio decoder and - * add both decoder and audio output to the pipeline. */ + /* We can now link this pad with the audio decoder */ + g_print ("Dynamic pad created, linking parser/decoder\n"); gst_pad_link (pad, gst_element_get_pad (decoder, "sink")); - gst_bin_add_many (GST_BIN (pipeline), decoder, sink, NULL); - - /* This function synchronizes a bins state on all of its - * contained children. 
*/ - gst_bin_sync_children_state (GST_BIN (pipeline)); } int main (int argc, char *argv[]) { + GMainLoop *loop; + /* initialize GStreamer */ gst_init (&argc, &argv); + loop = g_main_loop_new (NULL, FALSE); /* check input arguments */ if (argc != 2) { @@ -100,37 +130,40 @@ main (int argc, source = gst_element_factory_make ("filesrc", "file-source"); parser = gst_element_factory_make ("oggdemux", "ogg-parser"); decoder = gst_element_factory_make ("vorbisdec", "vorbis-decoder"); + conv = gst_element_factory_make ("audioconvert", "converter"); sink = gst_element_factory_make ("alsasink", "alsa-output"); + if (!pipeline || !source || !parser || !decoder || !conv || !sink) { + g_print ("One element could not be created\n"); + return -1; + } - /* set filename property on the file source */ + /* set filename property on the file source. Also add a message + * handler. */ g_object_set (G_OBJECT (source), "location", argv[1], NULL); + gst_bus_add_watch (gst_pipeline_get_bus (GST_PIPELINE (pipeline)), + bus_call, loop); /* link together - note that we cannot link the parser and * decoder yet, becuse the parser uses dynamic pads. For that, * we set a new-pad signal handler. */ gst_element_link (source, parser); - gst_element_link (decoder, sink); + gst_element_link_many (decoder, conv, sink, NULL); g_signal_connect (parser, "new-pad", G_CALLBACK (new_pad), NULL); - /* put all elements in a bin - or at least the ones we will use - * instantly. */ - gst_bin_add_many (GST_BIN (pipeline), source, parser, NULL); + /* put all elements in a bin */ + gst_bin_add_many (GST_BIN (pipeline), + source, parser, decoder, conv, sink, NULL); - /* Now set to playing and iterate. We will set the decoder and - * audio output to ready so they initialize their memory already. - * This will decrease the amount of time spent on linking these - * elements when the Ogg parser emits the new-pad signal. */ - gst_element_set_state (decoder, GST_STATE_READY); - gst_element_set_state (sink, GST_STATE_READY); + /* Now set to playing and iterate. */ + g_print ("Setting to PLAYING\n"); gst_element_set_state (pipeline, GST_STATE_PLAYING); - - /* and now iterate - the rest will be automatic from here on. - * When the file is finished, gst_bin_iterate () will return - * FALSE, thereby terminating this loop. */ - while (gst_bin_iterate (GST_BIN (pipeline))) ; + g_print ("Running\n"); + g_main_loop_run (loop); /* clean up nicely */ + g_print ("Returned, stopping playback\n"); gst_element_set_state (pipeline, GST_STATE_NULL); + g_print ("Deleting pipeline\n"); gst_object_unref (GST_OBJECT (pipeline)); return 0; diff --git a/docs/manual/basics-pads.xml b/docs/manual/basics-pads.xml index 41efbce2a9..429f74089d 100644 --- a/docs/manual/basics-pads.xml +++ b/docs/manual/basics-pads.xml @@ -71,7 +71,11 @@ cb_new_pad (GstElement *element, GstPad *pad, gpointer data) { - g_print ("A new pad %s was created\n", gst_pad_get_name (pad)); + gchar *name; + + name = gst_pad_get_name (pad); + g_print ("A new pad %s was created\n", name); + g_free (name); /* here, you would setup a new pad link for the newly created pad */ [..] 
@@ -79,9 +83,11 @@ cb_new_pad (GstElement *element, } int -main(int argc, char *argv[]) +main (int argc, + char *argv[]) { GstElement *pipeline, *source, *demux; + GMainLoop *loop; /* init */ gst_init (&argc, &argv); @@ -96,14 +102,15 @@ main(int argc, char *argv[]) /* put together a pipeline */ gst_bin_add_many (GST_BIN (pipeline), source, demux, NULL); - gst_element_link (source, demux); + gst_element_link_pads (source, "src", demux, "sink"); /* listen for newly created pads */ g_signal_connect (demux, "new-pad", G_CALLBACK (cb_new_pad), NULL); /* start the pipeline */ gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); - while (gst_bin_iterate (GST_BIN (pipeline))); + loop = g_main_loop_new (NULL, FALSE); + g_main_loop_run (loop); [..] #include <gst/gst.h> @@ -518,16 +546,20 @@ main (int argc, char *argv[]) { GstElement *bin, *sink; + GstPad *pad; /* init */ gst_init (&argc, &argv); - /* create element, add to bin, add ghostpad */ + /* create element, add to bin */ sink = gst_element_factory_make ("fakesink", "sink"); bin = gst_bin_new ("mybin"); gst_bin_add (GST_BIN (bin), sink); - gst_element_add_ghost_pad (bin, - gst_element_get_pad (sink, "sink"), "sink"); + + /* add ghostpad */ + pad = gst_element_get_pad (sink, "sink"); + gst_element_add_pad (bin, gst_ghost_pad_new ("sink", pad)); + gst_object_unref (GST_OBJECT (pad)); [..] + @@ -170,6 +171,7 @@ &INIT; &ELEMENTS; &BINS; + &BUS; &PADS; &DATA; &HELLOWORLD; @@ -187,9 +189,18 @@ able to create a simple application. However, &GStreamer; provides much more candy than just the basics of playing back audio files. In this chapter, you will learn more of the - low-level features and internals of &GStreamer;, such as threads, - scheduling, synchronization, metadata, interfaces and dynamic - parameters. + low-level features and internals of &GStreamer;. + + + Some parts of this part will serve mostly as an explanation of + how &GStreamer; works internally; they are not actually needed for + actual application development. This includes chapter such as the + ones covering scheduling, autoplugging and synchronization. Other + chapters, however, discuss more advanced ways of + pipeline-application interaction, and can turn out to be very useful + for certain applications. This includes the chapters on metadata, + querying and events, interfaces, dynamic parameters and pipeline + data manipulation. @@ -199,7 +210,6 @@ &CLOCKS; &DPARAMS; &THREADS; - &SCHEDULERS; &AUTOPLUGGING; &DATAACCESS; @@ -245,6 +255,10 @@ shortly explain how applications included with &GStreamer; can help making your life easier, and some information on debugging. + + In addition, we also provide a porting guide which will explain + easily how to port &GStreamer;-0.8 applications to &GStreamer;-0.9. 
+ &CHECKLIST; + &PORTING; &INTEGRATION; &LICENSING; &WIN32; diff --git a/docs/manual/thread.fig b/docs/manual/thread.fig deleted file mode 100644 index eb55648b06..0000000000 --- a/docs/manual/thread.fig +++ /dev/null @@ -1,51 +0,0 @@ -#FIG 3.2 -Landscape -Center -Inches -Letter -100.00 -Single --2 -1200 2 -2 1 0 1 0 7 50 0 -1 0.000 0 0 -1 1 0 2 - 1 1 1.00 77.53 103.38 - 3759 3501 4212 3501 -2 2 0 1 0 6 50 0 20 0.000 0 0 -1 0 0 5 - 4212 3371 4858 3371 4858 3824 4212 3824 4212 3371 -2 2 0 1 0 6 50 0 20 0.000 0 0 -1 0 0 5 - 5245 3371 5892 3371 5892 3824 5245 3824 5245 3371 -2 1 0 1 0 7 50 0 -1 0.000 0 0 -1 1 0 2 - 1 1 1.00 77.53 103.38 - 5892 3501 6408 3501 -2 2 0 1 0 6 50 0 20 0.000 0 0 -1 0 0 5 - 6408 3371 7055 3371 7055 3824 6408 3824 6408 3371 -2 2 0 1 0 6 50 0 20 0.000 0 0 -1 0 0 5 - 7442 3371 8088 3371 8088 3824 7442 3824 7442 3371 -2 2 0 1 0 6 50 0 20 0.000 0 0 -1 0 0 5 - 8541 3371 9187 3371 9187 3824 8541 3824 8541 3371 -2 1 0 1 0 7 50 0 -1 0.000 0 0 -1 1 0 2 - 1 1 1.00 77.53 103.38 - 8088 3501 8541 3501 -2 2 0 1 0 6 49 0 20 0.000 0 0 -1 0 0 5 - 3113 3371 3759 3371 3759 3824 3113 3824 3113 3371 -2 2 0 1 0 7 50 0 20 0.000 0 0 -1 0 0 5 - 2079 2661 3759 2661 3759 4082 2079 4082 2079 2661 -2 2 0 1 0 7 51 0 20 0.000 0 0 -1 0 0 5 - 4212 2661 5892 2661 5892 4082 4212 4082 4212 2661 -2 2 0 1 0 7 51 0 20 0.000 0 0 -1 0 0 5 - 6408 2661 8088 2661 8088 4082 6408 4082 6408 2661 -2 2 0 1 0 7 51 0 20 0.000 0 0 -1 0 0 5 - 8541 2661 10221 2661 10221 4082 8541 4082 8541 2661 -2 2 0 1 0 7 100 0 19 0.000 0 0 -1 0 0 5 - 1950 1950 10350 1950 10350 4405 1950 4405 1950 1950 -4 0 0 50 0 16 10 0.0000 4 116 284 4341 3694 sink\001 -4 0 0 50 0 16 10 0.0000 4 90 220 5504 3694 src\001 -4 0 0 50 0 16 10 0.0000 4 116 284 6602 3694 sink\001 -4 0 0 50 0 16 10 0.0000 4 90 220 7701 3694 src\001 -4 0 0 50 0 16 10 0.0000 4 116 284 8670 3694 sink\001 -4 0 0 50 0 16 10 0.0000 4 142 866 2208 2919 disk_source\001 -4 0 0 50 0 16 10 0.0000 4 129 401 4341 2919 parse\001 -4 0 0 50 0 16 10 0.0000 4 116 594 6538 2919 decoder\001 -4 0 0 50 0 16 10 0.0000 4 155 801 8670 2919 play_audio\001 -4 0 0 48 0 16 10 0.0000 4 90 220 3307 3694 src\001 -4 0 0 50 0 16 10 0.0000 4 116 452 2144 2208 thread\001 diff --git a/examples/manual/Makefile.am b/examples/manual/Makefile.am index dd6889035b..ad62768159 100644 --- a/examples/manual/Makefile.am +++ b/examples/manual/Makefile.am @@ -20,6 +20,7 @@ gnome_CFLAGS = $(GST_OBJ_CFLAGS) $(LIBGNOMEUI_CFLAGS) EXTRA_DIST = extract.pl EXAMPLES = \ + bus \ dynamic \ $(GNOME) \ elementcreate \ @@ -34,7 +35,6 @@ EXAMPLES = \ init \ popt \ query \ - threads \ typefind \ fakesrc \ playbin \ @@ -49,6 +49,10 @@ bin.c : $(top_srcdir)/docs/manual/basics-bins.xml $(PERL_PATH) $(srcdir)/extract.pl $@ \ $(top_srcdir)/docs/manual/basics-bins.xml +bus.c: $(top_srcdir)/docs/manual/basics-bus.xml + $(PERL_PATH) $(srcdir)/extract.pl $@ \ + $(top_srcdir)/docs/manual/basics-bus.xml + pad.c ghostpad.c: $(top_srcdir)/docs/manual/basics-pads.xml $(PERL_PATH) $(srcdir)/extract.pl $@ \ $(top_srcdir)/docs/manual/basics-pads.xml @@ -69,10 +73,6 @@ query.c: $(top_srcdir)/docs/manual/advanced-position.xml $(PERL_PATH) $(srcdir)/extract.pl $@ \ $(top_srcdir)/docs/manual/advanced-position.xml -threads.c: $(top_srcdir)/docs/manual/advanced-threads.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ \ - $(top_srcdir)/docs/manual/advanced-threads.xml - typefind.c dynamic.c: $(top_srcdir)/docs/manual/advanced-autoplugging.xml $(PERL_PATH) $(srcdir)/extract.pl $@ \ $(top_srcdir)/docs/manual/advanced-autoplugging.xml diff --git 
a/tests/old/examples/manual/Makefile.am b/tests/old/examples/manual/Makefile.am index dd6889035b..ad62768159 100644 --- a/tests/old/examples/manual/Makefile.am +++ b/tests/old/examples/manual/Makefile.am @@ -20,6 +20,7 @@ gnome_CFLAGS = $(GST_OBJ_CFLAGS) $(LIBGNOMEUI_CFLAGS) EXTRA_DIST = extract.pl EXAMPLES = \ + bus \ dynamic \ $(GNOME) \ elementcreate \ @@ -34,7 +35,6 @@ EXAMPLES = \ init \ popt \ query \ - threads \ typefind \ fakesrc \ playbin \ @@ -49,6 +49,10 @@ bin.c : $(top_srcdir)/docs/manual/basics-bins.xml $(PERL_PATH) $(srcdir)/extract.pl $@ \ $(top_srcdir)/docs/manual/basics-bins.xml +bus.c: $(top_srcdir)/docs/manual/basics-bus.xml + $(PERL_PATH) $(srcdir)/extract.pl $@ \ + $(top_srcdir)/docs/manual/basics-bus.xml + pad.c ghostpad.c: $(top_srcdir)/docs/manual/basics-pads.xml $(PERL_PATH) $(srcdir)/extract.pl $@ \ $(top_srcdir)/docs/manual/basics-pads.xml @@ -69,10 +73,6 @@ query.c: $(top_srcdir)/docs/manual/advanced-position.xml $(PERL_PATH) $(srcdir)/extract.pl $@ \ $(top_srcdir)/docs/manual/advanced-position.xml -threads.c: $(top_srcdir)/docs/manual/advanced-threads.xml - $(PERL_PATH) $(srcdir)/extract.pl $@ \ - $(top_srcdir)/docs/manual/advanced-threads.xml - typefind.c dynamic.c: $(top_srcdir)/docs/manual/advanced-autoplugging.xml $(PERL_PATH) $(srcdir)/extract.pl $@ \ $(top_srcdir)/docs/manual/advanced-autoplugging.xml