diff --git a/docs/manual/advanced-autoplugging.xml b/docs/manual/advanced-autoplugging.xml index fe70819526..230828a10e 100644 --- a/docs/manual/advanced-autoplugging.xml +++ b/docs/manual/advanced-autoplugging.xml @@ -1,5 +1,5 @@ - - Putting together a pipeline + + More on factories The small application we created in the previous chapter used the concept of a factory to create the elements. In this chapter we will @@ -203,6 +203,9 @@ struct _GstType { This function will return 0 if the extension was not known. + + For more information, see . + @@ -251,392 +254,4 @@ struct _GstType { - - - Dynamic pipelines - - In this chapter we will see how you can create a dynamic pipeline. A - dynamic pipeline is a pipeline that is updated or created while data - is flowing through it. We will create a partial pipeline first and add - more elements while the pipeline is playing. Dynamic pipelines cause - all sorts of scheduling issues and will remain a topic of research for - a long time in GStreamer. - - - We will show how to create an MPEG1 video player using dynamic pipelines. - As you have seen in the pad section, we can attach a signal to an element - when a pad is created. We will use this to create our MPEG1 player. - - - - We'll start with a simple main function: - - - -/* example-begin dynamic.c */ -#include <string.h> -#include <gst/gst.h> - -void -eof (GstElement *src) -{ - g_print ("have eos, quitting\n"); - exit (0); -} - -gboolean -idle_func (gpointer data) -{ - gst_bin_iterate (GST_BIN (data)); - return TRUE; -} - -void -new_pad_created (GstElement *parse, GstPad *pad, GstElement *pipeline) -{ - GstElement *decode_video = NULL; - GstElement *decode_audio, *play, *color, *show; - GstElement *audio_queue, *video_queue; - GstElement *audio_thread, *video_thread; - - g_print ("***** a new pad %s was created\n", gst_pad_get_name (pad)); - - gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED); - - /* link to audio pad */ - if (strncmp (gst_pad_get_name (pad), "audio_", 6) == 0) { - - /* construct internal pipeline elements */ - decode_audio = gst_element_factory_make ("mad", "decode_audio"); - g_return_if_fail (decode_audio != NULL); - play = gst_element_factory_make ("osssink", "play_audio"); - g_return_if_fail (play != NULL); - - /* create the thread and pack stuff into it */ - audio_thread = gst_thread_new ("audio_thread"); - g_return_if_fail (audio_thread != NULL); - - /* construct queue and link everything in the main pipeline */ - audio_queue = gst_element_factory_make ("queue", "audio_queue"); - g_return_if_fail (audio_queue != NULL); - - gst_bin_add_many (GST_BIN (audio_thread), - audio_queue, decode_audio, play, NULL); - - /* set up pad links */ - gst_element_add_ghost_pad (audio_thread, - gst_element_get_pad (audio_queue, "sink"), - "sink"); - gst_element_link (audio_queue, decode_audio); - gst_element_link (decode_audio, play); - - gst_bin_add (GST_BIN (pipeline), audio_thread); - - gst_pad_link (pad, gst_element_get_pad (audio_thread, "sink")); - - /* set up thread state and kick things off */ - g_print ("setting to READY state\n"); - gst_element_set_state (GST_ELEMENT (audio_thread), GST_STATE_READY); - - } - else if (strncmp (gst_pad_get_name (pad), "video_", 6) == 0) { - - /* construct internal pipeline elements */ - decode_video = gst_element_factory_make ("mpeg2dec", "decode_video"); - g_return_if_fail (decode_video != NULL); - - color = gst_element_factory_make ("colorspace", "color"); - g_return_if_fail (color != NULL); - - - show = gst_element_factory_make 
("xvideosink", "show"); - g_return_if_fail (show != NULL); - - /* construct queue and link everything in the main pipeline */ - video_queue = gst_element_factory_make ("queue", "video_queue"); - g_return_if_fail (video_queue != NULL); - - /* create the thread and pack stuff into it */ - video_thread = gst_thread_new ("video_thread"); - g_return_if_fail (video_thread != NULL); - gst_bin_add_many (GST_BIN (video_thread), video_queue, - decode_video, color, show, NULL); - - /* set up pad links */ - gst_element_add_ghost_pad (video_thread, - gst_element_get_pad (video_queue, "sink"), - "sink"); - gst_element_link (video_queue, decode_video); - gst_element_link_many (decode_video, color, show, NULL); - - gst_bin_add (GST_BIN (pipeline), video_thread); - - gst_pad_link (pad, gst_element_get_pad (video_thread, "sink")); - - /* set up thread state and kick things off */ - g_print ("setting to READY state\n"); - gst_element_set_state (GST_ELEMENT (video_thread), GST_STATE_READY); - } - gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); -} - -int -main (int argc, char *argv[]) -{ - GstElement *pipeline, *src, *demux; - - gst_init (&argc, &argv); - - pipeline = gst_pipeline_new ("pipeline"); - g_return_val_if_fail (pipeline != NULL, -1); - - src = gst_element_factory_make ("filesrc", "src"); - g_return_val_if_fail (src != NULL, -1); - if (argc < 2) - g_error ("Please specify a video file to play !"); - - g_object_set (G_OBJECT (src), "location", argv[1], NULL); - - demux = gst_element_factory_make ("mpegdemux", "demux"); - g_return_val_if_fail (demux != NULL, -1); - - gst_bin_add_many (GST_BIN (pipeline), src, demux, NULL); - - g_signal_connect (G_OBJECT (demux), "new_pad", - G_CALLBACK (new_pad_created), pipeline); - - g_signal_connect (G_OBJECT (src), "eos", - G_CALLBACK (eof), NULL); - - gst_element_link (src, demux); - - gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); - - g_idle_add (idle_func, pipeline); - - gst_main (); - - return 0; -} -/* example-end dynamic.c */ - - - We create two elements: a file source and an MPEG demuxer. - There's nothing special about this piece of code except for - the signal 'new_pad' that we linked to the mpegdemux - element using: - - - g_signal_connect (G_OBJECT (demux), "new_pad", - G_CALLBACK (new_pad_created), pipeline); - - - When an elementary stream has been detected in the system stream, - mpegdemux will create a new pad that will provide the data of the - elementary stream. A function 'new_pad_created' will be called when - the pad is created. - - - In the above example, we created new elements based on the name of - the newly created pad. We then added them to a new thread. - There are other possibilities to check the type of the pad, for - example by using the MIME type and the properties of the pad. - - - - - - Type Detection - - Sometimes the capabilities of a pad are not specificied. The filesrc - element, for example, does not know what type of file it is reading. Before - you can attach an element to the pad of the filesrc, you need to determine - the media type in order to be able to choose a compatible element. - - - To solve this problem, a plugin can provide the GStreamer - core library with a type definition. The type definition - will contain the following information: - - - - The MIME type we are going to define. - - - - - An optional string with a list of possible file extensions this - type usually is associated with. the list entries are separated with - a space. eg, ".mp3 .mpa .mpg". 
- - - - - An optional typefind function. - - - - - - The typefind functions give a meaning to the MIME types that are used - in GStreamer. The typefind function is a function with the following definition: - - -typedef GstCaps *(*GstTypeFindFunc) (GstBuffer *buf, gpointer priv); - - - This typefind function will inspect a GstBuffer with data and will output - a GstCaps structure describing the type. If the typefind function does not - understand the buffer contents, it will return NULL. - - - GStreamer has a typefind element in the set - of core elements - that can be used to determine the type of a given pad. - - - The next example will show how a typefind element can be inserted into a pipeline - to detect the media type of a file. It will output the capabilities of the pad into - an XML representation. - - -#include <gst/gst.h> - -void type_found (GstElement *typefind, GstCaps* caps); - -int -main(int argc, char *argv[]) -{ - GstElement *bin, *filesrc, *typefind; - - gst_init (&argc, &argv); - - if (argc != 2) { - g_print ("usage: %s <filename>\n", argv[0]); - exit (-1); - } - - /* create a new bin to hold the elements */ - bin = gst_bin_new ("bin"); - g_assert (bin != NULL); - - /* create a disk reader */ - filesrc = gst_element_factory_make ("filesrc", "disk_source"); - g_assert (filesrc != NULL); - g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL); - - /* create the typefind element */ - typefind = gst_element_factory_make ("typefind", "typefind"); - g_assert (typefind != NULL); - - /* add objects to the main pipeline */ - gst_bin_add_many (GST_BIN (bin), filesrc, typefind, NULL); - - g_signal_connect (G_OBJECT (typefind), "have_type", - G_CALLBACK (type_found), NULL); - - gst_element_link (filesrc, typefind); - - /* start playing */ - gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PLAYING); - - gst_bin_iterate (GST_BIN (bin)); - - gst_element_set_state (GST_ELEMENT (bin), GST_STATE_NULL); - - exit (0); -} - - - We create a very simple pipeline with only a filesrc and the typefind - element in it. The sinkpad of the typefind element has been linked - to the source pad of the filesrc. - - - We attached a signal 'have_type' to the typefind element which will be called - when the type of the media stream as been detected. - - - The typefind function will loop over all the registered types and will - execute each of the typefind functions. As soon as a function returns - a GstCaps pointer, the type_found function will be called: - - - -void -type_found (GstElement *typefind, GstCaps* caps) -{ - xmlDocPtr doc; - xmlNodePtr parent; - - doc = xmlNewDoc ("1.0"); - doc->root = xmlNewDocNode (doc, NULL, "Capabilities", NULL); - - parent = xmlNewChild (doc->root, NULL, "Caps1", NULL); - gst_caps_save_thyself (caps, parent); - - xmlDocDump (stdout, doc); -} - - - In the type_found function we can print or inspect the type that has been - detected using the GstCaps APIs. In this example, we just print out the - XML representation of the caps structure to stdout. - - - A more useful option would be to use the registry to look up an element - that can handle this particular caps structure, or we can also use the - autoplugger to link this caps structure to, for example, a videosink. - - - - - - Another approach to autoplugging - - The autoplug API is interesting, but often impractical. It is static; - it cannot deal with dynamic pipelines. An element that will - automatically figure out and decode the type is more useful. - Enter the spider. 
- - - The spider element - - The spider element is a generalized autoplugging element. At this point (April 2002), it's - the best we've got; it can be inserted anywhere within a pipeline to perform caps - conversion, if possible. Consider the following gst-launch line: - - $ gst-launch filesrc location=my.mp3 ! spider ! osssink - - The spider will detect the type of the stream, autoplug it to the osssink's caps, and play - the pipeline. It's neat. - - - - Spider features - - - - - Automatically typefinds the incoming stream. - - - - - Has request pads on the source side. This means that it can - autoplug one source stream into many sink streams. For example, - an MPEG1 system stream can have audio as well as video; that - pipeline would be represented in gst-launch syntax as - - - $ gst-launch filesrc location=my.mpeg1 ! spider ! { queue ! osssink } spider.src_%d! - { queue ! xvideosink } - - - - - - - - diff --git a/docs/manual/advanced-clocks.xml b/docs/manual/advanced-clocks.xml index 4781155314..68b3720203 100644 --- a/docs/manual/advanced-clocks.xml +++ b/docs/manual/advanced-clocks.xml @@ -1,7 +1,5 @@ Clocks in GStreamer - - WRITEME diff --git a/docs/manual/advanced-interfaces.xml b/docs/manual/advanced-interfaces.xml deleted file mode 100644 index f107cec7ad..0000000000 --- a/docs/manual/advanced-interfaces.xml +++ /dev/null @@ -1,112 +0,0 @@ - - Interfaces - - - In , you have learned how - to use GObject properties as a simple way to do - interaction between applications and elements. This method suffices for - the simple'n'straight settings, but fails for anything more complicated - than a getter and setter. For the more complicated use cases, &GStreamer; - uses interfaces based on the Glib GInterface type. - - - - Most of the interfaces handled here will not contain any example code. - See the API references for details. Here, we will just describe the - scope and purpose of each interface. - - - - The Mixer interface - - - The mixer interface provides a uniform way to control the volume on a - hardware (or software) mixer. The interface is primarily intended to - be implemented by elements for audio inputs and outputs that talk - directly to the hardware (e.g. OSS or ALSA plugins). - - - Using this interface, it is possible to control a list of tracks - (such as Line-in, Microphone, etc.) from a mixer element. They can - be muted, their volume can be changed and, for input tracks, their - record flag can be set as well. - - - Example plugins implementing this interface include the OSS elements - (osssrc, osssink, ossmixer) and the ALSA plugins (alsasrc, alsasink - and alsamixer). - - - - - The Tuner interface - - - The tuner interface is a uniform way to control inputs and outputs - on a multi-input selection device. This is primarily used for input - selection on elements for TV- and capture-cards. - - - Using this interface, it is possible to select one track from a list - of tracks supported by that tuner-element. The tuner will than select - that track for media-processing internally. This can, for example, be - used to switch inputs on a TV-card (e.g. from Composite to S-video). - - - This interface is currently only implemented by the Video4linux and - Video4linux2 elements. - - - - - The Color Balance interface - - - The colorbalance interface is a way to control video-related properties - on an element, such as brightness, contrast and so on. 
Its sole
-      reason for existence is that, as far as its authors know, there's no
-      way to dynamically register properties using
-      GObject.
- 
- 
-      The colorbalance interface is implemented by several plugins, including
-      xvimagesink and the Video4linux and Video4linux2 elements.
- 
- 
- 
- 
-    The Property Probe interface
- 
- 
-      The property probe is a way to autodetect allowed values for a
-      GObject property. Its primary use (and
-      the only thing that we currently use it for) is to autodetect
-      devices in several elements. For example, the OSS elements use
-      this interface to detect all OSS devices on a system. Applications
-      can then probe this property and get a list of
-      detected devices. Given the overlap between HAL and the practical
-      implementations of this interface, this might in time be deprecated
-      in favour of HAL.
- 
- 
-      This interface is currently implemented by many elements, including
-      the ALSA, OSS, Video4linux and Video4linux2 elements.
- 
- 
- 
- 
-    The X Overlay interface
- 
- 
-      The X Overlay interface was created to solve the problem of embedding
-      video streams in an application window. The application provides an
-      X-window to the element implementing this interface to draw on, and
-      the element will then use this X-window to draw on rather than creating
-      a new toplevel window. This is useful to embed video in video players.
- 
- 
-      This interface is implemented by, amongst others, the Video4linux and
-      Video4linux2 elements and by ximagesink, xvimagesink and sdlvideosink.
- 
- 
diff --git a/docs/manual/advanced-metadata.xml b/docs/manual/advanced-metadata.xml
deleted file mode 100644
index 7855d30d18..0000000000
--- a/docs/manual/advanced-metadata.xml
+++ /dev/null
@@ -1,54 +0,0 @@
- 
-  Metadata
- 
- 
-    &GStreamer; makes a clear distinction between two types of metadata, and
-    has support for both types. The first is stream tags, which describe the
-    content of a stream in a non-technical way. Examples include the author
-    of a song, the title of that very same song or the album it is a part of.
-    The other type of metadata is stream-info, which is a somewhat technical
-    description of the properties of a stream. This can include video size,
-    audio samplerate, codecs used and so on. Tags are handled using the
-    &GStreamer; tagging system. Stream-info can be retrieved from a
-    GstPad.
- 
- 
- 
-    Stream information
- 
- 
-      Stream information can most easily be read from a
-      GstPad. This has already been discussed before
-      in . Therefore, we will skip
-      it here.
- 
- 
- 
- 
-    Tag reading
- 
- 
-      Tag reading is remarkably simple in &GStreamer;. Every element supports
-      the found-tag signal, which will be fired each time
-      the element reads tags from the stream. A GstBin
-      will conveniently forward tags found by its children. Therefore, in most
-      applications, you will only need to connect to the
-      found-tag signal on the top-most bin in your pipeline,
-      and you will automatically retrieve all tags from the stream.
- 
- 
-      Note, however, that the found-tag signal might be fired
-      multiple times and by multiple elements in the pipeline. It is the
-      application's responsibility to put all those tags together and
-      display them to the user in a nice, coherent way.
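 
      As a rough illustration, connecting to the signal could look like the
      fragment below. This is a minimal sketch against the 0.8-era tag API;
      the callback names are made up for this example, and only string tags
      are handled.
 
static void
print_one_tag (const GstTagList *list, const gchar *tag, gpointer data)
{
  gchar *value;

  /* this sketch only handles tags whose values are strings */
  if (gst_tag_get_type (tag) == G_TYPE_STRING &&
      gst_tag_list_get_string (list, tag, &value)) {
    g_print ("%s: %s\n", tag, value);
    g_free (value);
  }
}

static void
found_tag (GstElement *pipeline, GstElement *source,
           GstTagList *tags, gpointer data)
{
  gst_tag_list_foreach (tags, print_one_tag, NULL);
}

  ...
  /* the top-most bin forwards the tags found by its children */
  g_signal_connect (pipeline, "found-tag", G_CALLBACK (found_tag), NULL);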
- - - - - Tag writing - - - WRITEME - - - diff --git a/docs/manual/advanced-position.xml b/docs/manual/advanced-position.xml deleted file mode 100644 index 00cf063391..0000000000 --- a/docs/manual/advanced-position.xml +++ /dev/null @@ -1,117 +0,0 @@ - - Position tracking and seeking - - - So far, we've looked at how to create a pipeline to do media processing - and how to make it run ("iterate"). Most application developers will be - interested in providing feedback to the user on media progress. Media - players, for example, will want to show a slider showing the progress in - the song, and usually also a label indicating stream length. Transcoding - applications will want to show a progress bar on how much % of the task - is done. &GStreamer; has built-in support for doing all this using a - concept known as querying. Since seeking is very - similar, it will be discussed here as well. Seeking is done using the - concept of events. - - - - Querying: getting the position or length of a stream - - - Querying is defined as requesting a specific stream-property related - to progress tracking. This includes getting the length of a stream (if - available) or getting the current position. Those stream properties - can be retrieved in various formats such as time, audio samples, video - frames or bytes. The functions used are gst_element_query - () and gst_pad_query (). - - - - Obviously, using either of the above-mentioned functions requires the - application to know which element or pad to run - the query on. This is tricky, but there are some good sides to the - story. The good thing is that elements (or, rather, pads - since - gst_element_query () internally calls - gst_pad_query ()) forward (dispatch) - events and queries to peer pads (or elements) if they don't handle it - themselves. The bad side is that some elements (or pads) will handle - events, but not the specific formats that you want, and therefore it - still won't work. - - - - Most queries will, fortunately, work fine. Queries are always - dispatched backwards. This means, effectively, that it's easiest to - run the query on your video or audio output element, and it will take - care of dispatching the query to the element that knows the answer - (such as the current position or the media length; usually the demuxer - or decoder). - - - -#include <gst/gst.h> - -gint -main (gint argc, - gchar *argv[]) -{ - GstElement *sink, *pipeline; -[..] - - /* run pipeline */ - do { - gint64 len, pos; - GstFormat fmt = GST_FORMAT_TIME; - - if (gst_element_query (sink, GST_QUERY_POSITION, &fmt, &pos) && - gst_element_query (sink, GST_QUERY_TOTAL, &fmt, &len)) { - g_print ("Time: %" GST_FORMAT_TIME " / %" GST_FORMAT_TIME "\r", - GST_TIME_ARGS (pos), GST_TIME_ARGS (len)); - } - } while (gst_bin_iterate (GST_BIN (pipeline))); - -[..] -} - - - - If you are having problems with the dispatching behaviour, your best - bet is to manually decide which element to start running the query on. - You can get a list of supported formats and query-types with - gst_element_get_query_types () and - gst_element_get_formats (). - - - - - Events: seeking (and more) - - - Events work in a very similar way as queries. Dispatching, for - example, works exactly the same for events (and also has the same - limitations). Although there are more ways in which applications - and elements can interact using events, we will only focus on seeking - here. This is done using the seek-event. 
A seek-event contains a
-      seeking offset, a seek method (which indicates relative to what the
-      offset was given), a seek format (which is the unit of the offset,
-      e.g. time, audio samples, video frames or bytes) and optionally a
-      set of seeking-related flags (e.g. whether internal buffers should be
-      flushed). The behaviour of a seek is also wrapped in the function
-      gst_element_seek ().
- 
- 
-#include <gst/gst.h>
-
-static void
-seek_to_time (GstElement *audiosink,
-              gint64      time_nanoseconds)
-{
-  gst_element_seek (audiosink,
-                    GST_SEEK_METHOD_SET | GST_FORMAT_TIME |
-                    GST_SEEK_FLAG_FLUSH, time_nanoseconds);
-}
- 
- 
- 
diff --git a/docs/manual/advanced-threads.xml b/docs/manual/advanced-threads.xml
index 18a6db7e95..05ea3cd8d1 100644
--- a/docs/manual/advanced-threads.xml
+++ b/docs/manual/advanced-threads.xml
@@ -48,7 +48,7 @@
 fundamentally buffer-oriented rather than byte-oriented, the natural
 solution to this problem is an element that can "buffer" the buffers
 between the threads, in a thread-safe fashion. This element is the
- queue, described more fully in . It doesn't
+ queue, described more fully in . It doesn't
 matter if the queue is placed in the containing bin or in the thread
 itself, but it needs to be present on one side or the other to enable
 inter-thread communication.
@@ -165,132 +165,4 @@
 main (int argc, char *argv[])
 
 
- 
- 
-    Queue
- 
-      A queue is a filter element.
-      Queues can be used to link two elements in such a way that the data can
-      be buffered.
- 
- 
-      A buffer that is sent to a queue's sink pad will not automatically be pushed to the
-      next linked element but will be buffered. It will be pushed to the next
-      element as soon as a gst_pad_pull () is called on the queue's source pad.
- 
- 
-      Queues are mostly used in conjunction with a thread bin to
-      provide an external link for the thread's elements. You could have one
-      thread feeding buffers into a queue and another
-      thread repeatedly pulling on the queue to feed its
-      internal elements.
- 
- 
- 
-      Below is a figure of a two-threaded decoder. We have one thread (the main execution
-      thread) reading the data from a file, and another thread decoding the data.
- 
- a two-threaded decoder with a queue - - - - - -
- 
- 
-      The standard GStreamer queue implementation has some
-      properties that can be changed using the g_object_set () method. To set the
-      maximum number of buffers that can be queued to 30, do:
- 
- 
-  g_object_set (G_OBJECT (queue), "max_level", 30, NULL);
- 
- 
- 
-      The following MP3 player shows you how to create the above pipeline
-      using a thread and a queue.
- 
- 
- 
-/* example-begin queue.c */
-#include <stdlib.h>
-#include <gst/gst.h>
-
-gboolean playing;
-
-/* eos will be called when the src element has an end of stream */
-void
-eos (GstElement *element, gpointer data)
-{
-  g_print ("have eos, quitting\n");
-
-  playing = FALSE;
-}
-
-int
-main (int argc, char *argv[])
-{
-  GstElement *filesrc, *audiosink, *queue, *decode;
-  GstElement *bin;
-  GstElement *thread;
-
-  gst_init (&argc, &argv);
-
-  if (argc != 2) {
-    g_print ("usage: %s <mp3 filename>\n", argv[0]);
-    exit (-1);
-  }
-
-  /* create a new thread to hold the elements */
-  thread = gst_thread_new ("thread");
-  g_assert (thread != NULL);
-
-  /* create a new bin to hold the elements */
-  bin = gst_bin_new ("bin");
-  g_assert (bin != NULL);
-
-  /* create a disk reader */
-  filesrc = gst_element_factory_make ("filesrc", "disk_source");
-  g_assert (filesrc != NULL);
-  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
-  g_signal_connect (G_OBJECT (filesrc), "eos",
-                    G_CALLBACK (eos), thread);
-
-  queue = gst_element_factory_make ("queue", "queue");
-  g_assert (queue != NULL);
-
-  /* and an audio sink */
-  audiosink = gst_element_factory_make ("osssink", "play_audio");
-  g_assert (audiosink != NULL);
-
-  decode = gst_element_factory_make ("mad", "decode");
-  g_assert (decode != NULL);
-
-  /* add the decoder and the sink to the thread */
-  gst_bin_add_many (GST_BIN (thread), decode, audiosink, NULL);
-
-  /* add the source, the queue and the thread to the main bin */
-  gst_bin_add_many (GST_BIN (bin), filesrc, queue, thread, NULL);
-
-  gst_element_link (filesrc, queue);
-  gst_element_link_many (queue, decode, audiosink, NULL);
-
-  /* start playing */
-  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PLAYING);
-
-  playing = TRUE;
-
-  while (playing) {
-    gst_bin_iterate (GST_BIN (bin));
-  }
-
-  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_NULL);
-
-  return 0;
-}
-/* example-end queue.c */
- 
- 
diff --git a/docs/manual/appendix-programs.xml b/docs/manual/appendix-programs.xml
index c87cdf30a0..a1b2e5f996 100644
--- a/docs/manual/appendix-programs.xml
+++ b/docs/manual/appendix-programs.xml
@@ -19,7 +19,7 @@
 syntax.
 
- A simple commandline looks like:
+ A simple commandline to play an mp3 audio file looks like:
 
gst-launch filesrc location=hello.mp3 ! mad ! osssink
 
@@ -33,6 +33,8 @@
gst-launch filesrc location=redpill.vob ! mpegdemux name=demux \
 demux.video_00! { mpeg2dec ! xvideosink }
 
+ lists more gst-launch commandlines.
+ 
 
 You can also use the parser in your own
@@ -132,6 +134,10 @@
main (int argc, char *argv[])
 
 the g_value_convert routines. No error message will be displayed on an
 invalid conversion, due to limitations in the value convert API.
 
+ 
+ The list of properties an element supports can be found out using
+ gst-inspect element-name.
+ 
 
 Bins, Threads, and Pipelines
 
@@ -141,12 +147,88 @@
 
 A pipeline description between parentheses is placed into a bin. The
 open paren may be preceded by a type name, as in jackbin.( ... ) to make
- a bin of a specified type. Square brackets make pipelines, and curly braces make
+ a bin of a specified type. Square brackets '[ ]' make pipelines, and curly braces '{ }' make
 threads. The default toplevel bin type is a pipeline, although putting the
 whole description within parentheses or braces can override this default.
 
+ 
+ More Examples
+ 
+ This chapter collects some more complex pipelines. The examples are split into several lines,
+ so make sure to include the trailing backslashes.
+ When modifying the pipelines and searching for the right element to insert, a grep of the gst-inspect
+ output often gives a starting point:
+ 
+gst-inspect | grep "avi"
+ 
+ Another way is to do:
+ 
+gst-launch filesrc location=video.avi ! decodebin name=d ! xvimagesink d. ! { queue ! alsasink } -v
+ 
+ and look at the output to see which plugins it chooses.
+ 
+ 
+ Play a remote mp3 audio file:
+ 
+gst-launch gnomevfssrc location=http://www.server.org/hello.mp3 ! mad ! alsasink
+ 
+ 
+ 
+ Play a local mp3 audio file with visualisation:
+ 
+gst-launch filesrc location=Hello.mp3 ! mad ! tee name=t ! \
+ { queue ! osssink } \
+ { t. ! queue ! synaesthesia ! ffmpegcolorspace ! xvimagesink }
+ 
+ 
+ 
+ Play a local ogg audio file:
+ 
+gst-launch filesrc location=file.ogg ! oggdemux ! vorbisdec ! audioconvert ! audioscale ! alsasink
+ 
+ 
+ 
+ Play a local ogg video file:
+ 
+gst-launch filesrc location=file.ogg ! oggdemux name=demux \
+ { demux. ! queue ! theoradec ! ffmpegcolorspace ! videoscale ! xvimagesink } \
+ { demux. ! queue ! vorbisdec ! audioconvert ! audioscale ! alsasink }
+ 
+ 
+ 
+ Play a local avi video file:
+ 
+gst-launch filesrc location=video.avi ! mpegdemux name=demux \
+ demux.audio_00! { queue ! ac3parse ! a52dec ! osssink } \
+ demux.video_00! { queue ! mpeg2dec ! xvideosink }
+ 
+ 
+ 
+ Transcoding an audio file from one format into another:
+ 
+gst-launch filesrc location=file.ogg ! oggdemux ! vorbisdec ! audioconvert ! flacenc ! filesink location=file.flac
+ 
+ 
+gst-launch filesrc location=file.mp3 ! id3demux ! mad ! audioconvert ! rawvorbisenc ! oggmux ! filesink location=file.ogg
+ 
+ 
+ 
+ Transcoding a dvd video into an ogg video:
+ 
+gst-launch-0.8 oggmux name=mux ! filesink location=/tmp/file.ogg \
+ { dvdreadsrc location=/dev/cdrom ! dvddemux name=demux.audio_00 ! \
+ { queue ! a52dec ! audioconvert ! rawvorbisenc ! queue ! mux. } \
+ { demux.video_00 ! queue ! mpeg2dec ! ffcolorspace ! videoscale ! video/x-raw-yuv,width=384,height=288 ! 
tee name=t ! \ + { queue ! theoraenc ! queue ! mux. } \ + } \ + } \ + { t. ! queue ! ffcolorspace ! ximagesink } + + + + diff --git a/docs/manual/appendix-quotes.xml b/docs/manual/appendix-quotes.xml index 710135c734..e2704282e6 100644 --- a/docs/manual/appendix-quotes.xml +++ b/docs/manual/appendix-quotes.xml @@ -12,6 +12,18 @@ + + + 2 Nov 2004 + + + zaheerm: +wtay: unfair u fixed the bug i was using as a feature! + + + + + 14 Oct 2004 diff --git a/docs/manual/basics-bins.xml b/docs/manual/basics-bins.xml index 6c8ab9cbf3..c1d6b3bbd2 100644 --- a/docs/manual/basics-bins.xml +++ b/docs/manual/basics-bins.xml @@ -1,147 +1,49 @@ Bins - A bin is a container element. You can add elements to a bin. Since a - bin is an element itself, a bin can be handled in the same way as any - other element. Therefore, the whole previous chapter () applies to bins as well. + A bin is a container element. You can add elements to a bin. Since a bin is + an element itself, it can also be added to another bin. + + + Bins allow you to combine a group of linked elements into one logical element. You do + not deal with the individual elements anymore but with just one element, the bin. + We will see that this is extremely powerful when you are going to construct + complex pipelines since it allows you to break up the pipeline in smaller chunks. + + + The bin will also manage the elements contained in it. It will figure out how + the data will flow in the bin and generate an optimal plan for that data flow. Plan + generation is one of the most complicated procedures in GStreamer. - - What are bins - - Bins allow you to combine a group of linked elements into one - logical element. You do not deal with the individual elements - anymore but with just one element, the bin. We will see that - this is extremely powerful when you are going to construct - complex pipelines since it allows you to break up the pipeline - in smaller chunks. - - - The bin will also manage the elements contained in it. It will - figure out how the data will flow in the bin and generate an - optimal plan for that data flow. Plan generation is one of the - most complicated procedures in &GStreamer;. You will learn more - about this process, called scheduling, in . - +
+ Visualisation of a bin with some elements in it + + + + + +
-
- Visualisation of a bin with some elements in it - - - - - -
+ + There are two specialized bins available to the GStreamer programmer: - - There are two specialized types of bins available to the - &GStreamer; programmer: - - A pipeline: a generic container that allows scheduling of the + a pipeline: a generic container that allows scheduling of the containing elements. The toplevel bin has to be a pipeline. - Every application thus needs at least one of these. Applications - can iterate pipelines using gst_bin_iterate - () to make it process data while in the playing state. + Every application thus needs at least one of these. - A thread: a bin that will be run in a separate execution thread. + a thread: a bin that will be run in a separate execution thread. You will have to use this bin if you have to carefully synchronize audio and video, or for buffering. You will learn more about threads in . -
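 
 To make these two types concrete, the following fragment (a minimal
 sketch only, using the convenience constructors that appear throughout
 this manual) creates a pipeline and adds a thread bin to it:
 
#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *thread;

  gst_init (&argc, &argv);

  /* the toplevel bin of every application: a pipeline */
  pipeline = gst_pipeline_new ("my_pipeline");

  /* a thread bin; its children run in a separate execution thread */
  thread = gst_thread_new ("my_thread");

  /* a bin is an element itself, so it can be added to another bin */
  gst_bin_add (GST_BIN (pipeline), thread);

[..]
}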
- 
- 
-    Creating a bin
-      Bins are created in the same way that other elements are created,
-      i.e. using an element factory. There are also convenience functions
-      available (gst_bin_new (),
-      gst_thread_new () and gst_pipeline_new
-      ()). To add elements to a bin or remove elements from a
-      bin, you can use gst_bin_add () and
-      gst_bin_remove (). Note that the bin that you
-      add an element to will take ownership of that element. If you
-      destroy the bin, the element will be dereferenced with it. If you
-      remove an element from a bin, it will be dereferenced automatically.
- 
- 
-int
-main (int argc,
-      char *argv[])
-{
-  GstElement *bin, *pipeline, *source, *sink;
-
-  /* init */
-  gst_init (&argc, &argv);
-
-  /* create */
-  pipeline = gst_pipeline_new ("my_pipeline");
-  bin = gst_bin_new ("my_bin");
-  source = gst_element_factory_make ("fakesrc", "source");
-  sink = gst_element_factory_make ("fakesink", "sink");
-
-  /* set up pipeline */
-  gst_bin_add_many (GST_BIN (bin), source, sink, NULL);
-  gst_bin_add (GST_BIN (pipeline), bin);
-  gst_element_link (source, sink);
-
-[..]
-}
- 
- 
-      There are various functions to look up elements in a bin. You can
-      also get a list of all elements that a bin contains using the function
-      gst_bin_get_list (). See the API references of
-      GstBin
-      for details.
- 
- 
- 
-    Custom bins
- 
-      The application programmer can create custom bins packed with elements
-      to perform a specific task. This allows you, for example, to write
-      an Ogg/Vorbis decoder with just the following lines of code:
- 
- 
-int
-main (int argc,
-      char *argv[])
-{
-  GstElement *player;
-
-  /* init */
-  gst_init (&argc, &argv);
-
-  /* create player */
-  player = gst_element_factory_make ("oggvorbisplayer", "player");
-
-  /* set the source audio file */
-  g_object_set (G_OBJECT (player), "location", "helloworld.ogg", NULL);
-
-  /* start playback */
-  gst_element_set_state (GST_ELEMENT (player), GST_STATE_PLAYING);
-[..]
-}
- 
- 
-      Custom bins can be created with a plugin or an XML description. You
-      will find more information about creating custom bins in the Plugin
-      Writers Guide.
- 
- 
diff --git a/docs/manual/basics-data.xml b/docs/manual/basics-data.xml index c6511104a0..2fef01a8c0 100644 --- a/docs/manual/basics-data.xml +++ b/docs/manual/basics-data.xml @@ -1,40 +1,29 @@ - - Buffers and Events - - The data flowing through a pipeline consists of a combination of - buffers and events. Buffers contain the actual pipeline data. Events - contain control information, such as seeking information and - end-of-stream notifiers. All this will flow through the pipeline - automatically when it's running. This chapter is mostly meant to - explain the concept to you; you don't need to do anything for this. + + Buffers + + Buffers contain the data that will flow through the pipeline you have + created. A source element will typically create a new buffer and pass + it through a pad to the next element in the chain. When using the + GStreamer infrastructure to create a media pipeline you will not have + to deal with buffers yourself; the elements will do that for you. + + A buffer consists of: - - Buffers - - Buffers contain the data that will flow through the pipeline you have - created. A source element will typically create a new buffer and pass - it through a pad to the next element in the chain. When using the - GStreamer infrastructure to create a media pipeline you will not have - to deal with buffers yourself; the elements will do that for you. - - - A buffer consists, amongst others, of: - - A pointer to a piece of memory. + a pointer to a piece of memory. - The size of the memory. + the size of the memory. - A timestamp for the buffer. + a timestamp for the buffer. @@ -45,55 +34,33 @@
- 
-      The simple case is that a buffer is created, memory allocated, data
-      put in it, and passed to the next element. That element reads the
-      data, does something (like creating a new buffer and decoding into
-      it), and unreferences the buffer. This causes the data to be freed
-      and the buffer to be destroyed. A typical video or audio decoder
-      works like this.
- 
- 
-      There are more complex scenarios, though. Elements can modify buffers
-      in-place, i.e. without allocating a new one. Elements can also write
-      to hardware memory (such as from video-capture sources) or memory
-      allocated from the X-server (using XShm). Buffers can be read-only,
-      and so on.
- 
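 
 The life cycle described above can be sketched as follows. This is a
 minimal fragment assuming the 0.x buffer API with
 gst_buffer_new_and_alloc () and the GST_BUFFER_DATA/GST_BUFFER_SIZE
 accessors:
 
  GstBuffer *buf;

  /* producing element: create a buffer and fill it with data */
  buf = gst_buffer_new_and_alloc (1024);
  memset (GST_BUFFER_DATA (buf), 0, GST_BUFFER_SIZE (buf));

  /* ... the buffer is pushed to the next element ... */

  /* consuming element: read the data, then release the buffer;
   * the last unref frees the memory */
  gst_buffer_unref (buf);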
+ 
 
- 
-    Events
- 
-      Events are control particles that are sent both up- and downstream in
-      a pipeline along with buffers. Downstream events notify fellow elements
-      of stream states. Possible events include discontinuities, flushes,
-      end-of-stream notifications and so on. Upstream events are used both
-      in application-element interaction as well as event-event interaction
-      to request changes in stream state, such as seeks. For applications,
-      only upstream events are important. Downstream events are just
-      explained to get a more complete picture of the data concept.
- 
- 
-      Since most applications seek in time units, our example below does so
-      too:
- 
- 
-static void
-seek_to_time (GstElement *element,
-              guint64     time_ns)
-{
-  GstEvent *event;
+ 
+ 
+ GStreamer provides functions to create custom buffer create/destroy algorithms, called
+ a GstBufferPool. This makes it possible to efficiently
+ allocate and destroy buffer memory. It also makes it possible to exchange memory between
+ elements by passing the GstBufferPool. A video element can,
+ for example, create a custom buffer allocation algorithm that creates buffers with XSHM
+ as the buffer memory. An element can use this algorithm to create and fill the buffer
+ with data.
+ 
+ 
+ The simple case is that a buffer is created, memory allocated, data put
+ in it, and passed to the next element. That element reads the data, does
+ something (like creating a new buffer and decoding into it), and
+ unreferences the buffer. This causes the data to be freed and the buffer
+ to be destroyed. A typical MPEG audio decoder works like this.
+ 
+ 
+ A more complex case is when the filter modifies the data in place. It
+ does so and simply passes on the buffer to the next element. This is just
+ as easy to deal with. An element that works in place has to be careful when
+ the buffer is used in more than one element; a copy on write has to be made in this
+ situation.
+ 
 

-  event = gst_event_new_seek (GST_SEEK_METHOD_SET |
-                              GST_FORMAT_TIME,
-                              time_ns);
-  gst_element_send_event (element, event);
-}
- 
- 
-      The function gst_element_seek () is a shortcut
-      for this. This is mostly just to show how it all works.
- 
 
diff --git a/docs/manual/basics-elements.xml b/docs/manual/basics-elements.xml
index 1a9490df46..0ce686c79a 100644
--- a/docs/manual/basics-elements.xml
+++ b/docs/manual/basics-elements.xml
@@ -1,82 +1,85 @@
- 
+ 
 Elements
 
- The most important object in &GStreamer; for the application programmer
- is the GstElement
- object. An element is the basic building block for a media pipeline. All
- the different high-level components you will use are derived from
- GstElement. Every decoder, encoder, demuxer, video
- or audio output is in fact a GstElement.
+ The most important object in GStreamer for the
+ application programmer is the GstElement
+ object.
 
- 
- What are elements?
- 
- For the application programmer, elements are best visualized as black
- boxes. On the one end, you might put something in, the element does
- something with it and something else comes out at the other side. For
- a decoder element, for example, you'd put in encoded data, and the
- element would output decoded data. In the next chapter (see ), you will learn more about data input and
- output in elements, and how you can set that up in your application.
+ 
+ What is an element?
+ 
+ An element is the basic building block for the media pipeline.
+ All the different high-level components you are going to use are
+ derived from
+ GstElement.
This means that a + lot of functions you are going to use operate on objects of this class. + + Elements, from the perspective of GStreamer, are viewed as "black boxes" + with a number of different aspects. One of these aspects is the presence + of "pads" (see ), or link points. + This terminology arises from soldering; pads are where wires can be + attached. + + + + Types of elements + Source elements - Source elements generate data for use by a pipeline, for example - reading from disk or from a sound card. shows how we will visualise - a source element. We always draw a source pad to the right of - the element. + Source elements generate data for use by a pipeline, for example + reading from disk or from a sound card. + + + shows how we will visualise + a source element. + We always draw a source pad to the right of the element.
Visualisation of a source element - +
Source elements do not accept data, they only generate data. You can - see this in the figure because it only has a source pad (on the - right). A source pad can only generate data. + see this in the figure because it only has a source pad. A source + pad can only generate data.
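 
 For example, the disk source used throughout this manual is created
 and configured like this (a small fragment only; the filename is just
 a placeholder):
 
  GstElement *src;

  /* filesrc reads a file and pushes its contents out on a source pad */
  src = gst_element_factory_make ("filesrc", "disk_source");
  g_object_set (G_OBJECT (src), "location", "song.mp3", NULL);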
- Filters, convertors, demuxers, muxers and codecs + Filters and codecs - Filters and filter-like elements have both input and outputs pads. - They operate on data that they receive on their input (sink) pads, - and will provide data on their output (source) pads. Examples of - such elements are a volume element (filter), a video scaler - (convertor), an Ogg demuxer or a Vorbis decoder. + Filter elements have both input and output pads. They operate on + data they receive in their sink pads and produce data on their source + pads. For example, MPEG decoders and volume filters would fall into + this category. - Filter-like elements can have any number of source or sink pads. A - video demuxer, for example, would have one sink pad and several - (1-N) source pads, one for each elementary stream contained in the - container format. Decoders, on the other hand, will only have one - source and sink pads. + Elements are not constrained as to the number of pads they might have; + for example, a video mixer might have two input pads (the images of + the two different video streams) and one output pad.
Visualisation of a filter element - +
- shows how we will - visualise a filter-like element. This specific element has one source - and one sink element. Sink pads, receiving input data, are depicted - at the left of the element; source pads are still on the right. + shows how we will visualise + a filter element. + This element has one sink (input) pad and one source (output) pad. + Sink pads are drawn on the left of the element.
Visualisation of a filter element with @@ -84,19 +87,17 @@ <mediaobject> <imageobject> <imagedata fileref="images/filter-element-multi.ℑ" - format="&IMAGE;" /> + format="&IMAGE;" /> </imageobject> </mediaobject> </figure> <para> - <xref linkend="section-element-multifilterimg"/> shows another - filter-like element, this one having more than one output (source) - pad. An example of one such element could, for example, be an Ogg - demuxer for an Ogg stream containing both audio and video. One - source pad will contain the elementary video stream, another will - contain the elementary audio stream. Demuxers will generally fire - signals when a new pad is created. The application programmer can - then handle the new elementary stream in the signal handler. + <xref linkend="section-element-multifilterimg"/> shows the visualisation of a filter element with + more than one output pad. An example of such a filter is the AVI + demultiplexer. This element will parse the input data and + extract the audio and video data. Most of these filters dynamically + send out a signal when a new pad is created so that the application + programmer can link an arbitrary element to the newly created pad. </para> </sect2> @@ -112,400 +113,10 @@ <title>Visualisation of a sink element - +
- - - Creating a <classname>GstElement</classname> - - The simplest way to create an element is to use gst_element_factory_make - (). This function takes a factory name and an - element name for the newly created element. The name of the element - is something you can use later on to look up the element in a bin, - for example. The name will also be used in debug output. You can - pass NULL as the name argument to get a unique, - default name. - - - When you don't need the element anymore, you need to unref it using - gst_object_unref - (). This decreases the reference count for the - element by 1. An element has a refcount of 1 when it gets created. - An element gets destroyed completely when the refcount is decreased - to 0. - - - The following example &EXAFOOT; shows how to create an element named - source from the element factory named - fakesrc. It checks if the creation succeeded. - After checking, it unrefs the element. - - - -int -main (int argc, - char *argv[]) -{ - GstElement *element; - - /* init GStreamer */ - gst_init (&argc, &argv); - - /* create element */ - element = gst_element_factory_make ("fakesrc", "source"); - if (!element) { - g_print ("Failed to create element of type 'fakesrc'\n"); - return -1; - } - - gst_object_unref (GST_OBJECT (element)); - - return 0; -} - ]]> - - gst_element_factory_make is actually a shorthand - for a combination of two functions. A GstElement - object is created from a factory. To create the element, you have to - get access to a GstElementFactory - object using a unique factory name. This is done with gst_element_factory_find - (). - - - The following code fragment is used to get a factory that can be used - to create the fakesrc element, a fake data source. - The function gst_element_factory_create - () will use the element factory to create an - element with the given name. - - - - - - Using an element as a <classname>GObject</classname> - - A GstElement - can have several properties which are implemented using standard - GObject properties. The usual - GObject methods to query, set and get - property values and GParamSpecs are - therefore supported. - - - Every GstElement inherits at least one - property from its parent GstObject: the - "name" property. This is the name you provide to the functions - gst_element_factory_make () or - gst_element_factory_create (). You can get - and set this property using the functions - gst_object_set_name and - gst_object_get_name or use the - GObject property mechanism as shown below. - - - -int -main (int argc, - char *argv[]) -{ - GstElement *element; - const gchar *name; - - /* init GStreamer */ - gst_init (&argc, &argv); - - /* create element */ - element = gst_element_factory_make ("fakesrc", "source"); - - /* get name */ - g_object_get (G_OBJECT (element), "name", &name, NULL); - g_print ("The name of the element is '%s'.\n", name); - - gst_object_unref (GST_OBJECT (element)); - - return 0; -} - ]]> - - Most plugins provide additional properties to provide more information - about their configuration or to configure the element. - gst-inspect is a useful tool to query the properties - of a particular element, it will also use property introspection to give - a short explanation about the function of the property and about the - parameter types and ranges it supports. See the appendix for details - about gst-inspect. - - - For more information about GObject - properties we recommend you read the GObject manual and an introduction to The - Glib Object system. 
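 
 As a short illustration, setting and reading back a property uses the
 standard GObject calls; in this fragment, "location" is the property
 that filesrc uses to select its input file:
 
  gchar *location;

  g_object_set (G_OBJECT (element), "location", "song.mp3", NULL);
  g_object_get (G_OBJECT (element), "location", &location, NULL);
  g_print ("reading from '%s'\n", location);
  g_free (location);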
- - - A - GstElement also provides various - GObject signals that can be used as a flexible - callback mechanism. Here, too, you can use gst-inspect - to see which signals a specific elements supports. Together, signals - and properties are the most basic way in which elements and - applications interact. - - - - - More about element factories - - In the previous section, we briefly introduced the GstElementFactory - object already as a way to create instances of an element. Element - factories, however, are much more than just that. Element factories - are the basic types retrieved from the &GStreamer; registry, they - describe all plugins and elements that &GStreamer; can create. This - means that element factories are useful for automated element - instancing, such as what autopluggers do, and for creating lists - of available elements, such as what pipeline editing applications - (e.g. &GStreamer; - Editor) do. - - - - Getting information about an element using a factory - - Tools like gst-inspect will provide some generic - information about an element, such as the person that wrote the - plugin, a descriptive name (and a shortname), a rank and a category. - The category can be used to get the type of the element that can - be created using this element factory. Examples of categories include - Codec/Decoder/Video (video decoder), - Codec/Encoder/Video (video encoder), - Source/Video (a video generator), - Sink/Video (a video output), and all these - exist for audio as well, of course. Then, there's also - Codec/Demuxer and - Codec/Muxer and a whole lot more. - gst-inspect will give a list of all factories, and - gst-inspect <factory-name> will list all - of the above information, and a lot more. - - - -int -main (int argc, - char *argv[]) -{ - GstElementFactory *factory; - - /* init GStreamer */ - gst_init (&argc, &argv); - - /* get factory */ - factory = gst_element_factory_find ("sinesrc"); - if (!factory) { - g_print ("You don't have the 'sinesrc' element installed, go get it!\n"); - return -1; - } - - /* display information */ - g_print ("The '%s' element is a member of the category %s.\n" - "Description: %s\n", - gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)), - gst_element_factory_get_klass (factory), - gst_element_factory_get_description (factory)); - - return 0; -} - ]]> - - You can use gst_registry_pool_feature_list (GST_TYPE_ELEMENT_FACTORY) - to get a list of all the element factories that &GStreamer; knows - about. - - - - - Finding out what pads an element can contain - - Perhaps the most powerful feature of element factories is that - they contain a full description of the pads that the element - can generate, and the capabilities of those pads (in layman words: - what types of media can stream over those pads), without actually - having to load those plugins into memory. This can be used - to provide a codec selection list for encoders, or it can be used - for autoplugging purposes for media players. All current - &GStreamer;-based media players and autopluggers work this way. - We'll look closer at these features as we learn about - GstPad and GstCaps - in the next chapter: - - - - - - Linking elements - - By linking a source element with zero or more filter-like - elements and finally a sink element, you set up a media - pipeline. Data will flow through the elements. This is the - basic concept of media handling in &GStreamer;. - - - - By linking these three elements, we have created a very simple - chain of elements. 
The effect of this will be that the output of - the source element (element1) will be used as input - for the filter-like element (element2). The - filter-like element will do something with the data and send the - result to the final sink element (element3). - - - Imagine the above graph as a simple Ogg/Vorbis audio decoder. The - source is a disk source which reads the file from disc. The second - element is a Ogg/Vorbis audio decoder. The sink element is your - soundcard, playing back the decoded audio data. We will use this - simple graph to construct an Ogg/Vorbis player later in this manual. - - - In code, the above graph is written like this: - - -int -main (int argc, - char *argv[]) -{ - GstElement *source, *filter, *sink; - - /* init */ - gst_init (&argc, &argv); - - /* create elements */ - source = gst_element_factory_make ("fakesrc", "source"); - filter = gst_element_factory_make ("identity", "filter"); - sink = gst_element_factory_make ("fakesink", "sink"); - - /* link */ - gst_element_link_many (source, filter, sink, NULL); - -[..] -} - - - For more specific behaviour, there are also the functions - gst_element_link () and - gst_element_link_pads (). You can also obtain - references to individual pads and link those using various - gst_pad_link_* () functions. See the API - references for more details. - - - - - Element States - - After being created, an element will not actually perform any actions - yet. You need to change elements state to make it do something. - &GStreamer; knows four element states, each with a very specific - meaning. Those four states are: - - - - - GST_STATE_NULL: this is the default state. - This state will deallocate all resources held by the element. - - - - - GST_STATE_READY: in the ready state, an - element has allocated all of its global resources, that is, - resources that can be kept within streams. You can think about - opening devices, allocating buffers and so on. However, the - stream is not opened in this state, so the stream positions is - automatically zero. If a stream was previously opened, it should - be closed in this state, and position, properties and such should - be reset. - - - - - GST_STATE_PAUSED: in this state, an - element has opened the stream, but is not actively processing - it. An element should not modify the stream's position, data or - anything else in this state. When set back to PLAYING, it should - continue processing at the point where it left off as soon as - possible. - - - - - GST_STATE_PLAYING: in the PLAYING state, - an element does exactly the same as in the PAUSED state, except - that it actually processes data. - - - - - You can change the state of an element using the function - gst_element_set_state (). If you set an element - to another state, &GStreamer; will internally traverse all intermediate - states. So if you set an element from NULL to PLAYING, &GStreamer; - will internally set the element to READY and PAUSED in between. - - - Even though an element in GST_STATE_PLAYING - is ready for data processing, it will not necessarily do that. If - the element is placed in a thread (see ), it will process data automatically. - In other cases, however, you will need to iterate - the element's container. - -
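 
 A short fragment makes this concrete. This is a sketch against the
 0.x API, where gst_element_set_state () returns GST_STATE_FAILURE if
 the state change did not succeed:
 
  /* NULL -> PLAYING; READY and PAUSED are traversed internally */
  if (gst_element_set_state (pipeline, GST_STATE_PLAYING) == GST_STATE_FAILURE)
    g_error ("could not set the pipeline to PLAYING");

  /* ... iterate the pipeline ... */

  /* back to NULL, releasing all resources held by the elements */
  gst_element_set_state (pipeline, GST_STATE_NULL);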
diff --git a/docs/manual/basics-helloworld.xml b/docs/manual/basics-helloworld.xml index 6ac6961aef..68611b4fcf 100644 --- a/docs/manual/basics-helloworld.xml +++ b/docs/manual/basics-helloworld.xml @@ -1,141 +1,179 @@ - + Your first application - - This chapter will summarize everything you've learned in the previous - chapters. It describes all aspects of a simple &GStreamer; application, - including initializing libraries, creating elements, packing elements - together in a pipeline and playing this pipeline. By doing all this, - you will be able to build a simple Ogg/Vorbis audio player. + + This chapter describes the most rudimentary aspects of a + GStreamer application, including initializing + the libraries, creating elements, packing them into a pipeline and playing, + pausing and stopping the pipeline. - + Hello world - We're going to create a simple first application, a simple Ogg/Vorbis - command-line audio player. For this, we will use only standard - &GStreamer; components. The player will read a file specified on - the command-line. Let's get started! - - - We've learned, in , that the first thing - to do in your application is to initialize &GStreamer; by calling - gst_init (). Also, make sure that the application - includes gst/gst.h so all function names and - objects are properly defined. Use #include - <gst/gst.h> to do that. - - - Next, you'll want to create the different elements using - gst_element_factory_make (). For an Ogg/Vorbis - audio player, we'll need a source element that reads files from a - disk. &GStreamer; includes this element under the name - filesrc. Next, we'll need something to parse the - file and decoder it into raw audio. &GStreamer; has two elements - for this: the first parses Ogg streams into elementary streams (video, - audio) and is called oggdemux. The second is a Vorbis - audio decoder, it's conveniently called vorbisdec. - Since oggdemux creates dynamic pads for each elementary - stream, you'll need to set a new-pad event handler - on the oggdemux element, like you've learned in - , to link the Ogg parser and - the Vorbis decoder elements together. At last, we'll also need an - audio output element, we will use alsasink, which - outputs sound to an ALSA audio device. - - - The last thing left to do is to add all elements into a container - element, a GstPipeline, and iterate this - pipeline until we've played the whole song. We've previously - learned how to add elements to a container bin in , and we've learned about element states - in . We will use the function - gst_bin_sync_children_state () to synchronize - the state of a bin on all of its contained children. - - - Let's now add all the code together to get our very first audio - player: + We will create a simple first application, a complete MP3 player, using + standard GStreamer components. The player + will read from a file that is given as the first argument to the program. + +/* example-begin helloworld.c */ #include <gst/gst.h> -/* - * Global objects are usually a bad thing. For the purpose of this - * example, we will use them, however. - */ - -GstElement *pipeline, *source, *parser, *decoder, *sink; - -static void -new_pad (GstElement *element, - GstPad *pad, - gpointer data) +int +main (int argc, char *argv[]) { - /* We can now link this pad with the audio decoder and - * add both decoder and audio output to the pipeline. 
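   * (The Ogg demuxer creates this pad dynamically, once it has
   * detected the elementary streams, which is why the link could
   * not be made before the pipeline was started.)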
 */
-  gst_pad_link (pad, gst_element_get_pad (decoder, "sink"));
-  gst_bin_add_many (GST_BIN (pipeline), decoder, sink, NULL);
+  GstElement *pipeline, *filesrc, *decoder, *audiosink;
 
-  /* This function synchronizes a bins state on all of its
-   * contained children. */
-  gst_bin_sync_children_state (GST_BIN (pipeline));
-}
+  gst_init(&argc, &argv);
 
-int
-main (int   argc,
-      char *argv[])
-{
-  /* initialize GStreamer */
-  gst_init (&argc, &argv);
-
-  /* check input arguments */
   if (argc != 2) {
-    g_print ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
-    return -1;
+    g_print ("usage: %s <mp3 filename>\n", argv[0]);
+    exit (-1);
   }
 
-  /* create elements */
-  pipeline = gst_pipeline_new ("audio-player");
-  source = gst_element_factory_make ("filesrc", "file-source");
-  parser = gst_element_factory_make ("oggdemux", "ogg-parser");
-  decoder = gst_element_factory_make ("vorbisdec", "vorbis-decoder");
-  sink = gst_element_factory_make ("alsasink", "alsa-output");
+  /* create a new pipeline to hold the elements */
+  pipeline = gst_pipeline_new ("pipeline");
 
-  /* set filename property on the file source */
-  g_object_set (G_OBJECT (source), "location", argv[1], NULL);
+  /* create a disk reader */
+  filesrc = gst_element_factory_make ("filesrc", "disk_source");
+  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
 
-  /* link together - note that we cannot link the parser and
-   * decoder yet, because the parser uses dynamic pads. For that,
-   * we set a new-pad signal handler. */
-  gst_element_link (source, parser);
-  gst_element_link (decoder, sink);
-  g_signal_connect (parser, "new-pad", G_CALLBACK (new_pad), NULL);
+  /* now it's time to get the decoder */
+  decoder = gst_element_factory_make ("mad", "decoder");
+
+  /* and an audio sink */
+  audiosink = gst_element_factory_make ("osssink", "play_audio");
 
-  /* put all elements in a bin - or at least the ones we will use
-   * instantly. */
-  gst_bin_add_many (GST_BIN (pipeline), source, parser, NULL);
+  /* add objects to the main pipeline */
+  gst_bin_add_many (GST_BIN (pipeline), filesrc, decoder, audiosink, NULL);
 
-  /* Now set to playing and iterate. We will set the decoder and
-   * audio output to ready so they initialize their memory already.
-   * This will decrease the amount of time spent on linking these
-   * elements when the Ogg parser emits the new-pad signal. */
-  gst_element_set_state (decoder, GST_STATE_READY);
-  gst_element_set_state (sink, GST_STATE_READY);
+  /* link src to sink */
+  gst_element_link_many (filesrc, decoder, audiosink, NULL);
+
+  /* start playing */
   gst_element_set_state (pipeline, GST_STATE_PLAYING);
 
-  /* and now iterate - the rest will be automatic from here on.
-   * When the file is finished, gst_bin_iterate () will return
-   * FALSE, thereby terminating this loop. */
-  while (gst_bin_iterate (GST_BIN (pipeline))) ;
+  while (gst_bin_iterate (GST_BIN (pipeline)));
 
-  /* clean up nicely */
+  /* stop the pipeline */
   gst_element_set_state (pipeline, GST_STATE_NULL);
 
-  return 0;
+  /* we don't need a reference to these objects anymore */
+  gst_object_unref (GST_OBJECT (pipeline));
+  /* unreffing the pipeline unrefs the contained elements as well */
+
+  exit (0);
 }
+/* example-end helloworld.c */
 
-
+
+
+    Let's go through this example step by step.
+
+
+
+    The first thing you have to do is to include the standard
+    GStreamer headers and
+    initialize the framework.
+
+
+
+#include <gst/gst.h>
+
+  ...
+
+int
+main (int argc, char *argv[])
+{
+  ...
+  gst_init(&argc, &argv);
+  ...
+
+
+
+
+    We are going to create three elements and one pipeline.
Since all + elements share the same base type, GstElement, + we can define them as: + + + ... + GstElement *pipeline, *filesrc, *decoder, *audiosink; + ... + + + + Next, we are going to create an empty pipeline. As you have seen in + the basic introduction, this pipeline will hold and manage all the + elements we are going to pack into it. + + + /* create a new pipeline to hold the elements */ + pipeline = gst_pipeline_new ("pipeline"); + + + We use the standard constructor for a pipeline: gst_pipeline_new (). + + + + We then create a disk source element. The disk source element is able to + read from a file. We use the standard GObject property mechanism to set + a property of the element: the file to read from. + + + /* create a disk reader */ + filesrc = gst_element_factory_make ("filesrc", "disk_source"); + g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL); + + + + You can check if the filesrc != NULL to verify the creation of the + disk source element. + + + + + We now create the MP3 decoder element. This assumes that the 'mad' plugin + is installed on the system where this application is executed. + + + /* now it's time to get the decoder */ + decoder = gst_element_factory_make ("mad", "decoder"); + + + gst_element_factory_make() takes two arguments: a string that will + identify the element you need and a second argument: how you want + to name the element. The name of the element is something you can + choose yourself and might be used to retrieve the element from a + bin/pipeline. + + + + Finally we create our audio sink element. This element will be able + to play back the audio using OSS. + + + /* and an audio sink */ + audiosink = gst_element_factory_make ("osssink", "play_audio"); + + + + We then add the elements to the pipeline. + + + /* add objects to the main pipeline */ + gst_bin_add_many (GST_BIN (pipeline), filesrc, decoder, audiosink, NULL); + + + + We link the different pads of the elements together like this: + + + /* link src to sink */ + gst_element_link_many (filesrc, decoder, audiosink, NULL); + + We now have created a complete pipeline. We can visualise the pipeline as follows: @@ -147,27 +185,76 @@ main (int argc, + + + + Everything is now set up to start streaming. We use the following + statements to change the state of the pipeline: + + + /* start playing */ + gst_element_set_state (pipeline, GST_STATE_PLAYING); + + + + + GStreamer will take care of the READY and PAUSED state for + you when going from NULL to PLAYING. + + + + + Since we do not use threads, nothing will happen yet. We have to + call gst_bin_iterate() to execute one iteration of the pipeline. + + + while (gst_bin_iterate (GST_BIN (pipeline))); + + + The gst_bin_iterate() function will return TRUE as long as something + interesting happened inside the pipeline. When the end-of-file has been + reached the _iterate function will return FALSE and we can end the loop. + + + /* stop the pipeline */ + gst_element_set_state (pipeline, GST_STATE_NULL); + + gst_object_unref (GST_OBJECT (pipeline)); + + exit (0); + + + + Don't forget to set the state of the pipeline to NULL. This will free + all of the resources held by the elements. + + + - - Compiling and Running helloworld.c + + Compiling helloworld.c - To compile the helloworld example, use: gcc -Wall - $(pkg-config --cflags --libs gstreamer-&GST_MAJORMINOR;) - helloworld.c -o helloworld. &GStreamer; makes use of - pkg-config to get compiler and linker flags - needed to compile this application. 
If you're running a
-      non-standard installation, make sure the
-      PKG_CONFIG_PATH environment variable is
-      set to the correct location ($libdir/pkgconfig).
+      To compile the helloworld example, use:
+
+
+      gcc -Wall `pkg-config gstreamer-&GST_MAJORMINOR; --cflags --libs` helloworld.c \
+          -o helloworld
+
+
+      We use pkg-config to get the compiler flags needed to compile
+      this application. Make sure to have your PKG_CONFIG_PATH environment
+      variable set to the correct location if you are building this
+      application against an uninstalled version of GStreamer.
 
 
-      You can run this example application with ./helloworld
-      file.ogg. Substitute file.ogg
-      with your favourite Ogg/Vorbis file.
+      You can run the example with
+      (substitute helloworld.mp3 with your favorite MP3 file):
+
+      ./helloworld helloworld.mp3
+
 
 
@@ -175,23 +262,19 @@ main (int argc,
     This concludes our first example. As you see, setting up a pipeline
     is very low-level but powerful. You will see later in this manual how
-    you can create a more powerful media player with even less effort
-    using higher-level interfaces. We will discuss all that in . We will first, however, go more in-depth
-    into more advanced &GStreamer; internals.
+    you can create a custom MP3 element with a higher-level API.
 
 
-    It should be clear from the example that we can very easily replace
-    the filesrc element with some other element that
-    reads data from a network, or some other data source element that
-    is better integrated with your desktop environment. Also, you can
-    use other decoders and parsers to support other media types. You
-    can use another audio sink if you're not running Linux, but Mac OS X,
-    Windows or FreeBSD, or you can instead use a filesink to write audio
-    files to disk instead of playing them back. By using an audio card
-    source, you can even do audio capture instead of playback. All this
-    shows the reusability of &GStreamer; elements, which is its greatest
-    advantage.
+    It should be clear from the example that we can very easily replace the
+    filesrc element with the gnomevfssrc element, giving you instant streaming
+    from any gnomevfs URL.
+
+
+    We can also choose to use another type of sink instead of the audiosink.
+    We could use a filesink to write the raw samples to a file, for example.
+    It should also be clear that inserting filters, like a stereo effect,
+    into the pipeline is not that hard to do. The most important thing is
+    that you can reuse already existing elements.
 
 
diff --git a/docs/manual/basics-init.xml b/docs/manual/basics-init.xml
index 8e3b17b00c..054b7b5198 100644
--- a/docs/manual/basics-init.xml
+++ b/docs/manual/basics-init.xml
@@ -1,31 +1,31 @@
-
-  Initializing &GStreamer;
+
+  Initializing <application>GStreamer</application>
 
-    When writing a &GStreamer; application, you can simply include
-    gst/gst.h to get access to the library
-    functions. Besides that, you will also need to initialize the
-    &GStreamer; library.
+    When writing a GStreamer application, you can
+    simply include gst/gst.h to get
+    access to the library functions.
+
+
+    Before the GStreamer libraries can be used,
+    gst_init has to be called from the main application.
+    This call will perform the necessary initialization of the library as
+    well as parse the GStreamer-specific command line options.
+
+
+    A typical program
+    &EXAFOOT;
+    would have code to initialize GStreamer that
+    looks like this:
 
-
-    Simple initialization
-
-      Before the &GStreamer; libraries can be used,
-      gst_init has to be called from the main
-      application.
This call will perform the necessary initialization
-      of the library as well as parse the &GStreamer;-specific command
-      line options.
-
-
-      A typical program &EXAFOOT; would have code to initialize
-      &GStreamer; that looks like this:
-
-
-#include <gst/gst.h>
+
+
 int
-main (int   argc,
-      char *argv[])
+main (int argc, char *argv[])
 {
   guint major, minor, micro;
@@ -37,35 +37,35 @@ main (int argc,
   return 0;
 }
-
-
-      Use the GST_VERSION_MAJOR,
-      GST_VERSION_MINOR and GST_VERSION_MICRO
-      macros to get the &GStreamer; version you are building against, or
-      use the function gst_version to get the version
-      your application is linked against. &GStreamer; currently uses a
-      scheme where versions with the same major and minor versions are
-      API- and ABI-compatible.
-
-
-      It is also possible to call the gst_init function
-      with two NULL arguments, in which case no command line
-      options will be parsed by GStreamer.
-
-
-
+/* example-end init.c */
+]]>
+
+
+    Use the GST_VERSION_MAJOR,
+    GST_VERSION_MINOR and GST_VERSION_MICRO
+    macros to get the GStreamer version you are
+    building against, or use the function gst_version
+    to get the version your application is linked against.
+
+
+
+    It is also possible to call the gst_init function
+    with two NULL arguments, in which case no command line
+    options will be parsed by GStreamer.
+
 
     The popt interface
 
-      You can also use a popt table to initialize your own parameters as
-      shown in the next example:
+You can also use a popt table to initialize your own parameters as shown in the
+next example:
 
+/* example-begin popt.c */
+
 #include <gst/gst.h>
 
 int
-main (int   argc,
-      char *argv[])
+main(int argc, char *argv[])
 {
   gboolean silent = FALSE;
   gchar *savefile = NULL;
@@ -83,6 +83,7 @@ main (int argc,
   return 0;
 }
+/* example-end popt.c */
 
      As shown in this fragment, you can use a popt table to define your own
      options and have them parsed alongside the standard GStreamer options.
+
diff --git a/docs/manual/basics-pads.xml b/docs/manual/basics-pads.xml
index e72d1ae460..e7a44f6a2c 100644
--- a/docs/manual/basics-pads.xml
+++ b/docs/manual/basics-pads.xml
@@ -1,534 +1,244 @@
-
-  Pads and capabilities
+
+  Pads
 
-    As we have seen in , the pads are
-    the element's interface to the outside world. Data streams from one
-    element's source pad to another element's sink pad. The specific
-    type of media that the element can handle will be exposed by the
-    pad's capabilities. We will talk more on capabilities later in this
-    chapter (see ).
+    As we have seen in , the pads are the element's
+    interface to the outside world.
+
+
+    The specific type of media that the element can handle will be exposed by the pads.
+    The description of this media type is done with capabilities (see
+    )
 
-
-    Pads
-
-      A pad type is defined by two properties: its direction and its
-      availability. As we've mentioned before, &GStreamer; defines two
-      pad directions: source pads and sink pads. This terminology is
-      defined from the viewpoint of the element itself: elements receive
-      data on their sink pads and generate data on their source pads.
-      Schematically, sink pads are drawn on the left side of an element,
-      whereas source pads are drawn on the right side of an element. In
-      such graphs, data flows from left to right.
-
-
-        In reality, there is no objection to data flowing from a
-        source pad to the sink pad of an element upstream (to the
-        left of this element in drawings). Data will, however, always
-        flow from a source pad of one element to the sink pad of
-        another.
-
-
-
-
-
-      Pad directions are very simple compared to pad availability.
A pad
-      can have any of three availabilities: always, sometimes and on
-      request. The meaning of those three types is exactly as it says:
-      always pads always exist, sometimes pads exist only in certain
-      cases (and can disappear randomly), and on-request pads appear
-      only if explicitly requested by applications.
-
-
-
-      Dynamic (or sometimes) pads
+    Pads are either source or sink pads. The terminology is defined from the
+    view of the element itself: elements accept data on their sink pads, and
+    send data out on their source pads. Sink pads are drawn on the left,
+    while source pads are drawn on the right of an element. In general,
+    data flows from left to right in the graph.
+      In reality, there is no objection to data flowing from a
+      source pad to the sink pad of an element upstream. Data will, however,
+      always flow from a source pad of one element to the sink pad of
+      another.
+
+
+
+
+   Types of pad
+
+
+    Dynamic pads
 
       Some elements might not have all of their pads when the element is
-      created. This can happen, for example, with an Ogg demuxer element.
-      The element will read the Ogg stream and create dynamic pads for
-      each contained elementary stream (vorbis, theora) when it detects
-      such a stream in the Ogg stream. Likewise, it will delete the pad
-      when the stream ends. This principle is very useful for demuxer
-      elements, for example.
+      created. This
+      can happen, for example, with an MPEG system demultiplexer. The
+      demultiplexer will create its pads at runtime when it detects the
+      different elementary streams in the MPEG system stream.
 
 
-      Running gst-inspect oggdemux will show
-      that the element has only one pad: a sink pad called 'sink'. The
-      other pads are dormant. You can see this in the pad
-      template because there is an Exists: Sometimes
-      property. Depending on the type of Ogg file you play, the pads will
-      be created. We will see that this is very important when you are
-      going to create dynamic pipelines. You can attach a signal handler
-      to an element to inform you when the element has created a new pad
-      from one of its sometimes pad templates. The
-      following piece of code is an example of how to do this:
+      Running gst-inspect mpegdemux will show that
+      the element has only one pad: a sink pad called 'sink'. The other pads are
+      "dormant". You can see this in the pad template because there is
+      an 'Exists: Sometimes'
+      property. Depending on the type of MPEG file you play, the pads will
+      be created. We
+      will see that this is very important when you are going to create dynamic
+      pipelines later on in this manual.
 
-
-static void
-cb_new_pad (GstElement *element,
-            GstPad     *pad,
-            gpointer    data)
-{
-  g_print ("A new pad %s was created\n", gst_pad_get_name (pad));
-
-  /* here, you would setup a new pad link for the newly created pad */
-[..]
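-  /* A hypothetical continuation, not part of the original example:
-   * assuming a "decoder" element had been created beforehand, the
-   * newly created pad could be linked like this:
-   *
-   *   gst_pad_link (pad, gst_element_get_pad (decoder, "sink"));
-   */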
-}
-
-int
-main(int argc, char *argv[])
-{
-  GstElement *pipeline, *source, *demux;
-
-  /* init */
-  gst_init (&argc, &argv);
-
-  /* create elements */
-  pipeline = gst_pipeline_new ("my_pipeline");
-  source = gst_element_factory_make ("filesrc", "source");
-  g_object_set (source, "location", argv[1], NULL);
-  demux = gst_element_factory_make ("oggdemux", "demuxer");
-
-  /* you would normally check that the elements were created properly */
-
-  /* put together a pipeline */
-  gst_bin_add_many (GST_BIN (pipeline), source, demux, NULL);
-  gst_element_link (source, demux);
-
-  /* listen for newly created pads */
-  g_signal_connect (demux, "new-pad", G_CALLBACK (cb_new_pad), NULL);
-
-  /* start the pipeline */
-  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
-  while (gst_bin_iterate (GST_BIN (pipeline)));
-
-[..]
-}
-
 
     Request pads
 
       An element can also have request pads. These pads are not created
       automatically but are only created on demand. This is very useful
-      for multiplexers, aggregators and tee elements. Aggregators are
-      elements that merge the content of several input streams together
-      into one output stream. Tee elements are the reverse: they are
-      elements that have one input stream and copy this stream to each
-      of their output pads, which are created on request. Whenever an
-      application needs another copy of the stream, it can simply request
-      a new output pad from the tee element.
+      for multiplexers, aggregators and tee elements.
 
 
-      The following piece of code shows how you can request a new output
-      pad from a tee element:
-
-
-static void
-some_function (GstElement *tee)
-{
-  GstPad * pad;
-
-  pad = gst_element_get_request_pad (tee, "src%d");
-  g_print ("A new pad %s was created\n", gst_pad_get_name (pad));
-
-  /* here, you would link the pad */
-[..]
-}
-
-
-      The gst_element_get_request_pad () method
-      can be used to get a pad from the element based on the name of
-      the pad template. It is also possible to request a pad that is
-      compatible with another pad template. This is very useful if
-      you want to link an element to a multiplexer element and you
-      need to request a pad that is compatible. The method
-      gst_element_get_compatible_pad () can be
-      used to request a compatible pad, as shown in the next example.
-      It will request a pad from an Ogg multiplexer that is compatible
-      with the given input pad.
+      The tee element, for example, has one input pad and a request pad template for the
+      output pads. Whenever an application wants to get an output pad from the tee element, it
+      has to request the pad.
 
-
-static void
-link_to_multiplexer (GstPad     *tolink_pad,
-                     GstElement *mux)
-{
-  GstPad *pad;
-
-  pad = gst_element_get_compatible_pad (mux, tolink_pad);
-  gst_pad_link (tolink_pad, pad);
-
-  g_print ("A new pad %s was created and linked to %s\n",
-           gst_pad_get_name (pad), gst_pad_get_name (tolink_pad));
-}
-
 
 
     Capabilities of a pad
 
-      Since the pads play a very important role in how the element is
-      viewed by the outside world, a mechanism is implemented to describe
-      the data that can flow or currently flows through the pad by using
-      capabilities. Here, we will briefly describe what capabilities are
-      and how to use them, enough to get an understanding of the concept.
-      For an in-depth look into capabilities and a list of all capabilities
-      defined in &GStreamer;, see the Plugin
-      Writers Guide.
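      As a quick, hypothetical illustration of the concept (the element and
      pad names here are assumptions, and error checking is omitted), an
      application can retrieve the capabilities of a pad using calls that
      appear elsewhere in this manual:

GstCaps *caps;

/* "decoder" is some previously created element with a pad named "src";
 * the returned caps describe the media types this pad can handle */
caps = gst_pad_get_caps (gst_element_get_pad (decoder, "src"));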
+ Since the pads play a very important role in how the element is viewed by the + outside world, a mechanism is implemented to describe the data that can + flow through the pad by using capabilities. - - Capabilities are attached to pad templates and to pads. For pad - templates, it will describe the types of media that may stream - over a pad created from this template. For pads, it can either - be a list of possible caps (usually a copy of the pad template's - capabilities), in which case the pad is not yet negotiated, or it - is the type of media that currently streams over this pad, in - which case the pad has been negotiated already. + + We will briefly describe what capabilities are, enough for you to get a basic understanding + of the concepts. You will find more information on how to create capabilities in the + Plugin Writer's Guide. - - Dissecting capabilities - - A pads capabilities are described in a GstCaps - object. Internally, a GstCaps - will contain one or more GstStructure - that will describe one media type. A negotiated pad will have - capabilities set that contain exactly one - structure. Also, this structure will contain only - fixed values. These constraints are not - true for unnegotiated pads or pad templates. + + Capabilities + + Capabilities are attached to a pad in order to describe + what type of media the pad can handle. + + + Capabilities is shorthand for "capability chain". A capability chain + is a chain of one capability or more. - As an example, below is a dump of the capabilities of the - vorbisdec element, which you will get by running - gst-inspect vorbisdec. You will see two pads: - a source and a sink pad. Both of these pads are always available, - and both have capabilities attached to them. The sink pad will - accept vorbis-encoded audio data, with the mime-type - audio/x-vorbis. The source pad will be used - to send raw (decoded) audio samples to the next element, with - a raw audio mime-type (either audio/x-raw-int or - audio/x-raw-float). The source pad will also - contain properties for the audio samplerate and the amount of - channels, plus some more that you don't need to worry about - for now. + The basic entity is a capability, and is defined by a name, a MIME + type and a set of properties. A capability can be chained to + another capability, which is why we commonly refer to a chain of + capability entities as "capabilities". + + + It is important to understand that the term "capabilities" refers + to a chain of one capability or more. This will be clearer when + you see the structure definition of a GstCaps + element. + + + + + Below is a dump of the capabilities of the element mad, as shown by + gst-inspect. + You can see two pads: sink and src. Both pads have capability information attached to them. + + + The sink pad (input pad) is called 'sink' and takes data of MIME type 'audio/mp3'. It also has + three properties: layer, bitrate and framed. + + + The source pad (output pad) is called 'src' and outputs data of + MIME type 'audio/raw'. It also has four properties: format, depth, + rate and channels. 
-Pad Templates: - SRC template: 'src' - Availability: Always - Capabilities: - audio/x-raw-float - rate: [ 8000, 50000 ] - channels: [ 1, 2 ] - endianness: 1234 - width: 32 - buffer-frames: 0 - +Pads: SINK template: 'sink' Availability: Always Capabilities: - audio/x-vorbis + 'mad_sink': + MIME type: 'audio/mp3': + + SRC template: 'src' + Availability: Always + Capabilities: + 'mad_src': + MIME type: 'audio/raw': + format: String: int + endianness: Integer: 1234 + width: Integer: 16 + depth: Integer: 16 + channels: Integer range: 1 - 2 + law: Integer: 0 + signed: Boolean: TRUE + rate: Integer range: 11025 - 48000 - - - Properties and values + + What are properties ? Properties are used to describe extra information for capabilities. A property consists of a key (a string) and a value. There are different possible value types that can be used: + - Basic types, this can be pretty much any - GType registered with Glib. Those - properties indicate a specific, non-dynamic value for this - property. Examples include: + basic types: - An integer value (G_TYPE_INT): - the property has this exact value. + an integer value: the property has this exact value. - A boolean value (G_TYPE_BOOLEAN): - the property is either TRUE or FALSE. + a boolean value: the property is either TRUE or FALSE. - A float value (G_TYPE_FLOAT): - the property has this exact floating point value. + a fourcc value: this is a value that is commonly used to + describe an encoding for video, + as used for example by the AVI specification. + + fourcc values consist of four bytes. + The FOURCC + Definition List is the most complete resource + on the allowed fourcc values. + - A string value (G_TYPE_STRING): - the property contains a UTF-8 string. + a float value: the property has this exact floating point value. + + + + + a string value. + - Range types are GTypes registered by - &GStreamer; to indicate a range of possible values. They are - used for indicating allowed audio samplerate values or - supported video sizes. The two types defined in &GStreamer; - are: + range types: - An integer range value - (GST_TYPE_INT_RANGE): the property - denotes a range of possible integers, with a lower and an - upper boundary. The vorbisdec element, for - example, has a rate property that can be between 8000 and - 50000. + an integer range value: the property denotes a range of + possible integers. For example, the wavparse element has + a source pad where the "rate" property can go from 8000 to + 48000. - A float range value - (GST_TYPE_FLOAT_RANGE): the property - denotes a range of possible floating point values, with a - lower and an upper boundary. + a float range value: the property denotes a range of possible + floating point values. - A list value (GST_TYPE_LIST): the - property can take any value from a list of basic values - given in this list. + a list value: the property can take any value from a list of + basic value types or range types. + + + + + + + What capabilities are used for + + Capabilities describe in great detail the type of media that is handled by the pads. + They are mostly used for: + + + + + Autoplugging: automatically finding plugins for a set of capabilities + + + + + Compatibility detection: when two pads are linked, GStreamer + can verify if the two pads are talking about the same media types. + The process of linking two pads and checking if they are compatible + is called "caps negotiation". 
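      A minimal sketch of what a negotiation failure looks like to an
      application (the element names are assumptions carried over from the
      hello world example; gst_element_link returns FALSE when the two
      pads share no common media type):

if (!gst_element_link (decoder, audiosink))
  g_print ("could not link: the pads share no common media type\n");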
-
-
-    What capabilities are used for
-
-      Capabilities describe the type of data that is streamed between
-      two pads, or that one pad (template) supports. This makes them
-      very useful for various purposes:
-
-
-
-
-          Autoplugging: automatically finding elements to link to a
-          pad based on its capabilities. All autopluggers use this
-          method.
-
-
-
-
-          Compatibility detection: when two pads are linked, &GStreamer;
-          can verify if the two pads are talking about the same media
-          type. The process of linking two pads and checking if they
-          are compatible is called caps negotiation.
-
-
-
-
-          Metadata: by reading the capabilities from a pad, applications
-          can provide information about the type of media that is being
-          streamed over the pad, which is information about the stream
-          that is currently being played back.
-
-
-
-
-          Filtering: an application can use capabilities to limit the
-          possible media types that can stream between two pads to a
-          specific subset of their supported stream types. An application
-          can, for example, use filtered caps to set a
-          specific (non-fixed) video size that will stream between two
-          pads.
-
-
-
-
-
-    Using capabilities for metadata
-
-      A pad can have a set (i.e. one or more) of capabilities attached
-      to it. You can get values of properties in a set of capabilities
-      by querying individual properties of one structure. You can get
-      a structure from a caps using
-      gst_caps_get_structure ():
-
-
-static void
-read_video_props (GstCaps *caps)
-{
-  gint width, height;
-  const GstStructure *str;
-
-  str = gst_caps_get_structure (caps, 0);
-  if (!gst_structure_get_int (str, "width", &width) ||
-      !gst_structure_get_int (str, "height", &height)) {
-    g_print ("No width/height available\n");
-    return;
-  }
-
-  g_print ("The video size of this set of capabilities is %dx%d\n",
-           width, height);
-}
-
-
-
-    Creating capabilities for filtering
-
-      While capabilities are mainly used inside a plugin to describe the
-      media type of the pads, the application programmer also has to have
-      a basic understanding of capabilities in order to interface with the
-      plugins, especially when using filtered caps. When you're using
-      filtered caps or fixation, you're limiting the allowed types of
-      media that can stream between two pads to a subset of their supported
-      media types. You do this by filtering using your own set of
-      capabilities. In order to do this, you need to create your own
-      GstCaps. The simplest way to do this is by
-      using the convenience function gst_caps_new_simple
-      ():
-
-
-static void
-link_pads_with_filter (GstPad *one,
-                       GstPad *other)
-{
-  GstCaps *caps;
-
-  caps = gst_caps_new_simple ("video/x-raw-yuv",
-                              "width", G_TYPE_INT, 384,
-                              "height", G_TYPE_INT, 288,
-                              "framerate", G_TYPE_DOUBLE, 25.,
-                              NULL);
-  gst_pad_link_filtered (one, other, caps);
-}
-
-
-      In some cases, you will want to create a more elaborate set of
-      capabilities to filter a link between two pads.
Then, this function - is too simplistic and you'll want to use the method - gst_caps_new_full (): - - -static void -link_pads_with_filter (GstPad *one, - GstPad *other) -{ - GstCaps *caps; - - caps = gst_caps_new_full ( - gst_structure_new ("video/x-raw-yuv", - "width", G_TYPE_INT, 384, - "height", G_TYPE_INT, 288, - "framerate", G_TYPE_DOUBLE, 25., - NULL), - gst_structure_new ("video/x-raw-rgb", - "width", G_TYPE_INT, 384, - "height", G_TYPE_INT, 288, - "framerate", G_TYPE_DOUBLE, 25., - NULL), - NULL); - - gst_pad_link_filtered (one, other, caps); -} - - - See the API references for the full API of - GstStructure and - GstCaps. - - - - - - Ghost pads - - You can see from how a bin - has no pads of its own. This is where "ghost pads" come into play. - -
- Visualisation of a <ulink type="http" - url="../../gstreamer/html/GstBin.html"><classname>GstBin</classname></ulink> - element without ghost pads - - - - - -
- - A ghost pad is a pad from some element in the bin that can be - accessed directly from the bin as well. Compare it to a symbolic - link in UNIX filesystems. Using ghost pads on bins, the bin also - has a pad and can transparently be used as an element in other - parts of your code. - - -
- Visualisation of a <ulink type="http" - url="../../gstreamer/html/GstBin.html"><classname>GstBin</classname></ulink> - element with a ghost pad - - - - - -
-
-      The figure above is a representation of a ghost pad. The sink pad of
-      element one is now also a pad of the bin. Obviously, ghost pads can
-      be added to any type of element, not just to a
-      GstBin.
-
-
-      A ghost pad is created using the function
-      gst_element_add_ghost_pad ():
-
-
-int
-main (int   argc,
-      char *argv[])
-{
-  GstElement *bin, *sink;
-
-  /* init */
-  gst_init (&argc, &argv);
-
-  /* create element, add to bin, add ghostpad */
-  sink = gst_element_factory_make ("fakesink", "sink");
-  bin = gst_bin_new ("mybin");
-  gst_bin_add (GST_BIN (bin), sink);
-  gst_element_add_ghost_pad (bin,
-      gst_element_get_pad (sink, "sink"), "sink");
-
-[..]
-}
-
-
-      In the above example, the bin now also has a pad: the pad called
-      sink of the given element. The bin can, from here
-      on, be used as a substitute for the sink element. You could, for
-      example, link another element to the bin.
-
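      As a brief, hypothetical illustration of that last point (not part of
      the original example; error checking is omitted), the bin from the
      fragment above can now be linked exactly like a plain element:

GstElement *pipeline, *source;

/* continuing the example above: "bin" has a ghost pad called "sink" */
pipeline = gst_pipeline_new ("pipeline");
source = gst_element_factory_make ("fakesrc", "source");
gst_bin_add_many (GST_BIN (pipeline), source, bin, NULL);

/* the ghost pad makes the bin linkable as if it were a sink element */
gst_element_link (source, bin);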
diff --git a/docs/manual/basics-plugins.xml b/docs/manual/basics-plugins.xml index 0a12fea3a2..3189384766 100644 --- a/docs/manual/basics-plugins.xml +++ b/docs/manual/basics-plugins.xml @@ -28,57 +28,4 @@ - - - All plugins should implement one function, plugin_init, - that creates all the element factories and registers all the type - definitions contained in the plugin. - Without this function, a plugin cannot be registered. - - - The plugins are maintained in the plugin system. Optionally, the - type definitions and the element factories can be saved into an XML - representation so that the plugin system does not have to load all - available plugins in order to know their definition. - - - - The basic plugin structure has the following fields: - - -typedef struct _GstPlugin GstPlugin; - -struct _GstPlugin { - gchar *name; /* name of the plugin */ - gchar *longname; /* long name of plugin */ - gchar *filename; /* filename it came from */ - - GList *types; /* list of types provided */ - gint numtypes; - GList *elements; /* list of elements provided */ - gint numelements; - GList *autopluggers; /* list of autopluggers provided */ - gint numautopluggers; - - gboolean loaded; /* if the plugin is in memory */ -}; - - - - You can query a GList of available plugins with the - function gst_plugin_get_list as this example shows: - - - GList *plugins; - - plugins = gst_plugin_get_list (); - - while (plugins) { - GstPlugin *plugin = (GstPlugin *)plugins->data; - - g_print ("plugin: %s\n", gst_plugin_get_name (plugin)); - - plugins = g_list_next (plugins); - } -
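      Building on the fragment above, here is a small, hypothetical sketch
      (the plugin name "mad" is just an example) that checks whether a
      particular plugin is available, using only the calls shown earlier:

#include <string.h>

gboolean
plugin_is_available (const gchar *name)
{
  GList *plugins = gst_plugin_get_list ();

  while (plugins) {
    GstPlugin *plugin = (GstPlugin *) plugins->data;

    /* compare each plugin's name against the one we are looking for */
    if (strcmp (gst_plugin_get_name (plugin), name) == 0)
      return TRUE;
    plugins = g_list_next (plugins);
  }
  return FALSE;
}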
diff --git a/docs/manual/highlevel-components.xml b/docs/manual/highlevel-components.xml
index 75dc71ee5c..2875d46d75 100644
--- a/docs/manual/highlevel-components.xml
+++ b/docs/manual/highlevel-components.xml
@@ -2,267 +2,14 @@
   Components
 
-    &GStreamer; includes several higher-level components to simplify your
-    application's life. All of the components discussed here (for now) are
-    targeted at media playback. The idea of each of these components is
-    to integrate as closely as possible with a &GStreamer; pipeline, but
-    to hide the complexity of media type detection and several other
-    rather complex topics that have been discussed in .
+    FIXME: This chapter is way out of date.
 
-
-    We currently recommend people to use either playbin (see ) or decodebin (see ), depending on their needs. The
-    other components discussed here are either outdated or deprecated. The
-    documentation is provided for legacy purposes. Use of those other
-    components is not recommended.
+
+    GStreamer includes components that people can include
+    in their programs.
 
-
-    Playbin
-
-
-      Playbin is an element that can be created using the standard &GStreamer;
-      API (e.g. gst_element_factory_make ()). The factory
-      is conveniently called playbin. By being a
-      GstElement, playbin automatically supports all
-      of the features of this class, including error handling, tag support,
-      state handling, getting stream positions, seeking, and so on.
-
-
-
-      Setting up a playbin pipeline is as simple as creating an instance of
-      the playbin element, setting a file location (this has to be a valid
-      URI, so <protocol>://<location>, e.g.
-      file:///tmp/my.ogg or http://www.example.org/stream.ogg) using the
-      uri property on playbin, and then setting the element
-      to the GST_STATE_PLAYING state. Internally,
-      playbin uses threads, so there's no need to iterate the element or
-      anything. However, one thing to keep in mind is that signals fired
-      by playbin might come from a thread other than the main thread, so
-      be sure to keep this in mind in your signal handlers. Most application
-      programmers will want to use a function such as g_idle_add
-      () to make sure that the signal is handled in the main
-      thread.
-
-
-#include <gst/gst.h>
-
-static void
-cb_eos (GstElement *play,
-        gpointer    data)
-{
-  gst_main_quit ();
-}
-
-static void
-cb_error (GstElement *play,
-          GstElement *src,
-          GError     *err,
-          gchar      *debug,
-          gpointer    data)
-{
-  g_print ("Error: %s\n", err->message);
-}
-
-gint
-main (gint   argc,
-      gchar *argv[])
-{
-  GstElement *play;
-
-  /* init GStreamer */
-  gst_init (&argc, &argv);
-
-  /* make sure we have a URI */
-  if (argc != 2) {
-    g_print ("Usage: %s <URI>\n", argv[0]);
-    return -1;
-  }
-
-  /* set up */
-  play = gst_element_factory_make ("playbin", "play");
-  g_object_set (G_OBJECT (play), "uri", argv[1], NULL);
-  g_signal_connect (play, "eos", G_CALLBACK (cb_eos), NULL);
-  g_signal_connect (play, "error", G_CALLBACK (cb_error), NULL);
-  if (gst_element_set_state (play, GST_STATE_PLAYING) != GST_STATE_SUCCESS) {
-    g_print ("Failed to play\n");
-    return -1;
-  }
-
-  /* now run */
-  gst_main ();
-
-  /* also clean up */
-  gst_element_set_state (play, GST_STATE_NULL);
-  gst_object_unref (GST_OBJECT (play));
-
-  return 0;
-}
-
-
-      Playbin has several features that have been discussed previously:
-
-
-
-
-          Settable video and audio output (using the video-sink
-          and audio-sink properties).
-
-
-
-
-          Mostly controllable and trackable as a
-          GstElement, including error handling, eos
-          handling, tag handling, state handling, media position handling and
-          seeking.
-
-
-
-
-          Buffers network sources.
-
-
-
-
-          Supports visualizations for audio-only media.
-
-
-
-
-
-
-    Decodebin
-
-
-      Decodebin is the actual autoplugger backend of playbin, which was
-      discussed in the previous section. Decodebin will, in short, accept
-      input from a source that is linked to its sinkpad and will try to
-      detect the media type contained in the stream, and set up decoder
-      routines for each of those. It will automatically select decoders.
-      For each decoded stream, it will emit the new-decoded-pad
-      signal, to let the client know about the newly found decoded stream.
-      For unknown streams (which might be the whole stream), it will emit
-      the unknown-type signal. The application is then
-      responsible for reporting the error to the user.
-
-
-
-      The example code below will play back an audio stream of an input
-      file. For readability, it does not include any error handling of
-      any sort.
-
-
-#include <gst/gst.h>
-#include <string.h>
-
-GstElement *pipeline, *audio;
-GstPad *audiopad;
-
-static void
-cb_newpad (GstElement *decodebin,
-           GstPad     *pad,
-           gboolean    last,
-           gpointer    data)
-{
-  GstCaps *caps;
-  GstStructure *str;
-
-  /* only link audio; only link once */
-  if (GST_PAD_IS_LINKED (audiopad))
-    return;
-  caps = gst_pad_get_caps (pad);
-  str = gst_caps_get_structure (caps, 0);
-  if (!strstr (gst_structure_get_name (str), "audio"))
-    return;
-
-  /* link'n'play */
-  gst_pad_link (pad, audiopad);
-  gst_bin_add (GST_BIN (pipeline), audio);
-  gst_bin_sync_children_state (GST_BIN (pipeline));
-}
-
-gint
-main (gint   argc,
-      gchar *argv[])
-{
-  GstElement *src, *dec, *conv, *scale, *sink;
-
-  /* init GStreamer */
-  gst_init (&argc, &argv);
-
-  /* make sure we have input */
-  if (argc != 2) {
-    g_print ("Usage: %s <filename>\n", argv[0]);
-    return -1;
-  }
-
-  /* setup */
-  pipeline = gst_pipeline_new ("pipeline");
-  src = gst_element_factory_make ("filesrc", "source");
-  g_object_set (G_OBJECT (src), "location", argv[1], NULL);
-  dec = gst_element_factory_make ("decodebin", "decoder");
-  g_signal_connect (dec, "new-decoded-pad", G_CALLBACK (cb_newpad), NULL);
-  audio = gst_bin_new ("audiobin");
-  conv = gst_element_factory_make ("audioconvert", "aconv");
-  audiopad = gst_element_get_pad (conv, "sink");
-  scale = gst_element_factory_make ("audioscale", "scale");
-  sink = gst_element_factory_make ("alsasink", "sink");
-  gst_bin_add_many (GST_BIN (audio), conv, scale, sink, NULL);
-  gst_element_link_many (conv, scale, sink, NULL);
-  gst_bin_add_many (GST_BIN (pipeline), src, dec, NULL);
-  gst_element_link (src, dec);
-
-  /* run */
-  gst_element_set_state (audio, GST_STATE_PAUSED);
-  gst_element_set_state (pipeline, GST_STATE_PLAYING);
-  while (gst_bin_iterate (GST_BIN (pipeline))) ;
-
-  /* cleanup */
-  gst_element_set_state (pipeline, GST_STATE_NULL);
-  gst_object_unref (GST_OBJECT (pipeline));
-
-  return 0;
-}
-
-
-      Although decodebin is a good autoplugger, there's a whole lot of
-      things that it does not do and is not intended to do:
-
-
-
-
-          Taking care of input streams with a known media type (e.g. a DVD,
-          an audio-CD or such).
-
-
-
-
-          Selection of streams (e.g. which audio track to play in case of
-          multi-language media streams).
-
-
-
-
-          Overlaying subtitles over a decoded video stream.
- - - - - - - Spider - - - Bla - - - GstPlay @@ -271,6 +18,14 @@ main (gint argc, + + GstMediaPlay + + GstMediaPlay is a complete player widget. + + + + GstEditor diff --git a/docs/manual/intro-motivation.xml b/docs/manual/intro-motivation.xml index 5430bf92f4..f36b110a08 100644 --- a/docs/manual/intro-motivation.xml +++ b/docs/manual/intro-motivation.xml @@ -1,18 +1,12 @@ - Motivation & Goals + Motivation - Linux has historically lagged behind other operating systems in the - multimedia arena. Microsoft's Windows and - Apple's MacOS both have strong support for - multimedia devices, multimedia content creation, playback, and - realtime processing. Linux, on the other hand, has a poorly integrated - collection of multimedia utilities and applications available, which - can hardly compete with the professional level of software available - for MS Windows and MacOS. - - - GStreamer was designed to provide a solution to the current Linux media - problems. + Linux has historically lagged behind other operating systems in the multimedia + arena. Microsoft's Windows and Apple's MacOS both have strong support + for multimedia devices, multimedia content creation, + playback, and realtime processing. Linux, on the other hand, has a poorly integrated + collection of multimedia utilities and applications available, which can hardly compete + with the professional level of software available for MS Windows and MacOS. @@ -23,41 +17,37 @@ Multitude of duplicate code - The Linux user who wishes to hear a sound file must hunt through - their collection of sound file players in order to play the tens - of sound file formats in wide use today. Most of these players - basically reimplement the same code over and over again. + The Linux user who wishes to hear a sound file must hunt through their collection of + sound file players in order to play the tens of sound file formats in wide use today. + Most of these players basically reimplement the same code over and over again. - The Linux developer who wishes to embed a video clip in their - application must use crude hacks to run an external video player. - There is no library available that a developer can use to create - a custom media player. + The Linux developer who wishes to embed a video clip in their application must use + crude hacks to run an external video player. There is no library available that a + developer can use to create a custom media player. - + 'One goal' media players/libraries - Your typical MPEG player was designed to play MPEG video and audio. - Most of these players have implemented a complete infrastructure - focused on achieving their only goal: playback. No provisions were - made to add filters or special effects to the video or audio data. + Your typical MPEG player was designed to play MPEG video and audio. Most of + these players have implemented a complete infrastructure focused on + achieving their only goal: playback. No provisions were made to add + filters or special effects to the video or audio data. - If you want to convert an MPEG-2 video stream into an AVI file, - your best option would be to take all of the MPEG-2 decoding - algorithms out of the player and duplicate them into your own - AVI encoder. These algorithms cannot easily be shared across - applications. + If you want to convert an MPEG2 video stream into an AVI file, your best + option would be to take all of the MPEG2 decoding algorithms out + of the player and duplicate them into your own AVI encoder. 
These
+        algorithms cannot easily be shared across applications.
 
 
-        Attempts have been made to create libraries for handling various
-        media types. Because they focus on a very specific media type
-        (avifile, libmpeg2, ...), significant work is needed to integrate
-        them due to a lack of a common API. &GStreamer; allows you to
-        wrap these libraries with a common API, which significantly
-        simplifies integration and reuse.
+        Attempts have been made to create libraries for handling various media types.
+        Because they focus on a very specific media type (avifile, libmpeg2, ...),
+        significant work is needed to integrate them due to a lack of a common API.
+        GStreamer allows you to wrap these libraries with a common API, which
+        significantly simplifies integration and reuse.
 
 
@@ -66,9 +56,9 @@
         Your typical media player might have a plugin for different media
         types. Two media players will typically implement their own plugin
-        mechanism so that the codecs cannot be easily exchanged. The plugin
-        system of the typical media player is also very tailored to the
-        specific needs of the application.
+        mechanism so that the codecs cannot be easily exchanged. The plugin system
+        of the typical media player is also very tailored to the specific needs
+        of the application.
 
 
         The lack of a unified plugin mechanism also seriously hinders the
@@ -76,27 +66,14 @@
         code to all the different plugin mechanisms.
 
-        While &GStreamer; also uses its own plugin system it offers a very rich
+        While GStreamer also uses its own plugin system it offers a very rich
        framework for the plugin developer and ensures the plugin can be used
        in a wide range of applications, transparently interacting with other
-        plugins. The framework that &GStreamer; provides for the plugins is
+        plugins. The framework that GStreamer provides for the plugins is
        flexible enough to host even the most demanding plugins.
 
-
-      Poor user experience
-
-        Because of the problems mentioned above, application authors have
-        so far often been urged to spend a considerable amount of time in
-        writing their own backends, plugin mechanisms and so on. The result
-        has often been, unfortunately, that both the backend as well as the
-        user interface were only half-finished. Demotivated, the application
-        authors would start rewriting the whole thing and complete the circle.
-        This leads to a poor end user experience.
-
-
-
       Provision for network transparency
 
@@ -107,15 +84,15 @@
         No provisions have been made for technologies such as
         the GNOME object embedding using Bonobo.
+        type="http">GNOME object embedding using Bonobo.
 
-        The &GStreamer; core does not use network transparent technologies
-        at the lowest level as it only adds overhead for the local case.
+        The GStreamer core does not use network transparent technologies at the
+        lowest level as it only adds overhead for the local case.
        That said, it shouldn't be hard to create a wrapper around the
-        core components. There are tcp plugins now that implement a
-        &GStreamer; Data Protocol that allows pipelines to be split over
-        TCP. These are located in the gst-plugins module directory gst/tcp.
+        core components. There are tcp plugins now that implement a GStreamer
+        Data Protocol that allows pipelines to be split over TCP. These are
+        located in the gst-plugins module directory gst/tcp.
 
 
@@ -131,172 +108,4 @@
 
-
-
-    The design goals
-
-      We describe what we try to achieve with &GStreamer;.
- - - Clean and powerful - - &GStreamer; wants to provide a clean interface to: - - - - - The application programmer who wants to build a media pipeline. - The programmer can use an extensive set of powerful tools to create - media pipelines without writing a single line of code. Performing - complex media manipulations becomes very easy. - - - - - The plugin programmer. Plugin programmers are provided a clean and - simple API to create self contained plugins. An extensive debugging - and tracing mechanism has been integrated. GStreamer also comes with - an extensive set of real-life plugins that serve as examples too. - - - - - - - Object oriented - - &GStreamer; adheres to the GLib 2.0 object model. A programmer - familiar with GLib 2.0 or older versions of GTK+ will be - comfortable with &GStreamer;. - - - &GStreamer; uses the mechanism of signals and object properties. - - - All objects can be queried at runtime for their various properties and - capabilities. - - - &GStreamer; intends to be similar in programming methodology to GTK+. - This applies to the object model, ownership of objects, reference - counting, ... - - - - - Extensible - - All &GStreamer; Objects can be extended using the GObject - inheritance methods. - - - All plugins are loaded dynamically and can be extended and upgraded - independently. - - - - - Allow binary only plugins - - Plugins are shared libraries that are loaded at runtime. Since all - the properties of the plugin can be set using the GObject properties, - there is no need (and in fact no way) to have any header files - installed for the plugins. - - - Special care has been taken to make plugins completely selfcontained. - All relevant aspects of plugins can be queried at run-time. - - - - - High performance - - High performance is obtained by: - - - - - using GLib's g_mem_chunk and fast - non-blocking allocation algorithms where possible to - minimize dynamic memory allocation. - - - - - extremely light-weight links between plugins. Data can travel - the pipeline with minimal overhead. Data passing between - plugins only involves a pointer dereference in a typical - pipeline. - - - - - providing a mechanism to directly work on the target memory. - A plugin can for example directly write to the X server's - shared memory space. Buffers can also point to arbitrary - memory, such as a sound card's internal hardware buffer. - - - - - refcounting and copy on write minimize usage of memcpy. - Sub-buffers efficiently split buffers into manageable pieces. - - - - - the use of cothreads to minimize the threading overhead. - Cothreads are a simple and fast user-space method for - switching between subtasks. Cothreads were measured to - consume as little as 600 cpu cycles. - - - - - allowing hardware acceleration by using specialized plugins. - - - - - using a plugin registry with the specifications of the plugins so - that the plugin loading can be delayed until the plugin is actually - used. - - - - - all critical data passing is free of locks and mutexes. - - - - - - - Clean core/plugins separation - - The core of &GStreamer; is essentially media-agnostic. It only knows - about bytes and blocks, and only contains basic elements. - The core of &GStreamer; is functional enough to even implement - low-level system tools, like cp. - - - All of the media handling functionality is provided by plugins - external to the core. These tell the core how to handle specific - types of media. 
- - - - - Provide a framework for codec experimentation - - &GStreamer; also wants to be an easy framework where codec - developers can experiment with different algorithms, speeding up - the development of open and free multimedia codecs like Theora and - Vorbis. - - - -
diff --git a/docs/manual/intro-preface.xml b/docs/manual/intro-preface.xml
index c1746eb0b1..fd38af17f1 100644
--- a/docs/manual/intro-preface.xml
+++ b/docs/manual/intro-preface.xml
@@ -6,30 +6,29 @@
 
 
-    What is &GStreamer;?
+    What is GStreamer?
 
-      &GStreamer; is a framework for creating streaming media applications.
+      GStreamer is a framework for creating streaming media applications.
      The fundamental design comes from the video pipeline at Oregon Graduate
      Institute, as well as some ideas from DirectShow.
 
 
-      &GStreamer;'s development framework makes it possible to write any
-      type of streaming multimedia application. The &GStreamer; framework
-      is designed to make it easy to write applications that handle audio
-      or video or both. It isn't restricted to audio and video, and can
-      process any kind of data flow.
+      GStreamer's development framework makes it possible to write any type of
+      streaming multimedia application. The GStreamer framework is designed
+      to make it easy to write applications that handle audio or video or both.
+      It isn't restricted to audio and video, and can process any kind of
+      data flow.
 
 
      The pipeline design is made to have little overhead above what the
-      applied filters induce. This makes &GStreamer; a good framework for
-      designing even high-end audio applications which put high demands on
-      latency.
+      applied filters induce. This makes GStreamer a good framework for designing
+      even high-end audio applications which put high demands on latency.
 
 
-      One of the most obvious uses of &GStreamer; is using it to build
-      a media player. &GStreamer; already includes components for building a
+      One of the most obvious uses of GStreamer is using it to build
+      a media player. GStreamer already includes components for building a
      media player that can support a very wide variety of formats, including
-      MP3, Ogg/Vorbis, MPEG-1/2, AVI, Quicktime, mod, and more. &GStreamer;,
+      MP3, Ogg Vorbis, MPEG1, MPEG2, AVI, Quicktime, mod, and more. GStreamer,
      however, is much more than just another media player. Its main advantages
      are that the pluggable components can be mixed and matched into arbitrary
      pipelines so that it's possible to write a full-fledged video or audio
@@ -45,73 +44,16 @@
 
 
-      The &GStreamer; core function is to provide a framework for plugins,
-      data flow and media type handling/negotiation. It also provides an
-      API to write applications using the various plugins.
-
-
-
-    Structure of this Manual
-
-      This book is about &GStreamer; from a developer's point of view; it
-      describes how to write a &GStreamer; application using the &GStreamer;
-      libraries and tools. For an explanation about writing plugins, we
-      suggest the Plugin
-      Writers Guide.
Most of those topics are - not just there to introduce you to their API, but primarily to give - a deeper insight in solving application programming problems with - &GStreamer; and understanding their concepts. - - - - Next, in , we will go into higher-level - programming APIs for &GStreamer;. You don't exactly need to know all - the details from the previous parts to understand this, but you will - need to understand basic &GStreamer; concepts nevertheless. We will, - amongst others, discuss XML, playbin and autopluggers. - - - - In , you will find some random - information on integrating with GNOME, KDE, OS X or Windows, some - debugging help and general tips to improve and simplify &GStreamer; - programming. - - - - In order to understand this manual, you will need to have a basic - understanding of the C language. Since &GStreamer; uses GLib - 2.0, the reader is assumed to understand the basics of the - GObject object model. It is recommended to have - skimmed through the introduction of the GObject - tutorial before reading this. You may also want to have a look - at Eric Harlow's book Developing Linux Applications with - GTK+ and GDK. - + This book is about GStreamer from a developer's point of view; it describes + how to write a GStreamer application using the GStreamer libraries and tools. + For an explanation about writing plugins, we suggest the Plugin Writers Guide. +