remove newly added files pending reintegration

Original commit message from CVS:
remove newly added files pending reintegration
This commit is contained in:
Thomas Vander Stichele 2004-12-15 07:45:16 +00:00
parent e831cfd629
commit a9c15fa51b
18 changed files with 720 additions and 2696 deletions

View file

@@ -1,5 +1,5 @@
<chapter id="chapter-autoplugging">
<title>Putting together a pipeline</title>
<chapter id="chapter-factories">
<title>More on factories</title>
<para>
The small application we created in the previous chapter used the
concept of a factory to create the elements. In this chapter we will
@@ -203,6 +203,9 @@ struct _GstType {
<para>
This function will return 0 if the extension was not known.
</para>
<para>
For more information, see <xref linkend="chapter-autoplug"/>.
</para>
</sect2>
</sect1>
@@ -251,392 +254,4 @@ struct _GstType {
</para>
</sect1>
<sect1 id="chapter-dynamic">
<title>Dynamic pipelines</title>
<para>
In this chapter we will see how you can create a dynamic pipeline. A
dynamic pipeline is a pipeline that is updated or created while data
is flowing through it. We will create a partial pipeline first and add
more elements while the pipeline is playing. Dynamic pipelines cause
all sorts of scheduling issues and will remain a topic of research for
a long time in GStreamer.
</para>
<para>
We will show how to create an MPEG1 video player using dynamic pipelines.
As you have seen in the section on pads, we can attach a signal handler to
an element that fires whenever a new pad is created. We will use this to
build our MPEG1 player.
</para>
<para>
We'll start with a simple main function:
</para>
<programlisting>
/* example-begin dynamic.c */
#include &lt;string.h&gt;
#include &lt;stdlib.h&gt;
#include &lt;gst/gst.h&gt;

void
eof (GstElement *src)
{
  g_print ("have eos, quitting\n");
  exit (0);
}

gboolean
idle_func (gpointer data)
{
  gst_bin_iterate (GST_BIN (data));
  return TRUE;
}

void
new_pad_created (GstElement *parse, GstPad *pad, GstElement *pipeline)
{
  GstElement *decode_video = NULL;
  GstElement *decode_audio, *play, *color, *show;
  GstElement *audio_queue, *video_queue;
  GstElement *audio_thread, *video_thread;

  g_print ("***** a new pad %s was created\n", gst_pad_get_name (pad));

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);

  /* link to audio pad */
  if (strncmp (gst_pad_get_name (pad), "audio_", 6) == 0) {
    /* construct internal pipeline elements */
    decode_audio = gst_element_factory_make ("mad", "decode_audio");
    g_return_if_fail (decode_audio != NULL);
    play = gst_element_factory_make ("osssink", "play_audio");
    g_return_if_fail (play != NULL);

    /* create the thread and pack stuff into it */
    audio_thread = gst_thread_new ("audio_thread");
    g_return_if_fail (audio_thread != NULL);

    /* construct queue and link everything in the main pipeline */
    audio_queue = gst_element_factory_make ("queue", "audio_queue");
    g_return_if_fail (audio_queue != NULL);

    gst_bin_add_many (GST_BIN (audio_thread),
                      audio_queue, decode_audio, play, NULL);

    /* set up pad links */
    gst_element_add_ghost_pad (audio_thread,
                               gst_element_get_pad (audio_queue, "sink"),
                               "sink");
    gst_element_link (audio_queue, decode_audio);
    gst_element_link (decode_audio, play);

    gst_bin_add (GST_BIN (pipeline), audio_thread);

    gst_pad_link (pad, gst_element_get_pad (audio_thread, "sink"));

    /* set up thread state and kick things off */
    g_print ("setting to READY state\n");
    gst_element_set_state (GST_ELEMENT (audio_thread), GST_STATE_READY);
  }
  else if (strncmp (gst_pad_get_name (pad), "video_", 6) == 0) {
    /* construct internal pipeline elements */
    decode_video = gst_element_factory_make ("mpeg2dec", "decode_video");
    g_return_if_fail (decode_video != NULL);
    color = gst_element_factory_make ("colorspace", "color");
    g_return_if_fail (color != NULL);
    show = gst_element_factory_make ("xvideosink", "show");
    g_return_if_fail (show != NULL);

    /* construct queue and link everything in the main pipeline */
    video_queue = gst_element_factory_make ("queue", "video_queue");
    g_return_if_fail (video_queue != NULL);

    /* create the thread and pack stuff into it */
    video_thread = gst_thread_new ("video_thread");
    g_return_if_fail (video_thread != NULL);

    gst_bin_add_many (GST_BIN (video_thread), video_queue,
                      decode_video, color, show, NULL);

    /* set up pad links */
    gst_element_add_ghost_pad (video_thread,
                               gst_element_get_pad (video_queue, "sink"),
                               "sink");
    gst_element_link (video_queue, decode_video);
    gst_element_link_many (decode_video, color, show, NULL);

    gst_bin_add (GST_BIN (pipeline), video_thread);

    gst_pad_link (pad, gst_element_get_pad (video_thread, "sink"));

    /* set up thread state and kick things off */
    g_print ("setting to READY state\n");
    gst_element_set_state (GST_ELEMENT (video_thread), GST_STATE_READY);
  }
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
}

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *src, *demux;

  gst_init (&amp;argc, &amp;argv);

  pipeline = gst_pipeline_new ("pipeline");
  g_return_val_if_fail (pipeline != NULL, -1);

  src = gst_element_factory_make ("filesrc", "src");
  g_return_val_if_fail (src != NULL, -1);
  if (argc &lt; 2)
    g_error ("Please specify a video file to play !");
  g_object_set (G_OBJECT (src), "location", argv[1], NULL);

  demux = gst_element_factory_make ("mpegdemux", "demux");
  g_return_val_if_fail (demux != NULL, -1);

  gst_bin_add_many (GST_BIN (pipeline), src, demux, NULL);

  g_signal_connect (G_OBJECT (demux), "new_pad",
                    G_CALLBACK (new_pad_created), pipeline);
  g_signal_connect (G_OBJECT (src), "eos",
                    G_CALLBACK (eof), NULL);

  gst_element_link (src, demux);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  g_idle_add (idle_func, pipeline);

  gst_main ();

  return 0;
}
/* example-end dynamic.c */
</programlisting>
<para>
We create two elements: a file source and an MPEG demuxer.
There is nothing special about this piece of code except for
the 'new_pad' signal, which we connected to the mpegdemux
element using:
</para>
<programlisting>
g_signal_connect (G_OBJECT (demux), "new_pad",
                  G_CALLBACK (new_pad_created), pipeline);
</programlisting>
<para>
When an elementary stream has been detected in the system stream,
mpegdemux will create a new pad that will provide the data of the
elementary stream. A function 'new_pad_created' will be called when
the pad is created.
</para>
<para>
In the above example, we created new elements based on the name of
the newly created pad and added them to a new thread.
There are other ways to check the type of a pad, for
example by using its MIME type and capabilities, as the sketch below shows.
</para>
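<para>
For instance, instead of matching on the pad name, the handler could
inspect the pad's capabilities. The fragment below is a minimal sketch
of that idea; it assumes the 0.8-style caps API, where a caps structure
carries the MIME type as its name, and it elides caps ownership details:
</para>
<programlisting>
void
pad_type_check (GstPad *pad)
{
  GstCaps *caps = gst_pad_get_caps (pad);
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  const gchar *mime = gst_structure_get_name (structure);

  if (g_str_has_prefix (mime, "audio/"))
    g_print ("pad %s carries audio\n", gst_pad_get_name (pad));
  else if (g_str_has_prefix (mime, "video/"))
    g_print ("pad %s carries video\n", gst_pad_get_name (pad));
}
</programlisting>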
</sect1>
<sect1 id="chapter-typedetection">
<title>Type Detection</title>
<para>
Sometimes the capabilities of a pad are not specified. The filesrc
element, for example, does not know what type of file it is reading. Before
you can attach an element to the pad of the filesrc, you need to determine
the media type in order to be able to choose a compatible element.
</para>
<para>
To solve this problem, a plugin can provide the <application>GStreamer</application>
core library with a type definition. The type definition
will contain the following information:
<itemizedlist>
<listitem>
<para>
The MIME type we are going to define.
</para>
</listitem>
<listitem>
<para>
An optional string with a list of possible file extensions this
type is usually associated with. The list entries are separated by
spaces, e.g. ".mp3 .mpa .mpg".
</para>
</listitem>
<listitem>
<para>
An optional typefind function.
</para>
</listitem>
</itemizedlist>
</para>
<para>
The typefind functions give a meaning to the MIME types that are used
in GStreamer. A typefind function has the following definition:
</para>
<programlisting>
typedef GstCaps *(*GstTypeFindFunc) (GstBuffer *buf, gpointer priv);
</programlisting>
<para>
This typefind function will inspect a GstBuffer with data and will output
a GstCaps structure describing the type. If the typefind function does not
understand the buffer contents, it will return NULL.
</para>
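<para>
As a rough illustration (not the real mp3 typefinder), a typefind
function matching the definition above might check for an MPEG audio
sync word and return caps for it; both the check and the caps name
are simplifications:
</para>
<programlisting>
static GstCaps *
mp3_type_find (GstBuffer *buf, gpointer priv)
{
  guint8 *data = GST_BUFFER_DATA (buf);

  /* simplistic check for the MPEG audio sync word (11 set bits) */
  if (GST_BUFFER_SIZE (buf) &gt;= 2 &amp;&amp;
      data[0] == 0xff &amp;&amp; (data[1] &amp; 0xe0) == 0xe0)
    return gst_caps_new_simple ("audio/mpeg", NULL);

  return NULL; /* contents not recognized */
}
</programlisting>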
<para>
<application>GStreamer</application> has a typefind element in the set
of core elements
that can be used to determine the type of a given pad.
</para>
<para>
The next example will show how a typefind element can be inserted into a pipeline
to detect the media type of a file. It will output the capabilities of the pad into
an XML representation.
</para>
<programlisting>
#include &lt;stdlib.h&gt;
#include &lt;gst/gst.h&gt;

void type_found (GstElement *typefind, GstCaps* caps);

int
main (int argc, char *argv[])
{
  GstElement *bin, *filesrc, *typefind;

  gst_init (&amp;argc, &amp;argv);

  if (argc != 2) {
    g_print ("usage: %s &lt;filename&gt;\n", argv[0]);
    exit (-1);
  }

  /* create a new bin to hold the elements */
  bin = gst_bin_new ("bin");
  g_assert (bin != NULL);

  /* create a disk reader */
  filesrc = gst_element_factory_make ("filesrc", "disk_source");
  g_assert (filesrc != NULL);
  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);

  /* create the typefind element */
  typefind = gst_element_factory_make ("typefind", "typefind");
  g_assert (typefind != NULL);

  /* add objects to the main pipeline */
  gst_bin_add_many (GST_BIN (bin), filesrc, typefind, NULL);

  g_signal_connect (G_OBJECT (typefind), "have_type",
                    G_CALLBACK (type_found), NULL);

  gst_element_link (filesrc, typefind);

  /* start playing */
  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PLAYING);

  gst_bin_iterate (GST_BIN (bin));

  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_NULL);

  exit (0);
}
</programlisting>
<para>
We create a very simple pipeline with only a filesrc and the typefind
element in it. The sink pad of the typefind element is linked
to the source pad of the filesrc.
</para>
<para>
We attached a handler for the 'have_type' signal to the typefind element;
it will be called when the type of the media stream has been detected.
</para>
<para>
The typefind element will loop over all the registered types and
execute each of the typefind functions. As soon as one returns
a GstCaps pointer, the type_found function will be called:
</para>
<programlisting>
void
type_found (GstElement *typefind, GstCaps* caps)
{
  xmlDocPtr doc;
  xmlNodePtr parent;

  doc = xmlNewDoc ("1.0");
  doc-&gt;root = xmlNewDocNode (doc, NULL, "Capabilities", NULL);

  parent = xmlNewChild (doc-&gt;root, NULL, "Caps1", NULL);
  gst_caps_save_thyself (caps, parent);

  xmlDocDump (stdout, doc);
}
</programlisting>
<para>
In the type_found function we can print or inspect the type that has been
detected using the GstCaps APIs. In this example, we just print out the
XML representation of the caps structure to stdout.
</para>
<para>
A more useful option would be to use the registry to look up an element
that can handle this particular caps structure, or to use the
autoplugger to link this caps structure to, for example, a videosink.
</para>
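<para>
As a sketch of the registry approach, one could walk all element
factories and test which of them can sink the detected caps. This
assumes the 0.8-era functions
<function>gst_registry_pool_feature_list ()</function> and
<function>gst_element_factory_can_sink_caps ()</function>:
</para>
<programlisting>
/* caps is the GstCaps structure found by typefinding */
GList *walk = gst_registry_pool_feature_list (GST_TYPE_ELEMENT_FACTORY);

while (walk) {
  GstElementFactory *factory = GST_ELEMENT_FACTORY (walk-&gt;data);

  if (gst_element_factory_can_sink_caps (factory, caps))
    g_print ("%s can handle these caps\n",
             gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
  walk = g_list_next (walk);
}
</programlisting>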
</sect1>
<sect1 id="section-autoplugging-spider">
<title>Another approach to autoplugging</title>
<para>
The autoplug API is interesting, but often impractical. It is static;
it cannot deal with dynamic pipelines. An element that will
automatically figure out and decode the type is more useful.
Enter the spider.
</para>
<sect2>
<title>The spider element</title>
<para>
The spider element is a generalized autoplugging element. At this point (April 2002), it's
the best we've got; it can be inserted anywhere within a pipeline to perform caps
conversion, if possible. Consider the following gst-launch line:
<programlisting>
$ gst-launch filesrc location=my.mp3 ! spider ! osssink
</programlisting>
The spider will detect the type of the stream, autoplug it to the osssink's caps, and play
the pipeline. It's neat.
</para>
</sect2>
<sect2>
<title>Spider features</title>
<para>
<orderedlist>
<listitem>
<para>
Automatically typefinds the incoming stream.
</para>
</listitem>
<listitem>
<para>
Has request pads on the source side. This means that it can
autoplug one source stream into many sink streams. For example,
an MPEG1 system stream can have audio as well as video; that
pipeline would be represented in gst-launch syntax as
<programlisting>
$ gst-launch filesrc location=my.mpeg1 ! spider ! { queue ! osssink } spider.src_%d!
{ queue ! xvideosink }
</programlisting>
</para>
</listitem>
</orderedlist>
</para>
</sect2>
</sect1>
</chapter>

View file

@@ -1,7 +1,5 @@
<chapter id="chapter-clocks">
<title>Clocks in GStreamer</title>
<para>
WRITEME
</para>
</chapter>

View file

@@ -1,112 +0,0 @@
<chapter id="chapter-interfaces">
<title>Interfaces</title>
<para>
In <xref linkend="section-elements-properties"/>, you have learned how
to use <classname>GObject</classname> properties as a simple way to do
interaction between applications and elements. This method suffices for
the simple'n'straight settings, but fails for anything more complicated
than a getter and setter. For the more complicated use cases, &GStreamer;
uses interfaces based on the GLib <classname>GInterface</classname> type.
</para>
<para>
Most of the interfaces handled here will not contain any example code.
See the API references for details. Here, we will just describe the
scope and purpose of each interface.
</para>
<sect1 id="section-interfaces-mixer">
<title>The Mixer interface</title>
<para>
The mixer interface provides a uniform way to control the volume on a
hardware (or software) mixer. The interface is primarily intended to
be implemented by elements for audio inputs and outputs that talk
directly to the hardware (e.g. OSS or ALSA plugins).
</para>
<para>
Using this interface, it is possible to control a list of tracks
(such as Line-in, Microphone, etc.) from a mixer element. They can
be muted, their volume can be changed and, for input tracks, their
record flag can be set as well.
</para>
<para>
Example plugins implementing this interface include the OSS elements
(osssrc, osssink, ossmixer) and the ALSA plugins (alsasrc, alsasink
and alsamixer).
</para>
</sect1>
<sect1 id="section-interfaces-tuner">
<title>The Tuner interface</title>
<para>
The tuner interface is a uniform way to control inputs and outputs
on a multi-input selection device. This is primarily used for input
selection on elements for TV- and capture-cards.
</para>
<para>
Using this interface, it is possible to select one track from a list
of tracks supported by that tuner element. The tuner will then select
that track for media-processing internally. This can, for example, be
used to switch inputs on a TV-card (e.g. from Composite to S-video).
</para>
<para>
This interface is currently only implemented by the Video4linux and
Video4linux2 elements.
</para>
</sect1>
<sect1 id="section-interfaces-colorbalance">
<title>The Color Balance interface</title>
<para>
The colorbalance interface is a way to control video-related properties
on an element, such as brightness, contrast and so on. Its sole
reason for existence is that, as far as its authors know, there's no
way to dynamically register properties using
<classname>GObject</classname>.
</para>
<para>
The colorbalance interface is implemented by several plugins, including
xvimagesink and the Video4linux and Video4linux2 elements.
</para>
</sect1>
<sect1 id="section-interfaces-proprobe">
<title>The Property Probe interface</title>
<para>
The property probe is a way to autodetect allowed values for a
<classname>GObject</classname> property. Its primary use (and
the only thing that we currently use it for) is to autodetect
devices in several elements. For example, the OSS elements use
this interface to detect all OSS devices on a system. Applications
can then <quote>probe</quote> this property and get a list of
detected devices. Given the overlap between HAL and the practical
implementations of this interface, this might in time be deprecated
in favour of HAL.
</para>
<para>
This interface is currently implemented by many elements, including
the ALSA, OSS, Video4linux and Video4linux2 elements.
</para>
</sect1>
<sect1 id="section-interfaces-xoverlay">
<title>The X Overlay interface</title>
<para>
The X Overlay interface was created to solve the problem of embedding
video streams in an application window. The application provides an
X window to the element implementing this interface, and the element
will then draw on that window rather than creating
a new toplevel window. This is useful to embed video in video players.
</para>
<para>
This interface is implemented by, amongst others, the Video4linux and
Video4linux2 elements and by ximagesink, xvimagesink and sdlvideosink.
</para>
</sect1>
</chapter>

View file

@@ -1,54 +0,0 @@
<chapter id="chapter-metadata">
<title>Metadata</title>
<para>
&GStreamer; makes a clear distinction between two types of metadata, and
has support for both types. The first is stream tags, which describe the
content of a stream in a non-technical way. Examples include the author
of a song, the title of that very same song or the album it is a part of.
The other type of metadata is stream-info, which is a somewhat technical
description of the properties of a stream. This can include video size,
audio samplerate, codecs used and so on. Tags are handled using the
&GStreamer; tagging system. Stream-info can be retrieved from a
<classname>GstPad</classname>.
</para>
<sect1 id="section-streaminfo">
<title>Stream information</title>
<para>
Stream information can most easily be read by reading them from a
<classname>GstPad</classname>. This has already been discussed before
in <xref linkend="section-caps-metadata"/>. Therefore, we will skip
it here.
</para>
</sect1>
<sect1 id="section-tags-read">
<title>Tag reading</title>
<para>
Tag reading is remarkably simple in &GStreamer;. Every element supports
the <quote>found-tag</quote> signal, which will be fired each time
the element reads tags from the stream. A <classname>GstBin</classname>
will conveniently forward tags found by its children. Therefore, in most
applications, you will only need to connect to the
<quote>found-tag</quote> signal on the top-most bin in your pipeline,
and you will automatically retrieve all tags from the stream.
</para>
<para>
Note, however, that the <quote>found-tag</quote> signal might be fired
multiple times and by multiple elements in the pipeline. It is the
application's responsibility to put all those tags together and
display them to the user in a nice, coherent way.
</para>
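<para>
A minimal sketch of this pattern follows; the exact signature of the
<quote>found-tag</quote> handler should be verified against the API
reference for your version:
</para>
<programlisting>
static void
tag_found (GstElement *pipeline, GstElement *source,
           GstTagList *tags, gpointer user_data)
{
  gchar *title = NULL;

  if (gst_tag_list_get_string (tags, GST_TAG_TITLE, &amp;title)) {
    g_print ("title: %s\n", title);
    g_free (title);
  }
}

  /* ... after creating the top-most bin ... */
  g_signal_connect (G_OBJECT (pipeline), "found-tag",
                    G_CALLBACK (tag_found), NULL);
</programlisting>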
</sect1>
<sect1 id="section-tags-write">
<title>Tag writing</title>
<para>
WRITEME
</para>
</sect1>
</chapter>

View file

@@ -1,117 +0,0 @@
<chapter id="chapter-queryevents">
<title>Position tracking and seeking</title>
<para>
So far, we've looked at how to create a pipeline to do media processing
and how to make it run ("iterate"). Most application developers will be
interested in providing feedback to the user on media progress. Media
players, for example, will want to show a slider showing the progress in
the song, and usually also a label indicating stream length. Transcoding
applications will want to show a progress bar indicating what percentage of the task
is done. &GStreamer; has built-in support for doing all this using a
concept known as <emphasis>querying</emphasis>. Since seeking is very
similar, it will be discussed here as well. Seeking is done using the
concept of <emphasis>events</emphasis>.
</para>
<sect1 id="section-querying">
<title>Querying: getting the position or length of a stream</title>
<para>
Querying is defined as requesting a specific stream-property related
to progress tracking. This includes getting the length of a stream (if
available) or getting the current position. Those stream properties
can be retrieved in various formats such as time, audio samples, video
frames or bytes. The functions used are <function>gst_element_query
()</function> and <function>gst_pad_query ()</function>.
</para>
<para>
Obviously, using either of the above-mentioned functions requires the
application to know <emphasis>which</emphasis> element or pad to run
the query on. This is tricky, but there are some good sides to the
story. The good thing is that elements (or, rather, pads - since
<function>gst_element_query ()</function> internally calls
<function>gst_pad_query ()</function>) forward (<quote>dispatch</quote>)
events and queries to peer pads (or elements) if they don't handle them
themselves. The bad side is that some elements (or pads) will handle
events, but not the specific formats that you want, and therefore it
still won't work.
</para>
<para>
Most queries will, fortunately, work fine. Queries are always
dispatched backwards. This means, effectively, that it's easiest to
run the query on your video or audio output element, and it will take
care of dispatching the query to the element that knows the answer
(such as the current position or the media length; usually the demuxer
or decoder).
</para>
<programlisting>
#include &lt;gst/gst.h&gt;

gint
main (gint argc,
      gchar *argv[])
{
  GstElement *sink, *pipeline;
[..]

  /* run pipeline */
  do {
    gint64 len, pos;
    GstFormat fmt = GST_FORMAT_TIME;

    if (gst_element_query (sink, GST_QUERY_POSITION, &amp;fmt, &amp;pos) &amp;&amp;
        gst_element_query (sink, GST_QUERY_TOTAL, &amp;fmt, &amp;len)) {
      g_print ("Time: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
               GST_TIME_ARGS (pos), GST_TIME_ARGS (len));
    }
  } while (gst_bin_iterate (GST_BIN (pipeline)));
[..]
}
</programlisting>
<para>
If you are having problems with the dispatching behaviour, your best
bet is to manually decide which element to start running the query on.
You can get a list of supported formats and query-types with
<function>gst_element_get_query_types ()</function> and
<function>gst_element_get_formats ()</function>.
</para>
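<para>
For example, assuming both functions return zero-terminated arrays as
in the 0.8 API, the supported query types of an element can be listed
with a sketch like this:
</para>
<programlisting>
/* element is e.g. your audio or video sink */
const GstQueryType *types = gst_element_get_query_types (element);

while (types &amp;&amp; *types) {
  g_print ("supported query type: %d\n", *types);
  types++;
}
</programlisting>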
</sect1>
<sect1 id="section-eventsseek">
<title>Events: seeking (and more)</title>
<para>
Events work in a very similar way to queries. Dispatching, for
example, works exactly the same for events (and also has the same
limitations). Although there are more ways in which applications
and elements can interact using events, we will only focus on seeking
here. This is done using the seek-event. A seek-event contains a
seeking offset, a seek method (which indicates relative to what the
offset was given), a seek format (which is the unit of the offset,
e.g. time, audio samples, video frames or bytes) and optionally a
set of seeking-related flags (e.g. whether internal buffers should be
flushed). The behaviour of a seek is also wrapped in the function
<function>gst_element_seek ()</function>.
</para>
<programlisting>
#include &lt;gst/gst.h&gt;

static void
seek_to_time (GstElement *audiosink,
              gint64 time_nanoseconds)
{
  gst_element_seek (audiosink,
                    GST_SEEK_METHOD_SET | GST_FORMAT_TIME |
                    GST_SEEK_FLAG_FLUSH, time_nanoseconds);
}
</programlisting>
</sect1>
</chapter>

View file

@@ -48,7 +48,7 @@
fundamentally buffer-oriented rather than byte-oriented, the natural
solution to this problem is an element that can "buffer" the buffers
between the threads, in a thread-safe fashion. This element is the
queue, described more fully in <xref linkend="section-queue"/>. It doesn't
queue, described more fully in <xref linkend="chapter-queues"/>. It doesn't
matter if the queue is placed in the containing bin or in the thread
itself, but it needs to be present on one side or the other to enable
inter-thread communication.
@@ -165,132 +165,4 @@ main (int argc, char *argv[])
</programlisting>
</para>
</sect1>
<sect1 id="section-queue">
<title>Queue</title>
<para>
A queue is a filter element.
Queues can be used to link two elements in such a way that the data can
be buffered.
</para>
<para>
A buffer that arrives on a queue's sink pad will not automatically be pushed to the
next linked element but will be buffered. It will be pushed to the next
element as soon as gst_pad_pull () is called on the queue's source pad.
</para>
<para>
Queues are mostly used in conjunction with a thread bin to
provide an external link for the thread's elements. You could have one
thread feeding buffers into a queue and another
thread repeatedly pulling on the queue to feed its
internal elements.
</para>
<para>
Below is a figure of a two-threaded decoder. We have one thread (the main execution
thread) reading the data from a file, and another thread decoding the data.
</para>
<figure float="1" id="section-queues-img">
<title>a two-threaded decoder with a queue</title>
<mediaobject>
<imageobject>
<imagedata fileref="images/queue.&image;" format="&IMAGE;" />
</imageobject>
</mediaobject>
</figure>
<para>
The standard <application>GStreamer</application> queue implementation has some
properties that can be changed using the g_object_set () method. To set the
maximum number of buffers that can be queued to 30, do:
</para>
<programlisting>
g_object_set (G_OBJECT (queue), "max_level", 30, NULL);
</programlisting>
<para>
The following MP3 player shows you how to create the above pipeline
using a thread and a queue.
</para>
<programlisting>
/* example-begin queue.c */
#include &lt;stdlib.h&gt;
#include &lt;gst/gst.h&gt;

gboolean playing;

/* eos will be called when the src element has an end of stream */
void
eos (GstElement *element, gpointer data)
{
  g_print ("have eos, quitting\n");

  playing = FALSE;
}

int
main (int argc, char *argv[])
{
  GstElement *filesrc, *audiosink, *queue, *decode;
  GstElement *bin;
  GstElement *thread;

  gst_init (&amp;argc,&amp;argv);

  if (argc != 2) {
    g_print ("usage: %s &lt;mp3 filename&gt;\n", argv[0]);
    exit (-1);
  }

  /* create a new thread to hold the elements */
  thread = gst_thread_new ("thread");
  g_assert (thread != NULL);

  /* create a new bin to hold the elements */
  bin = gst_bin_new ("bin");
  g_assert (bin != NULL);

  /* create a disk reader */
  filesrc = gst_element_factory_make ("filesrc", "disk_source");
  g_assert (filesrc != NULL);
  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
  g_signal_connect (G_OBJECT (filesrc), "eos",
                    G_CALLBACK (eos), thread);

  queue = gst_element_factory_make ("queue", "queue");
  g_assert (queue != NULL);

  /* and an audio sink */
  audiosink = gst_element_factory_make ("osssink", "play_audio");
  g_assert (audiosink != NULL);

  decode = gst_element_factory_make ("mad", "decode");
  g_assert (decode != NULL);

  /* add objects to the main bin */
  gst_bin_add_many (GST_BIN (thread), decode, audiosink, NULL);
  gst_bin_add_many (GST_BIN (bin), filesrc, queue, thread, NULL);

  gst_element_link (filesrc, queue);
  gst_element_link_many (queue, decode, audiosink, NULL);

  /* start playing */
  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PLAYING);

  playing = TRUE;

  while (playing) {
    gst_bin_iterate (GST_BIN (bin));
  }

  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_NULL);

  return 0;
}
/* example-end queue.c */
</programlisting>
</sect1>
</chapter>

View file

@@ -19,7 +19,7 @@
syntax.
</para>
<para>
A simple commandline looks like:
A simple commandline to play an mp3 audio file looks like:
<screen>
gst-launch filesrc location=hello.mp3 ! mad ! osssink
@@ -33,6 +33,8 @@ gst-launch filesrc location=redpill.vob ! mpegdemux name=demux \
demux.video_00! { mpeg2dec ! xvideosink }
</screen>
<xref linkend="section-programs-gst-launch-more-examples"/> lists more gst-launch commandlines.
</para>
<para>
You can also use the parser in your own
@@ -132,6 +134,10 @@ main (int argc, char *argv[])
the g_value_convert routines. No error message will be displayed on an invalid
conversion, due to limitations in the value convert API.
</para>
<para>
The list of properties an element supports can be found out using
<userinput>gst-inspect element-name</userinput>.
</para>
</sect3>
<sect3>
<title>Bins, Threads, and Pipelines</title>
@@ -141,12 +147,88 @@ main (int argc, char *argv[])
<para>
A pipeline description between parentheses is placed into a bin. The open paren may be
preceded by a type name, as in <computeroutput>jackbin.( ... )</computeroutput> to make
a bin of a specified type. Square brackets make pipelines, and curly braces make
a bin of a specified type. Square brackets '[ ]' make pipelines, and curly braces '{ }' make
threads. The default toplevel bin type is a pipeline, although putting the whole
description within parentheses or braces can override this default.
</para>
</sect3>
</sect2>
<sect2 id="section-programs-gst-launch-more-examples">
<title>More Examples</title>
<para>
This section collects some more complex pipelines. The examples are split into several lines,
so make sure to include the trailing backslashes.
When modifying a pipeline and searching for the right element to insert, a grep of the gst-inspect
output often gives a starting point:
<screen>
gst-inspect | grep "avi"
</screen>
Another way is to do:
<screen>
gst-launch filesrc location=video.avi ! decodebin name=d ! xvimagesink d. ! { queue ! alsasink } -v
</screen>
and look at the output to see which plugins it chooses.
</para>
<para>
Play a remote mp3 audio file:
<screen>
gst-launch gnomevfssrc location=http://www.server.org/hello.mp3 ! mad ! alsasink
</screen>
</para>
<para>
Play a local mp3 audio file with visualisation:
<screen>
gst-launch filesrc location=Hello.mp3 ! mad ! tee name=t ! \
{ queue ! osssink } \
{ t. ! queue ! synaesthesia ! ffmpegcolorspace ! xvimagesink }
</screen>
</para>
<para>
Play a local ogg audio file:
<screen>
gst-launch filesrc location=file.ogg ! oggdemux ! vorbisdec ! audioconvert ! audioscale ! alsasink
</screen>
</para>
<para>
Play a local ogg video file:
<screen>
gst-launch filesrc location=file.ogg ! oggdemux name=demux \
{ demux. ! queue ! theoradec ! ffmpegcolorspace ! videoscale ! xvimagesink } \
{ demux. ! queue ! vorbisdec ! audioconvert ! audioscale ! alsasink }
</screen>
</para>
<para>
Play a local avi video file:
<screen>
gst-launch filesrc location=video.avi ! mpegdemux name=demux \
demux.audio_00! { queue ! ac3parse ! a52dec ! osssink } \
demux.video_00! { queue ! mpeg2dec ! xvideosink }
</screen>
</para>
<para>
Transcoding an audio file from one format into another:
<screen>
gst-launch filesrc location=file.ogg ! oggdemux ! vorbisdec ! audioconvert ! flacenc ! filesink location=file.flac
</screen>
<screen>
gst-launch filesrc location=file.mp3 ! id3demux ! mad ! audioconvert ! rawvorbisenc ! oggmux ! filesink location=file.ogg
</screen>
</para>
<para>
Transcoding a DVD video into an Ogg video:
<screen>
gst-launch-0.8 oggmux name=mux ! filesink location=/tmp/file.ogg \
{ dvdreadsrc location=/dev/cdrom ! dvddemux name=demux.audio_00 ! \
{ queue ! a52dec ! audioconvert ! rawvorbisenc ! queue ! mux. } \
{ demux.video_00 ! queue ! mpeg2dec ! ffcolorspace ! videoscale ! video/x-raw-yuv,width=384,height=288 ! tee name=t ! \
{ queue ! theoraenc ! queue ! mux. } \
} \
} \
{ t. ! queue ! ffcolorspace ! ximagesink }
</screen>
</para>
</sect2>
</sect1>
<sect1 id="section-programs-gst-inspect">

View file

@@ -12,6 +12,18 @@
<variablelist>
<varlistentry>
<term>2 Nov 2004</term>
<listitem>
<para>
<emphasis>zaheerm</emphasis>:
wtay: unfair u fixed the bug i was using as a feature!
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>14 Oct 2004</term>
<listitem>

View file

@@ -1,147 +1,49 @@
<chapter id="chapter-bins">
<title>Bins</title>
<para>
A bin is a container element. You can add elements to a bin. Since a
bin is an element itself, a bin can be handled in the same way as any
other element. Therefore, the whole previous chapter (<xref
linkend="chapter-elements"/>) applies to bins as well.
</para>
<sect1 id="section-bins">
<title>What are bins</title>
<para>
Bins allow you to combine a group of linked elements into one
logical element. You do not deal with the individual elements
anymore but with just one element, the bin. We will see that
this is extremely powerful when you are going to construct
complex pipelines since it allows you to break up the pipeline
in smaller chunks.
A bin is a container element. You can add elements to a bin. Since a bin is
an element itself, it can also be added to another bin.
</para>
<para>
The bin will also manage the elements contained in it. It will
figure out how the data will flow in the bin and generate an
optimal plan for that data flow. Plan generation is one of the
most complicated procedures in &GStreamer;. You will learn more
about this process, called scheduling, in <xref
linkend="chapter-scheduler"/>.
Bins allow you to combine a group of linked elements into one logical element. You do
not deal with the individual elements anymore but with just one element, the bin.
We will see that this is extremely powerful when you are going to construct
complex pipelines since it allows you to break up the pipeline in smaller chunks.
</para>
<para>
The bin will also manage the elements contained in it. It will figure out how
the data will flow in the bin and generate an optimal plan for that data flow. Plan
generation is one of the most complicated procedures in GStreamer.
</para>
<figure float="1" id="section-bin-img">
<title>Visualisation of a bin with some elements in it</title>
<mediaobject>
<imageobject>
<imagedata fileref="images/bin-element.&image;" format="&IMAGE;"/>
<imagedata fileref="images/bin-element.&image;" format="&IMAGE;" />
</imageobject>
</mediaobject>
</figure>
<para>
There are two specialized types of bins available to the
&GStreamer; programmer:
</para>
There are two specialized bins available to the GStreamer programmer:
<itemizedlist>
<listitem>
<para>
A pipeline: a generic container that allows scheduling of the
a pipeline: a generic container that allows scheduling of the
containing elements. The toplevel bin has to be a pipeline.
Every application thus needs at least one of these. Applications
can iterate pipelines using <function>gst_bin_iterate
()</function> to make it process data while in the playing state.
Every application thus needs at least one of these.
</para>
</listitem>
<listitem>
<para>
A thread: a bin that will be run in a separate execution thread.
a thread: a bin that will be run in a separate execution thread.
You will have to use this bin if you have to carefully
synchronize audio and video, or for buffering. You will learn
more about threads in <xref linkend="chapter-threads"/>.
</para>
</listitem>
</itemizedlist>
</sect1>
<sect1 id="section-bin-create">
<title>Creating a bin</title>
<para>
Bins are created in the same way that other elements are created,
i.e. using an element factory. There are also convenience functions
available (<function>gst_bin_new ()</function>,
<function>gst_thread_new ()</function> and <function>gst_pipeline_new
()</function>). To add elements to a bin or remove elements from a
bin, you can use <function>gst_bin_add ()</function> and
<function>gst_bin_remove ()</function>. Note that the bin that you
add an element to will take ownership of that element. If you
destroy the bin, the element will be dereferenced with it. If you
remove an element from a bin, it will be dereferenced automatically.
</para>
<programlisting>
int
main (int argc,
      char *argv[])
{
  GstElement *bin, *pipeline, *source, *sink;

  /* init */
  gst_init (&amp;argc, &amp;argv);

  /* create */
  pipeline = gst_pipeline_new ("my_pipeline");
  bin = gst_bin_new ("my_bin");
  source = gst_element_factory_make ("fakesrc", "source");
  sink = gst_element_factory_make ("fakesink", "sink");

  /* set up pipeline */
  gst_bin_add_many (GST_BIN (bin), source, sink, NULL);
  gst_bin_add (GST_BIN (pipeline), bin);
  gst_element_link (source, sink);
[..]
}
</programlisting>
<para>
There are various functions to look up elements in a bin. You can
also get a list of all elements that a bin contains using the function
<function>gst_bin_get_list ()</function>. See the API references of
<ulink type="http"
url="../../GStreamer/html/GstBin.html"><classname>GstBin</classname></ulink>
for details.
</para>
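<para>
As a sketch, and assuming <function>gst_bin_get_list ()</function>
returns a <classname>GList</classname> of the bin's children as in
the 0.8 API, iterating the elements could look like this:
</para>
<programlisting>
const GList *children = gst_bin_get_list (GST_BIN (bin));

while (children) {
  GstElement *child = GST_ELEMENT (children-&gt;data);

  g_print ("child: %s\n", gst_element_get_name (child));
  children = g_list_next (children);
}
</programlisting>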
</sect1>
<sect1 id="section-bin-custom">
<title>Custom bins</title>
<para>
The application programmer can create custom bins packed with elements
to perform a specific task. This allows you, for example, to write
an Ogg/Vorbis player with just the following lines of code:
</para>
<programlisting>
int
main (int argc,
      char *argv[])
{
  GstElement *player;

  /* init */
  gst_init (&amp;argc, &amp;argv);

  /* create player */
  player = gst_element_factory_make ("oggvorbisplayer", "player");

  /* set the source audio file */
  g_object_set (G_OBJECT (player), "location", "helloworld.ogg", NULL);

  /* start playback */
  gst_element_set_state (GST_ELEMENT (player), GST_STATE_PLAYING);
[..]
}
</programlisting>
<para>
Custom bins can be created with a plugin or an XML description. You
will find more information about creating custom bins in the <ulink
type="http"
url="http://gstreamer.freedesktop.org/data/doc/gstreamer/head/pwg/html/index.html">Plugin
Writers Guide</ulink>.
</para>
</sect1>
</chapter>

View file

@@ -1,15 +1,4 @@
<chapter id="chapter-data">
<title>Buffers and Events</title>
<para>
The data flowing through a pipeline consists of a combination of
buffers and events. Buffers contain the actual pipeline data. Events
contain control information, such as seeking information and
end-of-stream notifiers. All this will flow through the pipeline
automatically when it's running. This chapter is mostly meant to
explain the concept to you; you don't need to do anything for this.
</para>
<sect1 id="section-buffers">
<chapter id="chapter-buffers">
<title>Buffers</title>
<para>
Buffers contain the data that will flow through the pipeline you have
@@ -19,22 +8,22 @@
to deal with buffers yourself; the elements will do that for you.
</para>
<para>
A buffer consists, amongst others, of:
</para>
A buffer consists of:
<itemizedlist>
<listitem>
<para>
A pointer to a piece of memory.
a pointer to a piece of memory.
</para>
</listitem>
<listitem>
<para>
The size of the memory.
the size of the memory.
</para>
</listitem>
<listitem>
<para>
A timestamp for the buffer.
a timestamp for the buffer.
</para>
</listitem>
<listitem>
@@ -45,55 +34,33 @@
</para>
</listitem>
</itemizedlist>
<para>
The simple case is that a buffer is created, memory allocated, data
put in it, and passed to the next element. That element reads the
data, does something (like creating a new buffer and decoding into
it), and unreferences the buffer. This causes the data to be freed
and the buffer to be destroyed. A typical video or audio decoder
works like this.
</para>
<para>
There are more complex scenarios, though. Elements can modify buffers
in-place, i.e. without allocating a new one. Elements can also write
to hardware memory (such as from video-capture sources) or to memory
allocated from the X-server using XShm. Buffers can be read-only,
and so on.
</para>
</sect1>
<sect1 id="section-events">
<title>Events</title>
<para>
Events are control particles that are sent both up- and downstream in
a pipeline along with buffers. Downstream events notify fellow elements
of stream states. Possible events include discontinuities, flushes,
end-of-stream notifications and so on. Upstream events are used both
in application-element interaction as well as element-element interaction
to request changes in stream state, such as seeks. For applications,
only upstream events are important. Downstream events are just
explained to get a more complete picture of the data concept.
<!-- FIXME: this is outdated, there is no GstBufferPool in gst-0.8.X -->
GStreamer provides functions to create custom buffer creation/destruction algorithms, called
a <classname>GstBufferPool</classname>. This makes it possible to efficiently
allocate and destroy buffer memory. It also makes it possible to exchange memory between
elements by passing the <classname>GstBufferPool</classname>. A video element can,
for example, create a custom buffer allocation algorithm that creates buffers with XSHM
as the buffer memory. An element can use this algorithm to create and fill the buffer
with data.
</para>
<para>
Since most applications seek in time units, our example below does so
too:
</para>
<programlisting>
static void
seek_to_time (GstElement *element,
              guint64 time_ns)
{
  GstEvent *event;

  event = gst_event_new_seek (GST_SEEK_METHOD_SET |
                              GST_FORMAT_TIME,
                              time_ns);
  gst_element_send_event (element, event);
}
</programlisting>
<para>
The function <function>gst_element_seek ()</function> is a shortcut
for this. This is mostly just to show how it all works.
The simple case is that a buffer is created, memory allocated, data put
in it, and passed to the next element. That element reads the data, does
something (like creating a new buffer and decoding into it), and
unreferences the buffer. This causes the data to be freed and the buffer
to be destroyed. A typical MPEG audio decoder works like this.
</para>
</sect1>
<para>
A more complex case is when the filter modifies the data in place. It
does so and simply passes on the buffer to the next element. This is just
as easy to deal with. An element that works in place has to be careful when
the buffer is used in more than one element; a copy-on-write has to be made
in this situation, as the sketch below shows.
</para>
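<para>
The following is a hedged sketch of what an in-place element's chain
function might do; it assumes the 0.8 data-passing API, where
<function>gst_pad_get_parent ()</function> returns the owning element
and <function>gst_buffer_copy_on_write ()</function> returns a
writable buffer:
</para>
<programlisting>
static void
chain (GstPad *pad, GstData *data)
{
  GstBuffer *buf = GST_BUFFER (data);
  GstElement *element = gst_pad_get_parent (pad);
  GstPad *srcpad = gst_element_get_pad (element, "src");

  /* get a writable buffer; copies only if someone else holds a reference */
  buf = gst_buffer_copy_on_write (buf);

  /* ... modify GST_BUFFER_DATA (buf) in place here ... */

  gst_pad_push (srcpad, GST_DATA (buf));
}
</programlisting>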
</chapter>

View file

@@ -1,82 +1,85 @@
<chapter id="chapter-elements" xreflabel="Elements">
<chapter id="chapter-elements">
<title>Elements</title>
<para>
The most important object in &GStreamer; for the application programmer
is the <ulink type="http"
url="../../gstreamer/html/GstElement.html"><classname>GstElement</classname></ulink>
object. An element is the basic building block for a media pipeline. All
the different high-level components you will use are derived from
<classname>GstElement</classname>. Every decoder, encoder, demuxer, video
or audio output is in fact a <classname>GstElement</classname>
The most important object in <application>GStreamer</application> for the
application programmer is the <ulink type="http"
url="../../gstreamer/html/GstElement.html"><classname>GstElement</classname>
</ulink>object.
</para>
<sect1 id="section-elements-design" xreflabel="What are elements?">
<title>What are elements?</title>
<sect1 id="section-elements-design">
<title>What is an element?</title>
<para>
For the application programmer, elements are best visualized as black
boxes. On the one end, you might put something in, the element does
something with it and something else comes out at the other side. For
a decoder element, for example, you'd put in encoded data, and the
element would output decoded data. In the next chapter (see <xref
linkend="chapter-pads"/>), you will learn more about data input and
output in elements, and how you can set that up in your application.
An element is the basic building block for the media pipeline.
All the different high-level components you are going to use are
derived from <ulink type="http" url="../../gstreamer/html/GstElement.html">
<classname>GstElement</classname></ulink>. This means that a
lot of functions you are going to use operate on objects of this class.
</para>
<para>
Elements, from the perspective of GStreamer, are viewed as "black boxes"
with a number of different aspects. One of these aspects is the presence
of "pads" (see <xref linkend="chapter-pads"/>), or link points.
This terminology arises from soldering; pads are where wires can be
attached.
</para>
</sect1>
<sect1 id="section-elements-types">
<title>Types of elements</title>
<sect2 id="section-elements-src">
<title>Source elements</title>
<para>
Source elements generate data for use by a pipeline, for example
reading from disk or from a sound card. <xref
linkend="section-element-srcimg"/> shows how we will visualise
a source element. We always draw a source pad to the right of
the element.
reading from disk or from a sound card.
</para>
<para>
<xref linkend="section-element-srcimg"/> shows how we will visualise
a source element.
We always draw a source pad to the right of the element.
</para>
<figure float="1" id="section-element-srcimg">
<title>Visualisation of a source element</title>
<mediaobject>
<imageobject>
<imagedata fileref="images/src-element.&image;"
format="&IMAGE;"/>
<imagedata fileref="images/src-element.&image;" format="&IMAGE;" />
</imageobject>
</mediaobject>
</figure>
<para>
Source elements do not accept data, they only generate data. You can
see this in the figure because it only has a source pad (on the
right). A source pad can only generate data.
see this in the figure because it only has a source pad. A source
pad can only generate data.
</para>
</sect2>
<sect2 id="section-elements-filter">
<title>Filters, convertors, demuxers, muxers and codecs</title>
<title>Filters and codecs</title>
<para>
Filters and filter-like elements have both input and output pads.
They operate on data that they receive on their input (sink) pads,
and will provide data on their output (source) pads. Examples of
such elements are a volume element (filter), a video scaler
(convertor), an Ogg demuxer or a Vorbis decoder.
Filter elements have both input and output pads. They operate on
data they receive in their sink pads and produce data on their source
pads. For example, MPEG decoders and volume filters would fall into
this category.
</para>
<para>
Filter-like elements can have any number of source or sink pads. A
video demuxer, for example, would have one sink pad and several
(1-N) source pads, one for each elementary stream contained in the
container format. Decoders, on the other hand, will only have one
source pad and one sink pad.
Elements are not constrained as to the number of pads they might have;
for example, a video mixer might have two input pads (the images of
the two different video streams) and one output pad.
</para>
<figure float="1" id="section-element-filterimg">
<title>Visualisation of a filter element</title>
<mediaobject>
<imageobject>
<imagedata fileref="images/filter-element.&image;"
format="&IMAGE;"/>
<imagedata fileref="images/filter-element.&image;" format="&IMAGE;" />
</imageobject>
</mediaobject>
</figure>
<para>
<xref linkend="section-element-filterimg"/> shows how we will
visualise a filter-like element. This specific element has one source
pad and one sink pad. Sink pads, receiving input data, are depicted
at the left of the element; source pads are still on the right.
<xref linkend="section-element-filterimg"/> shows how we will visualise
a filter element.
This element has one sink (input) pad and one source (output) pad.
Sink pads are drawn on the left of the element.
</para>
<figure float="1" id="section-element-multifilterimg">
<title>Visualisation of a filter element with
@@ -89,14 +92,12 @@
</mediaobject>
</figure>
<para>
<xref linkend="section-element-multifilterimg"/> shows another
filter-like element, this one having more than one output (source)
pad. An example of one such element could, for example, be an Ogg
demuxer for an Ogg stream containing both audio and video. One
source pad will contain the elementary video stream, another will
contain the elementary audio stream. Demuxers will generally fire
signals when a new pad is created. The application programmer can
then handle the new elementary stream in the signal handler.
<xref linkend="section-element-multifilterimg"/> shows the visualisation of a filter element with
more than one output pad. An example of such a filter is the AVI
demultiplexer. This element will parse the input data and
extract the audio and video data. Most of these filters dynamically
send out a signal when a new pad is created so that the application
programmer can link an arbitrary element to the newly created pad.
</para>
</sect2>
@@ -112,400 +113,10 @@
<title>Visualisation of a sink element</title>
<mediaobject>
<imageobject>
<imagedata fileref="images/sink-element.&image;"
format="&IMAGE;" />
<imagedata fileref="images/sink-element.&image;" format="&IMAGE;" />
</imageobject>
</mediaobject>
</figure>
</sect2>
</sect1>
<sect1 id="section-elements-create">
<title>Creating a <classname>GstElement</classname></title>
<para>
The simplest way to create an element is to use <ulink type="http"
url="&URLAPI;GstElementFactory.html#gst-element-factory-make"><function>gst_element_factory_make
()</function></ulink>. This function takes a factory name and an
element name for the newly created element. The name of the element
is something you can use later on to look up the element in a bin,
for example. The name will also be used in debug output. You can
pass <symbol>NULL</symbol> as the name argument to get a unique,
default name.
</para>
<para>
When you don't need the element anymore, you need to unref it using
<ulink type="http"
url="&URLAPI;GstObject.html#gst-object-unref"><function>gst_object_unref
()</function></ulink>. This decreases the reference count for the
element by 1. An element has a refcount of 1 when it gets created.
An element gets destroyed completely when the refcount is decreased
to 0.
</para>
<para>
The following example &EXAFOOT; shows how to create an element named
<emphasis>source</emphasis> from the element factory named
<emphasis>fakesrc</emphasis>. It checks if the creation succeeded.
After checking, it unrefs the element.
</para>
<programlisting><![CDATA[
#include <gst/gst.h>

int
main (int argc,
      char *argv[])
{
  GstElement *element;

  /* init GStreamer */
  gst_init (&argc, &argv);

  /* create element */
  element = gst_element_factory_make ("fakesrc", "source");
  if (!element) {
    g_print ("Failed to create element of type 'fakesrc'\n");
    return -1;
  }

  gst_object_unref (GST_OBJECT (element));

  return 0;
}
]]></programlisting>
<para>
<function>gst_element_factory_make</function> is actually a shorthand
for a combination of two functions. A <ulink type="http"
url="&URLAPI;GstElement.html"><classname>GstElement</classname></ulink>
object is created from a factory. To create the element, you have to
get access to a <ulink type="http"
url="&URLAPI;GstElementFactory.html"><classname>GstElementFactory</classname></ulink>
object using a unique factory name. This is done with <ulink type="http"
url="&URLAPI;GstElementFactory.html#gst-element-factory-find"><function>gst_element_factory_find
()</function></ulink>.
</para>
<para>
The following code fragment is used to get a factory that can be used
to create the <emphasis>fakesrc</emphasis> element, a fake data source.
The function <ulink type="http"
url="&URLAPI;GstElementFactory.html#gst-element-factory-create"><function>gst_element_factory_create
()</function></ulink> will use the element factory to create an
element with the given name.
</para>
<programlisting><![CDATA[
#include <gst/gst.h>

int
main (int argc,
      char *argv[])
{
  GstElementFactory *factory;
  GstElement *element;

  /* init GStreamer */
  gst_init (&argc, &argv);

  /* create element, method #2 */
  factory = gst_element_factory_find ("fakesrc");
  if (!factory) {
    g_print ("Failed to find factory of type 'fakesrc'\n");
    return -1;
  }
  element = gst_element_factory_create (factory, "source");

  gst_object_unref (GST_OBJECT (element));

  return 0;
}
]]></programlisting>
</sect1>
<sect1 id="section-elements-properties">
<title>Using an element as a <classname>GObject</classname></title>
<para>
A <ulink type="http"
url="&URLAPI;GstElement.html"><classname>GstElement</classname></ulink>
can have several properties which are implemented using standard
<classname>GObject</classname> properties. The usual
<classname>GObject</classname> methods to query, set and get
property values and <classname>GParamSpecs</classname> are
therefore supported.
</para>
<para>
Every <classname>GstElement</classname> inherits at least one
property from its parent <classname>GstObject</classname>: the
"name" property. This is the name you provide to the functions
<function>gst_element_factory_make ()</function> or
<function>gst_element_factory_create ()</function>. You can get
and set this property using the functions
<function>gst_object_set_name</function> and
<function>gst_object_get_name</function> or use the
<classname>GObject</classname> property mechanism as shown below.
</para>
<programlisting><![CDATA[
#include <gst/gst.h>

int
main (int argc,
      char *argv[])
{
  GstElement *element;
  gchar *name;

  /* init GStreamer */
  gst_init (&argc, &argv);

  /* create element */
  element = gst_element_factory_make ("fakesrc", "source");

  /* get name */
  g_object_get (G_OBJECT (element), "name", &name, NULL);
  g_print ("The name of the element is '%s'.\n", name);
  g_free (name);

  gst_object_unref (GST_OBJECT (element));

  return 0;
}
]]></programlisting>
<para>
Most plugins provide additional properties to provide more information
about their configuration or to configure the element.
<command>gst-inspect</command> is a useful tool to query the properties
of a particular element; it will also use property introspection to give
a short explanation about the function of the property and about the
parameter types and ranges it supports. See the appendix for details
about <command>gst-inspect</command>.
</para>
<para>
For more information about <classname>GObject</classname>
properties we recommend you read the <ulink
url="http://developer.gnome.org/doc/API/2.0/gobject/index.html"
type="http">GObject manual</ulink> and an introduction to <ulink
url="http://le-hacker.org/papers/gobject/index.html" type="http">The
Glib Object system</ulink>.
</para>
<para>
A <ulink type="http" url="&URLAPI;gstreamer/html/GstElementFactory.html">
<classname>GstElement</classname></ulink> also provides various
<classname>GObject</classname> signals that can be used as a flexible
callback mechanism. Here, too, you can use <command>gst-inspect</command>
to see which signals a specific element supports. Together, signals
and properties are the most basic way in which elements and
applications interact.
</para>
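<para>
For instance, setting a property uses the standard
<function>g_object_set ()</function> mechanism; the property used
below is only illustrative and depends on the element:
</para>
<programlisting><![CDATA[
  /* the "location" property exists on e.g. filesrc; check gst-inspect */
  g_object_set (G_OBJECT (element), "location", "/tmp/data.mp3", NULL);
]]></programlisting>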
</sect1>
<sect1 id="section-elements-factories">
<title>More about element factories</title>
<para>
In the previous section, we briefly introduced the <ulink type="http"
url="&URLAPI;GstElement.html"><classname>GstElementFactory</classname></ulink>
object already as a way to create instances of an element. Element
factories, however, are much more than just that. Element factories
are the basic types retrieved from the &GStreamer; registry; they
describe all plugins and elements that &GStreamer; can create. This
means that element factories are useful for automated element
instancing, such as what autopluggers do, and for creating lists
of available elements, such as what pipeline editing applications
(e.g. <ulink type="http"
url="http://gstreamer.freedesktop.org/modules/gst-editor.html">&GStreamer;
Editor</ulink>) do.
</para>
<sect2 id="section-elements-factories-details">
<title>Getting information about an element using a factory</title>
<para>
Tools like <command>gst-inspect</command> will provide some generic
information about an element, such as the person that wrote the
plugin, a descriptive name (and a shortname), a rank and a category.
The category can be used to get the type of the element that can
be created using this element factory. Examples of categories include
<classname>Codec/Decoder/Video</classname> (video decoder),
<classname>Codec/Encoder/Video</classname> (video encoder),
<classname>Source/Video</classname> (a video generator),
<classname>Sink/Video</classname> (a video output), and all these
exist for audio as well, of course. Then, there's also
<classname>Codec/Demuxer</classname> and
<classname>Codec/Muxer</classname> and a whole lot more.
<command>gst-inspect</command> will give a list of all factories, and
<command>gst-inspect &lt;factory-name&gt;</command> will list all
of the above information, and a lot more.
</para>
<programlisting><![CDATA[
#include <gst/gst.h>

int
main (int argc,
      char *argv[])
{
  GstElementFactory *factory;

  /* init GStreamer */
  gst_init (&argc, &argv);

  /* get factory */
  factory = gst_element_factory_find ("sinesrc");
  if (!factory) {
    g_print ("You don't have the 'sinesrc' element installed, go get it!\n");
    return -1;
  }

  /* display information */
  g_print ("The '%s' element is a member of the category %s.\n"
           "Description: %s\n",
      gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)),
      gst_element_factory_get_klass (factory),
      gst_element_factory_get_description (factory));

  return 0;
}
]]></programlisting>
<para>
You can use <function>gst_registry_pool_feature_list (GST_TYPE_ELEMENT_FACTORY)</function>
to get a list of all the element factories that &GStreamer; knows
about.
</para>
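<para>
A sketch of that, assuming the returned list holds
<classname>GstPluginFeature</classname> items:
</para>
<programlisting><![CDATA[
GList *features, *walk;

features = gst_registry_pool_feature_list (GST_TYPE_ELEMENT_FACTORY);
for (walk = features; walk != NULL; walk = g_list_next (walk)) {
  GstPluginFeature *feature = GST_PLUGIN_FEATURE (walk->data);

  g_print ("factory: %s\n", gst_plugin_feature_get_name (feature));
}
]]></programlisting>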
</sect2>
<sect2 id="section-elements-factories-padtemplates">
<title>Finding out what pads an element can contain</title>
<para>
Perhaps the most powerful feature of element factories is that
they contain a full description of the pads that the element
can generate, and the capabilities of those pads (in layman's terms:
what types of media can stream over those pads), without actually
having to load those plugins into memory. This can be used
to provide a codec selection list for encoders, or it can be used
for autoplugging purposes for media players. All current
&GStreamer;-based media players and autopluggers work this way.
We'll look closer at these features as we learn about
<classname>GstPad</classname> and <classname>GstCaps</classname>
in the next chapter: <xref linkend="chapter-pads"/>
</para>
</sect2>
</sect1>
<sect1 id="section-elements-link" xreflabel="Linking elements">
<title>Linking elements</title>
<para>
By linking a source element with zero or more filter-like
elements and finally a sink element, you set up a media
pipeline. Data will flow through the elements. This is the
basic concept of media handling in &GStreamer;.
</para>
<figure float="1" id="section-link">
<title>Visualisation of three linked elements</title>
<mediaobject>
<imageobject>
<imagedata fileref="images/linked-elements.&image;"
format="&IMAGE;"/>
</imageobject>
</mediaobject>
</figure>
<para>
By linking these three elements, we have created a very simple
chain of elements. The effect of this will be that the output of
the source element (<quote>element1</quote>) will be used as input
for the filter-like element (<quote>element2</quote>). The
filter-like element will do something with the data and send the
result to the final sink element (<quote>element3</quote>).
</para>
<para>
Imagine the above graph as a simple Ogg/Vorbis audio decoder. The
source is a disk source which reads the file from disc. The second
    element is an Ogg/Vorbis audio decoder. The sink element is your
soundcard, playing back the decoded audio data. We will use this
simple graph to construct an Ogg/Vorbis player later in this manual.
</para>
<para>
In code, the above graph is written like this:
</para>
<programlisting>
int
main (int argc,
char *argv[])
{
GstElement *source, *filter, *sink;
/* init */
gst_init (&amp;argc, &amp;argv);
/* create elements */
source = gst_element_factory_make ("fakesrc", "source");
filter = gst_element_factory_make ("identity", "filter");
sink = gst_element_factory_make ("fakesink", "sink");
/* link */
gst_element_link_many (source, filter, sink, NULL);
[..]
}
</programlisting>
<para>
For more specific behaviour, there are also the functions
<function>gst_element_link ()</function> and
<function>gst_element_link_pads ()</function>. You can also obtain
references to individual pads and link those using various
<function>gst_pad_link_* ()</function> functions. See the API
references for more details.
</para>
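    <para>
      As a quick sketch of the second variant: the fragment below links the
      elements from the previous example by explicit pad name. The names
      <quote>src</quote> and <quote>sink</quote> are assumed here; the
      actual pad names depend on the elements involved.
    </para>
    <programlisting>
  /* link the "src" pad of filter to the "sink" pad of sink explicitly */
  gst_element_link_pads (filter, "src", sink, "sink");
</programlisting>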
</sect1>
<sect1 id="section-elements-states">
<title>Element States</title>
<para>
After being created, an element will not actually perform any actions
    yet. You need to change an element's state to make it do something.
&GStreamer; knows four element states, each with a very specific
meaning. Those four states are:
</para>
<itemizedlist>
<listitem>
<para>
<classname>GST_STATE_NULL</classname>: this is the default state.
This state will deallocate all resources held by the element.
</para>
</listitem>
<listitem>
<para>
<classname>GST_STATE_READY</classname>: in the ready state, an
element has allocated all of its global resources, that is,
resources that can be kept within streams. You can think about
opening devices, allocating buffers and so on. However, the
        stream is not opened in this state, so the stream position is
automatically zero. If a stream was previously opened, it should
be closed in this state, and position, properties and such should
be reset.
</para>
</listitem>
<listitem>
<para>
<classname>GST_STATE_PAUSED</classname>: in this state, an
element has opened the stream, but is not actively processing
it. An element should not modify the stream's position, data or
anything else in this state. When set back to PLAYING, it should
continue processing at the point where it left off as soon as
possible.
</para>
</listitem>
<listitem>
<para>
<classname>GST_STATE_PLAYING</classname>: in the PLAYING state,
an element does exactly the same as in the PAUSED state, except
that it actually processes data.
</para>
</listitem>
</itemizedlist>
<para>
You can change the state of an element using the function
<function>gst_element_set_state ()</function>. If you set an element
to another state, &GStreamer; will internally traverse all intermediate
states. So if you set an element from NULL to PLAYING, &GStreamer;
will internally set the element to READY and PAUSED in between.
</para>
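    <para>
      As a small sketch (assuming <function>element</function> is any
      element created as shown before), the following two calls are all
      that is needed; the intermediate READY and PAUSED transitions happen
      internally:
    </para>
    <programlisting>
  /* NULL -&gt; READY -&gt; PAUSED -&gt; PLAYING, traversed internally */
  gst_element_set_state (element, GST_STATE_PLAYING);

  /* ... stream for a while ... */

  /* PLAYING -&gt; PAUSED -&gt; READY -&gt; NULL, also traversed internally */
  gst_element_set_state (element, GST_STATE_NULL);
</programlisting>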
<para>
Even though an element in <classname>GST_STATE_PLAYING</classname>
is ready for data processing, it will not necessarily do that. If
the element is placed in a thread (see <xref
linkend="chapter-threads"/>), it will process data automatically.
In other cases, however, you will need to <emphasis>iterate</emphasis>
the element's container.
</para>
</sect1>
</chapter>

View file

@ -1,141 +1,179 @@
<chapter id="chapter-helloworld">
<chapter id="chapter-hello-world">
<title>Your first application</title>
<para>
This chapter will summarize everything you've learned in the previous
chapters. It describes all aspects of a simple &GStreamer; application,
including initializing libraries, creating elements, packing elements
together in a pipeline and playing this pipeline. By doing all this,
you will be able to build a simple Ogg/Vorbis audio player.
This chapter describes the most rudimentary aspects of a
<application>GStreamer</application> application, including initializing
the libraries, creating elements, packing them into a pipeline and playing,
pausing and stopping the pipeline.
</para>
<sect1 id="section-helloworld">
<sect1 id="section-hello-world">
<title>Hello world</title>
<para>
We're going to create a simple first application, a simple Ogg/Vorbis
command-line audio player. For this, we will use only standard
&GStreamer; components. The player will read a file specified on
the command-line. Let's get started!
</para>
<para>
We've learned, in <xref linkend="chapter-init"/>, that the first thing
to do in your application is to initialize &GStreamer; by calling
<function>gst_init ()</function>. Also, make sure that the application
includes <filename>gst/gst.h</filename> so all function names and
objects are properly defined. Use <function>#include
&lt;gst/gst.h&gt;</function> to do that.
</para>
<para>
Next, you'll want to create the different elements using
<function>gst_element_factory_make ()</function>. For an Ogg/Vorbis
audio player, we'll need a source element that reads files from a
disk. &GStreamer; includes this element under the name
<quote>filesrc</quote>. Next, we'll need something to parse the
      file and decode it into raw audio. &GStreamer; has two elements
for this: the first parses Ogg streams into elementary streams (video,
audio) and is called <quote>oggdemux</quote>. The second is a Vorbis
      audio decoder; it's conveniently called <quote>vorbisdec</quote>.
Since <quote>oggdemux</quote> creates dynamic pads for each elementary
stream, you'll need to set a <quote>new-pad</quote> event handler
on the <quote>oggdemux</quote> element, like you've learned in
<xref linkend="section-pads-dynamic"/>, to link the Ogg parser and
      the Vorbis decoder elements together. Finally, we'll also need an
      audio output element; we will use <quote>alsasink</quote>, which
outputs sound to an ALSA audio device.
</para>
<para>
The last thing left to do is to add all elements into a container
element, a <classname>GstPipeline</classname>, and iterate this
pipeline until we've played the whole song. We've previously
learned how to add elements to a container bin in <xref
linkend="chapter-bins"/>, and we've learned about element states
in <xref linkend="section-elements-states"/>. We will use the function
<function>gst_bin_sync_children_state ()</function> to synchronize
the state of a bin on all of its contained children.
</para>
<para>
Let's now add all the code together to get our very first audio
player:
We will create a simple first application, a complete MP3 player, using
standard <application>GStreamer</application> components. The player
will read from a file that is given as the first argument to the program.
</para>
<programlisting>
/* example-begin helloworld.c */
#include &lt;gst/gst.h&gt;
/*
* Global objects are usually a bad thing. For the purpose of this
* example, we will use them, however.
*/
GstElement *pipeline, *source, *parser, *decoder, *sink;
static void
new_pad (GstElement *element,
GstPad *pad,
gpointer data)
{
/* We can now link this pad with the audio decoder and
* add both decoder and audio output to the pipeline. */
gst_pad_link (pad, gst_element_get_pad (decoder, "sink"));
gst_bin_add_many (GST_BIN (pipeline), decoder, sink, NULL);
  /* This function synchronizes a bin's state on all of its
* contained children. */
gst_bin_sync_children_state (GST_BIN (pipeline));
}
int
main (int argc,
char *argv[])
main (int argc, char *argv[])
{
/* initialize GStreamer */
gst_init (&amp;argc, &amp;argv);
GstElement *pipeline, *filesrc, *decoder, *audiosink;
gst_init(&amp;argc, &amp;argv);
/* check input arguments */
if (argc != 2) {
g_print ("Usage: %s &lt;Ogg/Vorbis filename&gt;\n", argv[0]);
return -1;
g_print ("usage: %s &lt;mp3 filename&gt;\n", argv[0]);
exit (-1);
}
/* create elements */
pipeline = gst_pipeline_new ("audio-player");
source = gst_element_factory_make ("filesrc", "file-source");
parser = gst_element_factory_make ("oggdemux", "ogg-parser");
decoder = gst_element_factory_make ("vorbisdec", "vorbis-decoder");
sink = gst_element_factory_make ("alsasink", "alsa-output");
/* create a new pipeline to hold the elements */
pipeline = gst_pipeline_new ("pipeline");
/* set filename property on the file source */
/* create a disk reader */
filesrc = gst_element_factory_make ("filesrc", "disk_source");
g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
/* link together - note that we cannot link the parser and
   * decoder yet, because the parser uses dynamic pads. For that,
* we set a new-pad signal handler. */
gst_element_link (source, parser);
gst_element_link (decoder, sink);
g_signal_connect (parser, "new-pad", G_CALLBACK (new_pad), NULL);
/* now it's time to get the decoder */
decoder = gst_element_factory_make ("mad", "decoder");
/* put all elements in a bin - or at least the ones we will use
* instantly. */
gst_bin_add_many (GST_BIN (pipeline), source, parser, NULL);
/* and an audio sink */
audiosink = gst_element_factory_make ("osssink", "play_audio");
/* Now set to playing and iterate. We will set the decoder and
* audio output to ready so they initialize their memory already.
* This will decrease the amount of time spent on linking these
* elements when the Ogg parser emits the new-pad signal. */
gst_element_set_state (decoder, GST_STATE_READY);
gst_element_set_state (sink, GST_STATE_READY);
/* add objects to the main pipeline */
gst_bin_add_many (GST_BIN (pipeline), filesrc, decoder, audiosink, NULL);
/* link src to sink */
gst_element_link_many (filesrc, decoder, audiosink, NULL);
/* start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* and now iterate - the rest will be automatic from here on.
* When the file is finished, gst_bin_iterate () will return
* FALSE, thereby terminating this loop. */
while (gst_bin_iterate (GST_BIN (pipeline))) ;
while (gst_bin_iterate (GST_BIN (pipeline)));
/* clean up nicely */
/* stop the pipeline */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (pipeline));
return 0;
/* we don't need a reference to these objects anymore */
gst_object_unref (GST_OBJECT (pipeline));
/* unreffing the pipeline unrefs the contained elements as well */
exit (0);
}
/* example-end helloworld.c */
</programlisting>
<!-- FIXME: this image needs updating -->
<para>
Let's go through this example step by step.
</para>
<para>
The first thing you have to do is to include the standard
<application>GStreamer</application> headers and
initialize the framework.
</para>
<programlisting>
#include &lt;gst/gst.h&gt;
...
int
main (int argc, char *argv[])
{
...
gst_init(&amp;argc, &amp;argv);
...
</programlisting>
<para>
We are going to create three elements and one pipeline. Since all
elements share the same base type, <ulink type="http"
url="../../gstreamer/html/GstElement.html"><classname>GstElement</classname></ulink>,
we can define them as:
</para>
<programlisting>
...
GstElement *pipeline, *filesrc, *decoder, *audiosink;
...
</programlisting>
<para>
Next, we are going to create an empty pipeline. As you have seen in
the basic introduction, this pipeline will hold and manage all the
elements we are going to pack into it.
</para>
<programlisting>
/* create a new pipeline to hold the elements */
pipeline = gst_pipeline_new ("pipeline");
</programlisting>
<para>
We use the standard constructor for a pipeline: gst_pipeline_new ().
</para>
<para>
We then create a disk source element. The disk source element is able to
read from a file. We use the standard GObject property mechanism to set
a property of the element: the file to read from.
</para>
<programlisting>
/* create a disk reader */
filesrc = gst_element_factory_make ("filesrc", "disk_source");
g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
</programlisting>
<note>
<para>
      You can check that filesrc is not NULL to verify that the
      disk source element was created successfully.
</para>
</note>
<para>
We now create the MP3 decoder element. This assumes that the 'mad' plugin
is installed on the system where this application is executed.
</para>
<programlisting>
/* now it's time to get the decoder */
decoder = gst_element_factory_make ("mad", "decoder");
</programlisting>
<para>
    gst_element_factory_make() takes two arguments: a string that
    identifies the type of element you need, and the name you want to
    give to this particular instance. The name is something you can
    choose yourself and might be used to retrieve the element from a
    bin/pipeline.
</para>
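    <para>
      As a hedged illustration of that last point: once an element has been
      added to a bin or pipeline, its name can be used to look it up again,
      for example with <function>gst_bin_get_by_name ()</function>:
    </para>
    <programlisting>
  GstElement *elem;

  /* retrieve the decoder from the pipeline by the name we gave it */
  elem = gst_bin_get_by_name (GST_BIN (pipeline), "decoder");
</programlisting>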
<para>
Finally we create our audio sink element. This element will be able
to play back the audio using OSS.
</para>
<programlisting>
/* and an audio sink */
audiosink = gst_element_factory_make ("osssink", "play_audio");
</programlisting>
<para>
We then add the elements to the pipeline.
</para>
<programlisting>
/* add objects to the main pipeline */
gst_bin_add_many (GST_BIN (pipeline), filesrc, decoder, audiosink, NULL);
</programlisting>
<para>
We link the different pads of the elements together like this:
</para>
<programlisting>
/* link src to sink */
gst_element_link_many (filesrc, decoder, audiosink, NULL);
</programlisting>
<para>
We now have created a complete pipeline. We can visualise the
pipeline as follows:
@ -147,27 +185,76 @@ main (int argc,
<imagedata fileref="images/hello-world.&image;" format="&IMAGE;" />
</imageobject>
</mediaobject>
</figure>
<para>
Everything is now set up to start streaming. We use the following
statements to change the state of the pipeline:
</para>
<programlisting>
/* start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
</programlisting>
<note>
<para>
<application>GStreamer</application> will take care of the READY and PAUSED state for
you when going from NULL to PLAYING.
</para>
</note>
<para>
Since we do not use threads, nothing will happen yet. We have to
call gst_bin_iterate() to execute one iteration of the pipeline.
</para>
<programlisting>
while (gst_bin_iterate (GST_BIN (pipeline)));
</programlisting>
<para>
    The gst_bin_iterate() function will return TRUE as long as something
    interesting happened inside the pipeline. When the end of file has been
    reached, gst_bin_iterate() will return FALSE and we can end the loop.
</para>
<programlisting>
/* stop the pipeline */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (pipeline));
exit (0);
</programlisting>
<note>
<para>
Don't forget to set the state of the pipeline to NULL. This will free
all of the resources held by the elements.
</para>
</note>
</sect1>
<sect1 id="section-helloworld-compilerun">
<title>Compiling and Running helloworld.c</title>
<sect1 id="section-hello-world-compile">
<title>Compiling helloworld.c</title>
<para>
To compile the helloworld example, use: <command>gcc -Wall
$(pkg-config --cflags --libs gstreamer-&GST_MAJORMINOR;)
helloworld.c -o helloworld</command>. &GStreamer; makes use of
<command>pkg-config</command> to get compiler and linker flags
needed to compile this application. If you're running a
non-standard installation, make sure the
<classname>PKG_CONFIG_PATH</classname> environment variable is
set to the correct location (<filename>$libdir/pkgconfig</filename>).
To compile the helloworld example, use:
</para>
<programlisting>
gcc -Wall `pkg-config gstreamer-&GST_MAJORMINOR; --cflags --libs` helloworld.c \
-o helloworld
</programlisting>
<para>
We use pkg-config to get the compiler flags needed to compile
this application. Make sure to have your PKG_CONFIG_PATH environment
variable set to the correct location if you are building this
application against the uninstalled location.
</para>
<para>
You can run this example application with <command>./helloworld
file.ogg</command>. Substitute <filename>file.ogg</filename>
with your favourite Ogg/Vorbis file.
You can run the example with
    (substitute helloworld.mp3 with your favourite MP3 file):
</para>
<programlisting>
./helloworld helloworld.mp3
</programlisting>
</sect1>
<sect1 id="section-hello-world-conclusion">
@ -175,23 +262,19 @@ main (int argc,
<para>
This concludes our first example. As you see, setting up a pipeline
is very low-level but powerful. You will see later in this manual how
you can create a more powerful media player with even less effort
using higher-level interfaces. We will discuss all that in <xref
linkend="part-highlevel"/>. We will first, however, go more in-depth
into more advanced &GStreamer; internals.
you can create a custom MP3 element with a higher-level API.
</para>
<para>
It should be clear from the example that we can very easily replace
the <quote>filesrc</quote> element with some other element that
reads data from a network, or some other data source element that
is better integrated with your desktop environment. Also, you can
use other decoders and parsers to support other media types. You
can use another audio sink if you're not running Linux, but Mac OS X,
Windows or FreeBSD, or you can instead use a filesink to write audio
files to disk instead of playing them back. By using an audio card
source, you can even do audio capture instead of playback. All this
shows the reusability of &GStreamer; elements, which is its greatest
advantage.
It should be clear from the example that we can very easily replace the
filesrc element with the gnomevfssrc element, giving you instant streaming
from any gnomevfs URL.
</para>
<para>
We can also choose to use another type of sink instead of the audiosink.
We could use a filesink to write the raw samples to a file, for example.
It should also be clear that inserting filters, like a stereo effect,
into the pipeline is not that hard to do. The most important thing is
that you can reuse already existing elements.
</para>
</sect1>
</chapter>

View file

@ -1,31 +1,31 @@
<chapter id="chapter-init">
<title>Initializing &GStreamer;</title>
<chapter id="chapter-initialisation">
<title>Initializing <application>GStreamer</application></title>
<para>
When writing a &GStreamer; application, you can simply include
<filename>gst/gst.h</filename> to get access to the library
    functions. Besides that, you will also need to initialize the
&GStreamer; library.
When writing a <application>GStreamer</application> application, you can
simply include <filename class='headerfile'>gst/gst.h</filename> to get
access to the library functions.
</para>
<para>
Before the <application>GStreamer</application> libraries can be used,
<function>gst_init</function> has to be called from the main application.
This call will perform the necessary initialization of the library as
well as parse the GStreamer-specific command line options.
</para>
<para>
A typical program
&EXAFOOT;
would have code to initialize GStreamer that
looks like this:
</para>
<sect1 id="section-init-c">
<title>Simple initialization</title>
<para>
Before the &GStreamer; libraries can be used,
<function>gst_init</function> has to be called from the main
application. This call will perform the necessary initialization
of the library as well as parse the &GStreamer;-specific command
line options.
</para>
<para>
A typical program &EXAFOOT; would have code to initialize
&GStreamer; that looks like this:
</para>
<programlisting>
#include &lt;gst/gst.h&gt;
<![CDATA[
/* example-begin init.c */
#include <gst/gst.h>
int
main (int argc,
char *argv[])
main (int argc, char *argv[])
{
guint major, minor, micro;
@ -37,35 +37,35 @@ main (int argc,
return 0;
}
/* example-end init.c */
]]>
</programlisting>
<para>
Use the <symbol>GST_VERSION_MAJOR</symbol>,
<symbol>GST_VERSION_MINOR</symbol> and <symbol>GST_VERSION_MICRO</symbol>
macros to get the &GStreamer; version you are building against, or
use the function <function>gst_version</function> to get the version
your application is linked against. &GStreamer; currently uses a
scheme where versions with the same major and minor versions are
    API- and ABI-compatible.
macros to get the <application>GStreamer</application> version you are
building against, or use the function <function>gst_version</function>
to get the version your application is linked against.
<!-- FIXME: include an automatically generated list of these options. -->
</para>
<para>
It is also possible to call the <function>gst_init</function> function
with two <symbol>NULL</symbol> arguments, in which case no command line
options will be parsed by <application>GStreamer</application>.
</para>
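  <para>
    A minimal sketch combining both points made above: querying the version
    we are compiled and linked against, and initializing without any
    command line parsing.
  </para>
  <programlisting>
  guint major, minor, micro;

  /* skip GStreamer option parsing entirely */
  gst_init (NULL, NULL);

  /* version we are compiled against (macros) ... */
  g_print ("Built against %d.%d.%d\n",
           GST_VERSION_MAJOR, GST_VERSION_MINOR, GST_VERSION_MICRO);

  /* ... versus the version we are linked against (runtime) */
  gst_version (&amp;major, &amp;minor, &amp;micro);
  g_print ("Linked against %d.%d.%d\n", major, minor, micro);
</programlisting>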
</sect1>
<sect1>
<title>The popt interface</title>
<para>
You can also use a popt table to initialize your own parameters as
shown in the next example:
You can also use a popt table to initialize your own parameters as shown in the
next example:
</para>
<programlisting>
/* example-begin popt.c */
#include &lt;gst/gst.h&gt;
int
main (int argc,
char *argv[])
main(int argc, char *argv[])
{
gboolean silent = FALSE;
gchar *savefile = NULL;
@ -83,6 +83,7 @@ main (int argc,
return 0;
}
/* example-end popt.c */
</programlisting>
<para>
As shown in this fragment, you can use a <ulink
@ -94,4 +95,5 @@ main (int argc,
<application>GStreamer</application> options.
</para>
</sect1>
</chapter>

View file

@ -1,534 +1,244 @@
<chapter id="chapter-pads" xreflabel="Pads and capabilities">
<title>Pads and capabilities</title>
<para>
As we have seen in <xref linkend="chapter-elements"/>, the pads are
the element's interface to the outside world. Data streams from one
element's source pad to another element's sink pad. The specific
type of media that the element can handle will be exposed by the
pad's capabilities. We will talk more on capabilities later in this
chapter (see <xref linkend="section-caps"/>).
</para>
<sect1 id="section-pads">
<chapter id="chapter-pads">
<title>Pads</title>
<para>
A pad type is defined by two properties: its direction and its
availability. As we've mentioned before, &GStreamer; defines two
pad directions: source pads and sink pads. This terminology is
defined from the view of within the element: elements receive data
on their sink pads and generate data on their source pads.
Schematically, sink pads are drawn on the left side of an element,
whereas source pads are drawn on the right side of an element. In
such graphs, data flows from left to right.
<footnote>
As we have seen in <xref linkend="chapter-elements"/>, the pads are the element's
interface to the outside world.
</para>
<para>
The specific type of media that the element can handle will be exposed by the pads.
    The description of this media type is done with capabilities (see
    <xref linkend="section-caps"/>).
</para>
<para>
Pads are either source or sink pads. The terminology is defined from the
view of the element itself: elements accept data on their sink pads, and
send data out on their source pads. Sink pads are drawn on the left,
while source pads are drawn on the right of an element. In general,
data flows from left to right in the graph.<footnote>
<para>
In reality, there is no objection to data flowing from a
source pad to the sink pad of an element upstream (to the
left of this element in drawings). Data will, however, always
flow from a source pad of one element to the sink pad of
source pad to the sink pad of an element upstream. Data will, however,
always flow from a source pad of one element to the sink pad of
another.
</para>
</footnote>
</para></footnote>
</para>
<para>
Pad directions are very simple compared to pad availability. A pad
can have any of three availabilities: always, sometimes and on
request. The meaning of those three types is exactly as it says:
    always pads always exist, sometimes pads exist only in certain
    cases (and can disappear randomly), and on-request pads appear
    only if explicitly requested by applications.
</para>
<sect1 id="section-pads-type">
<title>Types of pad</title>
<sect2 id="section-pads-dynamic">
<title>Dynamic (or sometimes) pads</title>
<title>Dynamic pads</title>
<para>
Some elements might not have all of their pads when the element is
created. This can happen, for example, with an Ogg demuxer element.
The element will read the Ogg stream and create dynamic pads for
each contained elementary stream (vorbis, theora) when it detects
such a stream in the Ogg stream. Likewise, it will delete the pad
when the stream ends. This principle is very useful for demuxer
elements, for example.
created. This
can happen, for example, with an MPEG system demultiplexer. The
demultiplexer will create its pads at runtime when it detects the
different elementary streams in the MPEG system stream.
</para>
<para>
Running <application>gst-inspect oggdemux</application> will show
that the element has only one pad: a sink pad called 'sink'. The
other pads are <quote>dormant</quote>. You can see this in the pad
template because there is an <quote>Exists: Sometimes</quote>
property. Depending on the type of Ogg file you play, the pads will
be created. We will see that this is very important when you are
going to create dynamic pipelines. You can attach a signal handler
to an element to inform you when the element has created a new pad
from one of its <quote>sometimes</quote> pad templates. The
following piece of code is an example of how to do this:
Running <application>gst-inspect mpegdemux</application> will show that
the element has only one pad: a sink pad called 'sink'. The other pads are
"dormant". You can see this in the pad template because there is
an 'Exists: Sometimes'
property. Depending on the type of MPEG file you play, the pads will
be created. We
will see that this is very important when you are going to create dynamic
pipelines later on in this manual.
</para>
<programlisting>
static void
cb_new_pad (GstElement *element,
GstPad *pad,
gpointer data)
{
g_print ("A new pad %s was created\n", gst_pad_get_name (pad));
/* here, you would setup a new pad link for the newly created pad */
[..]
}
int
main(int argc, char *argv[])
{
GstElement *pipeline, *source, *demux;
/* init */
gst_init (&amp;argc, &amp;argv);
/* create elements */
pipeline = gst_pipeline_new ("my_pipeline");
source = gst_element_factory_make ("filesrc", "source");
g_object_set (source, "location", argv[1], NULL);
demux = gst_element_factory_make ("oggdemux", "demuxer");
/* you would normally check that the elements were created properly */
/* put together a pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, demux, NULL);
gst_element_link (source, demux);
/* listen for newly created pads */
g_signal_connect (demux, "new-pad", G_CALLBACK (cb_new_pad), NULL);
/* start the pipeline */
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
while (gst_bin_iterate (GST_BIN (pipeline)));
[..]
}
</programlisting>
</sect2>
<sect2 id="section-pads-request">
<title>Request pads</title>
<para>
An element can also have request pads. These pads are not created
automatically but are only created on demand. This is very useful
for multiplexers, aggregators and tee elements. Aggregators are
elements that merge the content of several input streams together
into one output stream. Tee elements are the reverse: they are
elements that have one input stream and copy this stream to each
of their output pads, which are created on request. Whenever an
application needs another copy of the stream, it can simply request
a new output pad from the tee element.
for multiplexers, aggregators and tee elements.
</para>
<para>
The following piece of code shows how you can request a new output
pad from a <quote>tee</quote> element:
     The tee element, for example, has one input pad and a request pad template for the
output pads. Whenever an element wants to get an output pad from the tee element, it
has to request the pad.
</para>
<programlisting>
static void
some_function (GstElement *tee)
{
GstPad * pad;
pad = gst_element_get_request_pad (tee, "src%d");
g_print ("A new pad %s was created\n", gst_pad_get_name (pad));
/* here, you would link the pad */
[..]
}
</programlisting>
<para>
The <function>gst_element_get_request_pad ()</function> method
can be used to get a pad from the element based on the name of
the pad template. It is also possible to request a pad that is
compatible with another pad template. This is very useful if
you want to link an element to a multiplexer element and you
need to request a pad that is compatible. The method
<function>gst_element_get_compatible_pad ()</function> can be
used to request a compatible pad, as shown in the next example.
It will request a compatible pad from an Ogg multiplexer from
any input.
</para>
<programlisting>
static void
link_to_multiplexer (GstPad *tolink_pad,
GstElement *mux)
{
GstPad *pad;
pad = gst_element_get_compatible_pad (mux, tolink_pad);
  gst_pad_link (tolink_pad, pad);
g_print ("A new pad %s was created and linked to %s\n",
gst_pad_get_name (pad), gst_pad_get_name (tolink_pad));
}
</programlisting>
</sect2>
</sect1>
<sect1 id="section-caps">
<title>Capabilities of a pad</title>
<para>
Since the pads play a very important role in how the element is
viewed by the outside world, a mechanism is implemented to describe
the data that can flow or currently flows through the pad by using
    capabilities. Here, we will briefly describe what capabilities are
and how to use them, enough to get an understanding of the concept.
For an in-depth look into capabilities and a list of all capabilities
defined in &GStreamer;, see the <ulink type="http"
url="http://gstreamer.freedesktop.org/data/doc/gstreamer/head/pwg/html/index.html">Plugin
Writers Guide</ulink>.
Since the pads play a very important role in how the element is viewed by the
outside world, a mechanism is implemented to describe the data that can
flow through the pad by using capabilities.
</para>
<para>
Capabilities are attached to pad templates and to pads. For pad
templates, it will describe the types of media that may stream
over a pad created from this template. For pads, it can either
be a list of possible caps (usually a copy of the pad template's
capabilities), in which case the pad is not yet negotiated, or it
is the type of media that currently streams over this pad, in
which case the pad has been negotiated already.
We will briefly describe what capabilities are, enough for you to get a basic understanding
of the concepts. You will find more information on how to create capabilities in the
Plugin Writer's Guide.
</para>
<sect2 id="section-caps-structure">
<title>Dissecting capabilities</title>
<sect2 id="section-pads-caps">
<title>Capabilities</title>
<para>
	A pad's capabilities are described in a <classname>GstCaps</classname>
object. Internally, a <ulink type="http"
url="../../gstreamer/html/gstreamer-GstCaps.html"><classname>GstCaps</classname></ulink>
will contain one or more <ulink type="http"
url="../../gstreamer/html/gstreamer-GstStructure.html"><classname>GstStructure</classname></ulink>
that will describe one media type. A negotiated pad will have
capabilities set that contain exactly <emphasis>one</emphasis>
structure. Also, this structure will contain only
<emphasis>fixed</emphasis> values. These constraints are not
true for unnegotiated pads or pad templates.
Capabilities are attached to a pad in order to describe
what type of media the pad can handle.
</para>
<para>
As an example, below is a dump of the capabilities of the
<quote>vorbisdec</quote> element, which you will get by running
<command>gst-inspect vorbisdec</command>. You will see two pads:
a source and a sink pad. Both of these pads are always available,
and both have capabilities attached to them. The sink pad will
accept vorbis-encoded audio data, with the mime-type
<quote>audio/x-vorbis</quote>. The source pad will be used
to send raw (decoded) audio samples to the next element, with
a raw audio mime-type (either <quote>audio/x-raw-int</quote> or
<quote>audio/x-raw-float</quote>). The source pad will also
contain properties for the audio samplerate and the amount of
channels, plus some more that you don't need to worry about
for now.
Capabilities is shorthand for "capability chain". A capability chain
is a chain of one capability or more.
</para>
<para>
The basic entity is a capability, and is defined by a name, a MIME
type and a set of properties. A capability can be chained to
another capability, which is why we commonly refer to a chain of
capability entities as "capabilities".
<footnote>
<para>
It is important to understand that the term "capabilities" refers
to a chain of one capability or more. This will be clearer when
you see the structure definition of a <ulink type="http"
url="../../gstreamer/html/gstreamer-GstCaps.html"><classname>GstCaps
</classname></ulink>element.
</para>
</footnote>
</para>
<para>
Below is a dump of the capabilities of the element mad, as shown by
<command>gst-inspect</command>.
You can see two pads: sink and src. Both pads have capability information attached to them.
</para>
<para>
The sink pad (input pad) is called 'sink' and takes data of MIME type 'audio/mp3'. It also has
three properties: layer, bitrate and framed.
</para>
<para>
The source pad (output pad) is called 'src' and outputs data of
MIME type 'audio/raw'. It also has four properties: format, depth,
rate and channels.
</para>
<programlisting>
Pad Templates:
SRC template: 'src'
Availability: Always
Capabilities:
audio/x-raw-float
rate: [ 8000, 50000 ]
channels: [ 1, 2 ]
endianness: 1234
width: 32
buffer-frames: 0
Pads:
SINK template: 'sink'
Availability: Always
Capabilities:
audio/x-vorbis
'mad_sink':
MIME type: 'audio/mp3':
SRC template: 'src'
Availability: Always
Capabilities:
'mad_src':
MIME type: 'audio/raw':
format: String: int
endianness: Integer: 1234
width: Integer: 16
depth: Integer: 16
channels: Integer range: 1 - 2
law: Integer: 0
signed: Boolean: TRUE
rate: Integer range: 11025 - 48000
</programlisting>
</sect2>
<sect2 id="section-caps-props">
<title>Properties and values</title>
<sect2 id="section-pads-props">
<title>What are properties ?</title>
<para>
Properties are used to describe extra information for
capabilities. A property consists of a key (a string) and
        a value. There are different possible value types that can be used
        (a short example follows the list below):
</para>
<itemizedlist>
<listitem>
<para>
Basic types, this can be pretty much any
<classname>GType</classname> registered with Glib. Those
properties indicate a specific, non-dynamic value for this
property. Examples include:
</para>
<itemizedlist>
<listitem>
<para>
An integer value (<classname>G_TYPE_INT</classname>):
the property has this exact value.
</para>
</listitem>
<listitem>
<para>
A boolean value (<classname>G_TYPE_BOOLEAN</classname>):
the property is either TRUE or FALSE.
</para>
</listitem>
<listitem>
<para>
A float value (<classname>G_TYPE_FLOAT</classname>):
the property has this exact floating point value.
</para>
</listitem>
<listitem>
<para>
A string value (<classname>G_TYPE_STRING</classname>):
the property contains a UTF-8 string.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
Range types are <classname>GType</classname>s registered by
&GStreamer; to indicate a range of possible values. They are
used for indicating allowed audio samplerate values or
supported video sizes. The two types defined in &GStreamer;
are:
</para>
<itemizedlist>
<listitem>
<para>
An integer range value
(<classname>GST_TYPE_INT_RANGE</classname>): the property
denotes a range of possible integers, with a lower and an
upper boundary. The <quote>vorbisdec</quote> element, for
example, has a rate property that can be between 8000 and
50000.
</para>
</listitem>
<listitem>
<para>
A float range value
(<classname>GST_TYPE_FLOAT_RANGE</classname>): the property
denotes a range of possible floating point values, with a
lower and an upper boundary.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
A list value (<classname>GST_TYPE_LIST</classname>): the
property can take any value from a list of basic values
given in this list.
</para>
</listitem>
</itemizedlist>
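      <para>
        As a brief, hedged example of the value types above, the fragment
        below builds a set of capabilities combining a basic type with a
        range type; the media type and property names are illustrative only:
      </para>
      <programlisting>
  GstCaps *caps;

  /* a fixed integer width plus an integer range for the samplerate */
  caps = gst_caps_new_simple ("audio/x-raw-int",
      "width", G_TYPE_INT, 16,
      "rate", GST_TYPE_INT_RANGE, 8000, 48000,
      NULL);
</programlisting>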
</sect2>
</sect1>
<sect1 id="section-caps-api">
<itemizedlist>
<listitem>
<para>
basic types:
</para>
<itemizedlist>
<listitem>
<para>
an integer value: the property has this exact value.
</para>
</listitem>
<listitem>
<para>
a boolean value: the property is either TRUE or FALSE.
</para>
</listitem>
<listitem>
<para>
a fourcc value: this is a value that is commonly used to
describe an encoding for video,
as used for example by the AVI specification.
<footnote><para>
fourcc values consist of four bytes.
<ulink url="http://www.fourcc.org" type="http">The FOURCC
Definition List</ulink> is the most complete resource
on the allowed fourcc values.
</para></footnote>
</para>
</listitem>
<listitem>
<para>
a float value: the property has this exact floating point value.
</para>
</listitem>
<listitem>
<para>
a string value.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
range types:
</para>
<itemizedlist>
<listitem>
<para>
an integer range value: the property denotes a range of
possible integers. For example, the wavparse element has
a source pad where the "rate" property can go from 8000 to
48000.
</para>
</listitem>
<listitem>
<para>
a float range value: the property denotes a range of possible
floating point values.
</para>
</listitem>
</itemizedlist>
</listitem>
<listitem>
<para>
a list value: the property can take any value from a list of
basic value types or range types.
</para>
</listitem>
</itemizedlist>
</sect2>
<sect2 id="section-pads-caps-use">
<title>What capabilities are used for</title>
<para>
Capabilities describe the type of data that is streamed between
two pads, or that one pad (template) supports. This makes them
very useful for various purposes:
Capabilities describe in great detail the type of media that is handled by the pads.
They are mostly used for:
</para>
<itemizedlist>
<listitem>
<para>
Autoplugging: automatically finding elements to link to a
pad based on its capabilities. All autopluggers use this
method.
Autoplugging: automatically finding plugins for a set of capabilities
</para>
</listitem>
<listitem>
<para>
Compatibility detection: when two pads are linked, &GStreamer;
can verify if the two pads are talking about the same media
type. The process of linking two pads and checking if they
are compatible is called <quote>caps negotiation</quote>.
</para>
</listitem>
<listitem>
<para>
Metadata: by reading the capabilities from a pad, applications
can provide information about the type of media that is being
streamed over the pad, which is information about the stream
          that is currently being played back.
</para>
</listitem>
<listitem>
<para>
Filtering: an application can use capabilities to limit the
possible media types that can stream between two pads to a
specific subset of their supported stream types. An application
can, for example, use <quote>filtered caps</quote> to set a
specific (non-fixed) video size that will stream between two
pads.
Compatibility detection: when two pads are linked, <application>GStreamer</application>
can verify if the two pads are talking about the same media types.
The process of linking two pads and checking if they are compatible
is called "caps negotiation".
</para>
</listitem>
</itemizedlist>
<sect2 id="section-caps-metadata">
<title>Using capabilities for metadata</title>
<para>
A pad can have a set (i.e. one or more) of capabilities attached
to it. You can get values of properties in a set of capabilities
by querying individual properties of one structure. You can get
a structure from a caps using
<function>gst_caps_get_structure ()</function>:
</para>
<programlisting>
static void
read_video_props (GstCaps *caps)
{
gint width, height;
const GstStructure *str;
  str = gst_caps_get_structure (caps, 0);
if (!gst_structure_get_int (str, "width", &amp;width) ||
!gst_structure_get_int (str, "height", &amp;height)) {
g_print ("No width/height available\n");
return;
}
g_print ("The video size of this set of capabilities is %dx%d\n",
width, height);
}
</programlisting>
</sect2>
<sect2 id="section-caps-filter">
<title>Creating capabilities for filtering</title>
<para>
While capabilities are mainly used inside a plugin to describe the
media type of the pads, the application programmer also has to have
basic understanding of capabilities in order to interface with the
plugins, especially when using filtered caps. When you're using
filtered caps or fixation, you're limiting the allowed types of
media that can stream between two pads to a subset of their supported
media types. You do this by filtering using your own set of
capabilities. In order to do this, you need to create your own
<classname>GstCaps</classname>. The simplest way to do this is by
using the convenience function <function>gst_caps_new_simple
()</function>:
</para>
<programlisting>
static void
link_pads_with_filter (GstPad *one,
GstPad *other)
{
GstCaps *caps;
caps = gst_caps_new_simple ("video/x-raw-yuv",
"width", G_TYPE_INT, 384,
"height", G_TYPE_INT, 288,
"framerate", G_TYPE_DOUBLE, 25.,
NULL);
gst_pad_link_filtered (one, other, caps);
}
</programlisting>
<para>
In some cases, you will want to create a more elaborate set of
capabilities to filter a link between two pads. Then, this function
is too simplistic and you'll want to use the method
<function>gst_caps_new_full ()</function>:
</para>
<programlisting>
static void
link_pads_with_filter (GstPad *one,
GstPad *other)
{
GstCaps *caps;
caps = gst_caps_new_full (
gst_structure_new ("video/x-raw-yuv",
"width", G_TYPE_INT, 384,
"height", G_TYPE_INT, 288,
"framerate", G_TYPE_DOUBLE, 25.,
NULL),
gst_structure_new ("video/x-raw-rgb",
"width", G_TYPE_INT, 384,
"height", G_TYPE_INT, 288,
"framerate", G_TYPE_DOUBLE, 25.,
NULL),
NULL);
gst_pad_link_filtered (one, other, caps);
}
</programlisting>
<para>
See the API references for the full API of
<classname>GstStructure</classname> and
<classname>GstCaps</classname>.
</para>
</sect2>
</sect1>
<sect1 id="section-pads-ghost">
<title>Ghost pads</title>
<para>
You can see from <xref linkend="section-bin-noghost-img"/> how a bin
has no pads of its own. This is where "ghost pads" come into play.
</para>
<figure float="1" id="section-bin-noghost-img">
<title>Visualisation of a <ulink type="http"
url="../../gstreamer/html/GstBin.html"><classname>GstBin</classname></ulink>
element without ghost pads</title>
<mediaobject>
<imageobject>
<imagedata fileref="images/bin-element-noghost.&image;"
format="&IMAGE;"/>
</imageobject>
</mediaobject>
</figure>
<para>
A ghost pad is a pad from some element in the bin that can be
accessed directly from the bin as well. Compare it to a symbolic
link in UNIX filesystems. Using ghost pads on bins, the bin also
has a pad and can transparently be used as an element in other
parts of your code.
</para>
<figure float="1" id="section-bin-ghost-img">
<title>Visualisation of a <ulink type="http"
url="../../gstreamer/html/GstBin.html"><classname>GstBin</classname></ulink>
element with a ghost pad</title>
<mediaobject>
<imageobject>
<imagedata fileref="images/bin-element-ghost.&image;"
format="&IMAGE;"/>
</imageobject>
</mediaobject>
</figure>
<para>
<xref linkend="section-bin-ghost-img"/> is a representation of a
ghost pad. The sink pad of element one is now also a pad of the bin.
Obviously, ghost pads can be added to any type of elements, not just
to a <classname>GstBin</classname>.
</para>
<para>
    A ghost pad is created using the function
<function>gst_element_add_ghost_pad ()</function>:
</para>
<programlisting>
int
main (int argc,
char *argv[])
{
GstElement *bin, *sink;
/* init */
gst_init (&amp;argc, &amp;argv);
/* create element, add to bin, add ghostpad */
sink = gst_element_factory_make ("fakesink", "sink");
bin = gst_bin_new ("mybin");
gst_bin_add (GST_BIN (bin), sink);
gst_element_add_ghost_pad (bin,
gst_element_get_pad (sink, "sink"), "sink");
[..]
}
</programlisting>
<para>
In the above example, the bin now also has a pad: the pad called
<quote>sink</quote> of the given element. The bin can, from here
on, be used as a substitute for the sink element. You could, for
example, link another element to the bin.
</para>
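  <para>
    Continuing that sketch: assuming <function>source</function> is some
    element with a source pad, linking it to the bin is now no different
    from linking it to a plain element.
  </para>
  <programlisting>
  /* the bin exposes a "sink" ghost pad, so it links like any element */
  gst_element_link (source, bin);
</programlisting>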
</sect1>
</chapter>

View file

@ -28,57 +28,4 @@
</para>
</listitem>
</itemizedlist>
<para>
All plugins should implement one function, <function>plugin_init</function>,
that creates all the element factories and registers all the type
definitions contained in the plugin.
Without this function, a plugin cannot be registered.
</para>
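  <para>
    A minimal sketch of such an entry point is shown below. The element name
    <quote>myfilter</quote> and the type <function>GST_TYPE_MYFILTER</function>
    are hypothetical, and the exact registration calls depend on the
    &GStreamer; version you are writing the plugin for.
  </para>
  <programlisting>
static gboolean
plugin_init (GstPlugin *plugin)
{
  /* register one (hypothetical) element factory with the plugin system */
  return gst_element_register (plugin, "myfilter",
      GST_RANK_NONE, GST_TYPE_MYFILTER);
}
</programlisting>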
<para>
The plugins are maintained in the plugin system. Optionally, the
type definitions and the element factories can be saved into an XML
representation so that the plugin system does not have to load all
available plugins in order to know their definition.
</para>
<para>
The basic plugin structure has the following fields:
</para>
<programlisting>
typedef struct _GstPlugin GstPlugin;
struct _GstPlugin {
gchar *name; /* name of the plugin */
gchar *longname; /* long name of plugin */
gchar *filename; /* filename it came from */
GList *types; /* list of types provided */
gint numtypes;
GList *elements; /* list of elements provided */
gint numelements;
GList *autopluggers; /* list of autopluggers provided */
gint numautopluggers;
gboolean loaded; /* if the plugin is in memory */
};
</programlisting>
<para>
You can query a <classname>GList</classname> of available plugins with the
function <function>gst_plugin_get_list</function> as this example shows:
</para>
<programlisting>
GList *plugins;
plugins = gst_plugin_get_list ();
while (plugins) {
GstPlugin *plugin = (GstPlugin *)plugins-&gt;data;
g_print ("plugin: %s\n", gst_plugin_get_name (plugin));
plugins = g_list_next (plugins);
}
</programlisting>
</chapter>

View file

@ -2,267 +2,14 @@
<title>Components</title>
<para>
&GStreamer; includes several higher-level components to simplify your
    application's life. All of the components discussed here (for now) are
    targeted at media playback. The idea of each of these components is
to integrate as closely as possible with a &GStreamer; pipeline, but
to hide the complexity of media type detection and several other
rather complex topics that have been discussed in <xref
linkend="part-advanced"/>.
FIXME: This chapter is way out of date.
</para>
<para>
We currently recommend people to use either playbin (see <xref
linkend="section-components-playbin"/>) or decodebin (see <xref
linkend="section-components-decodebin"/>), depending on their needs. The
other components discussed here are either outdated or deprecated. The
documentation is provided for legacy purposes. Use of those other
components is not recommended.
<application>GStreamer</application> includes components that people can include
in their programs.
</para>
<sect1 id="section-components-playbin">
<title>Playbin</title>
<para>
Playbin is an element that can be created using the standard &GStreamer;
API (e.g. <function>gst_element_factory_make ()</function>). The factory
is conveniently called <quote>playbin</quote>. By being a
<classname>GstElement</classname>, playbin automatically supports all
of the features of this class, including error handling, tag support,
state handling, getting stream positions, seeking, and so on.
</para>
<para>
Setting up a playbin pipeline is as simple as creating an instance of
the playbin element, setting a file location (this has to be a valid
URI, so <quote>&lt;protocol&gt;://&lt;location&gt;</quote>, e.g.
file:///tmp/my.ogg or http://www.example.org/stream.ogg) using the
<quote>uri</quote> property on playbin, and then setting the element
to the <classname>GST_STATE_PLAYING</classname> state. Internally,
playbin uses threads, so there's no need to iterate the element or
    anything. However, one thing to keep in mind is that signals fired
    by playbin might come from a thread other than the main thread, so be
    sure to keep this in mind in your signal handlers. Most application
programmers will want to use a function such as <function>g_idle_add
()</function> to make sure that the signal is handled in the main
thread.
</para>
<programlisting>
#include &lt;gst/gst.h&gt;
static void
cb_eos (GstElement *play,
gpointer data)
{
gst_main_quit ();
}
static void
cb_error (GstElement *play,
GstElement *src,
GError *err,
gchar *debug,
gpointer data)
{
g_print ("Error: %s\n", err->message);
}
gint
main (gint argc,
gchar *argv[])
{
GstElement *play;
/* init GStreamer */
gst_init (&amp;argc, &amp;argv);
/* make sure we have a URI */
if (argc != 2) {
g_print ("Usage: %s &lt;URI&gt;\n", argv[0]);
return -1;
}
/* set up */
  play = gst_element_factory_make ("playbin", "play");
g_object_set (G_OBJECT (play), "uri", argv[1], NULL);
g_signal_connect (play, "eos", G_CALLBACK (cb_eos), NULL);
g_signal_connect (play, "error", G_CALLBACK (cb_error), NULL);
if (gst_element_set_state (play, GST_STATE_PLAYING) != GST_STATE_SUCCESS) {
g_print ("Failed to play\n");
return -1;
}
/* now run */
gst_main ();
/* also clean up */
gst_element_set_state (play, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (play));
return 0;
}
</programlisting>
<para>
Playbin has several features that have been discussed previously:
</para>
<itemizedlist>
<listitem>
<para>
Settable video and audio output (using the <quote>video-sink</quote>
and <quote>audio-sink</quote> properties).
</para>
</listitem>
<listitem>
<para>
Mostly controllable and trackable as a
<classname>GstElement</classname>, including error handling, eos
handling, tag handling, state handling, media position handling and
seeking.
</para>
</listitem>
<listitem>
<para>
        Buffers network sources.
</para>
</listitem>
<listitem>
<para>
Supports visualizations for audio-only media.
</para>
</listitem>
</itemizedlist>
</sect1>
<sect1 id="section-components-decodebin">
<title>Decodebin</title>
<para>
Decodebin is the actual autoplugger backend of playbin, which was
discussed in the previous section. Decodebin will, in short, accept
input from a source that is linked to its sinkpad and will try to
detect the media type contained in the stream, and set up decoder
routines for each of those. It will automatically select decoders.
For each decoded stream, it will emit the <quote>new-decoded-pad</quote>
signal, to let the client know about the newly found decoded stream.
For unknown streams (which might be the whole stream), it will emit
the <quote>unknown-type</quote> signal. The application is then
responsible for reporting the error to the user.
</para>
<para>
The example code below will play back an audio stream of an input
file. For readability, it does not include any error handling of
any sort.
</para>
<programlisting>
#include &lt;string.h&gt;
#include &lt;gst/gst.h&gt;
GstElement *pipeline, *audio;
GstPad *audiopad;
static void
cb_newpad (GstElement *decodebin,
GstPad *pad,
gboolean last,
gpointer data)
{
GstCaps *caps;
GstStructure *str;
/* only link audio; only link once */
if (GST_PAD_IS_LINKED (audiopad))
return;
caps = gst_pad_get_caps (pad);
str = gst_caps_get_structure (caps, 0);
if (!strstr (gst_structure_get_name (str), "audio"))
return;
/* link'n'play */
gst_pad_link (pad, audiopad);
gst_bin_add (GST_BIN (pipeline), audio);
gst_bin_sync_children_state (GST_BIN (pipeline));
}
gint
main (gint argc,
gchar *argv[])
{
GstElement *src, *dec, *conv, *scale, *sink;
/* init GStreamer */
gst_init (&amp;argc, &amp;argv);
/* make sure we have input */
if (argc != 2) {
g_print ("Usage: %s &lt;filename&gt;\n", argv[0]);
return -1;
}
/* setup */
pipeline = gst_pipeline_new ("pipeline");
src = gst_element_factory_make ("filesrc", "source");
  g_object_set (G_OBJECT (src), "location", argv[1], NULL);
dec = gst_element_factory_make ("decodebin", "decoder");
g_signal_connect (dec, "new-decoded-pad", G_CALLBACK (cb_newpad), NULL);
audio = gst_bin_new ("audiobin");
conv = gst_element_factory_make ("audioconvert", "aconv");
audiopad = gst_element_get_pad (conv, "sink");
scale = gst_element_factory_make ("audioscale", "scale");
sink = gst_element_factory_make ("alsasink", "sink");
gst_bin_add_many (GST_BIN (audio), conv, scale, sink, NULL);
gst_element_link_many (conv, scale, sink);
gst_bin_add_many (GST_BIN (pipeline), src, dec, NULL);
gst_element_link (src, dec);
/* run */
gst_element_set_state (audio, GST_STATE_PAUSED);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
while (gst_bin_iterate (GST_BIN (pipeline))) ;
/* cleanup */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (pipeline));
return 0;
}
</programlisting>
<para>
Although decodebin is a good autoplugger, there's a whole lot of
things that it does not do and is not intended to do:
</para>
<itemizedlist>
<listitem>
<para>
Taking care of input streams with a known media type (e.g. a DVD,
an audio-CD or such).
</para>
</listitem>
<listitem>
<para>
Selection of streams (e.g. which audio track to play in case of
multi-language media streams).
</para>
</listitem>
<listitem>
<para>
Overlaying subtitles over a decoded video stream.
</para>
</listitem>
</itemizedlist>
</sect1>
<sect1 id="section-components-spider">
<title>Spider</title>
<para>
Bla
</para>
</sect1>
<sect1 id="section-components-gst-play">
<title>GstPlay</title>
<para>
@ -271,6 +18,14 @@ main (gint argc,
</sect1>
<sect1 id="section-components-gst-media-play">
<title>GstMediaPlay</title>
<para>
GstMediaPlay is a complete player widget.
</para>
</sect1>
<sect1 id="section-components-gst-editor">
<title>GstEditor</title>
<para>

View file

@ -1,18 +1,12 @@
<chapter id="chapter-motivation">
<title>Motivation &amp; Goals</title>
<title>Motivation</title>
<para>
Linux has historically lagged behind other operating systems in the
multimedia arena. Microsoft's <trademark>Windows</trademark> and
Apple's <trademark>MacOS</trademark> both have strong support for
multimedia devices, multimedia content creation, playback, and
realtime processing. Linux, on the other hand, has a poorly integrated
collection of multimedia utilities and applications available, which
can hardly compete with the professional level of software available
for MS Windows and MacOS.
</para>
<para>
GStreamer was designed to provide a solution to the current Linux media
problems.
Linux has historically lagged behind other operating systems in the multimedia
arena. Microsoft's <trademark>Windows</trademark> and Apple's <trademark>MacOS</trademark> both have strong support
for multimedia devices, multimedia content creation,
playback, and realtime processing. Linux, on the other hand, has a poorly integrated
collection of multimedia utilities and applications available, which can hardly compete
with the professional level of software available for MS Windows and MacOS.
</para>
<sect1 id="section-motivation-problems">
@ -23,41 +17,37 @@
<sect2 id="section-motivation-duplicate">
<title>Multitude of duplicate code</title>
<para>
The Linux user who wishes to hear a sound file must hunt through
their collection of sound file players in order to play the tens
of sound file formats in wide use today. Most of these players
basically reimplement the same code over and over again.
The Linux user who wishes to hear a sound file must hunt through their collection of
sound file players in order to play the tens of sound file formats in wide use today.
Most of these players basically reimplement the same code over and over again.
</para>
<para>
The Linux developer who wishes to embed a video clip in their
application must use crude hacks to run an external video player.
There is no library available that a developer can use to create
a custom media player.
The Linux developer who wishes to embed a video clip in their application must use
crude hacks to run an external video player. There is no library available that a
developer can use to create a custom media player.
</para>
</sect2>
</sect2>
<sect2 id="section-motivation-goal">
<title>'One goal' media players/libraries</title>
<para>
Your typical MPEG player was designed to play MPEG video and audio.
Most of these players have implemented a complete infrastructure
focused on achieving their only goal: playback. No provisions were
made to add filters or special effects to the video or audio data.
Your typical MPEG player was designed to play MPEG video and audio. Most of
these players have implemented a complete infrastructure focused on
achieving their only goal: playback. No provisions were made to add
filters or special effects to the video or audio data.
</para>
<para>
If you want to convert an MPEG-2 video stream into an AVI file,
your best option would be to take all of the MPEG-2 decoding
algorithms out of the player and duplicate them into your own
AVI encoder. These algorithms cannot easily be shared across
applications.
</para>
<para>
Attempts have been made to create libraries for handling various
media types. Because they focus on a very specific media type
(avifile, libmpeg2, ...), significant work is needed to integrate
them due to a lack of a common API. &GStreamer; allows you to
wrap these libraries with a common API, which significantly
simplifies integration and reuse.
</para>
</sect2>
    <sect2 id="section-motivation-plugin">
      <title>Non unified plugin mechanisms</title>
<para>
Your typical media player might have a plugin for different media
types. Two media players will typically implement their own plugin
mechanism so that the codecs cannot be easily exchanged. The plugin
system of the typical media player is also very tailored to the
specific needs of the application.
</para>
<para>
The lack of a unified plugin mechanism also seriously hinders the
        creation of binary only codecs. No company is willing to port their
code to all the different plugin mechanisms.
</para>
<para>
        While &GStreamer; also uses its own plugin system it offers a very rich
        framework for the plugin developer and ensures the plugin can be used
        in a wide range of applications, transparently interacting with other
        plugins. The framework that &GStreamer; provides for the plugins is
        flexible enough to host even the most demanding plugins.
</para>
</sect2>
<sect2 id="section-motivation-experience">
<title>Poor user experience</title>
<para>
        Because of the problems mentioned above, application authors have
        so far often been forced to spend a considerable amount of time
        writing their own backends, plugin mechanisms and so on. The result
        has often been, unfortunately, that both the backend and the
        user interface were only half-finished. Demotivated, the application
        authors would start rewriting the whole thing, completing the circle.
        This leads to a <emphasis>poor end user experience</emphasis>.
</para>
</sect2>
<sect2 id="section-motivation-network">
<title>Provision for network transparency</title>
<para>
        No provisions have been made for emerging technologies such as
        GNOME object embedding using Bonobo.
</para>
<para>
        The &GStreamer; core does not use network transparent technologies
        at the lowest level as it only adds overhead for the local case.
        That said, it shouldn't be hard to create a wrapper around the
        core components. There are TCP plugins now that implement a
        &GStreamer; Data Protocol that allows pipelines to be split over
        TCP. These are located in the gst-plugins module directory gst/tcp.
</para>
</sect2>
</sect1>
<sect1 id="section-goals-design">
<title>The design goals</title>
<para>
We describe what we try to achieve with &GStreamer;.
</para>
<sect2 id="section-goals-clean">
<title>Clean and powerful</title>
<para>
&GStreamer; wants to provide a clean interface to:
</para>
<itemizedlist>
<listitem>
<para>
The application programmer who wants to build a media pipeline.
          The programmer can use an extensive set of powerful tools to create
          media pipelines without writing a single line of code. Performing
          complex media manipulations becomes very easy (a short sketch in C
          follows this list).
</para>
</listitem>
<listitem>
<para>
          The plugin programmer. Plugin programmers are provided a clean and
          simple API to create self-contained plugins. An extensive debugging
          and tracing mechanism has been integrated. &GStreamer; also comes
          with a large set of real-life plugins that serve as examples too.
</para>
</listitem>
</itemizedlist>
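      <para>
        The same textual pipeline descriptions that the command line tools
        accept can also be parsed from C. The following is a minimal sketch,
        not taken from a real application: the pipeline string, the file name
        <filename>music.mp3</filename> and the availability of the mad and
        osssink plugins are all assumptions made for the example.
      </para>
      <programlisting>
#include &lt;gst/gst.h&gt;

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GError *error = NULL;

  gst_init (&amp;argc, &amp;argv);

  /* turn a textual description into a ready-made pipeline;
   * "music.mp3" is a stand-in for a real file */
  pipeline = gst_parse_launch (
      "filesrc location=music.mp3 ! mad ! osssink", &amp;error);
  if (pipeline == NULL) {
    g_print ("parse error: %s\n", error->message);
    return 1;
  }

  /* run the pipeline until the end of the stream */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  while (gst_bin_iterate (GST_BIN (pipeline)));
  gst_element_set_state (pipeline, GST_STATE_NULL);

  return 0;
}
      </programlisting>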
</sect2>
<sect2 id="section-goals-object">
<title>Object oriented</title>
<para>
&GStreamer; adheres to the GLib 2.0 object model. A programmer
familiar with GLib 2.0 or older versions of GTK+ will be
comfortable with &GStreamer;.
</para>
<para>
&GStreamer; uses the mechanism of signals and object properties.
</para>
<para>
All objects can be queried at runtime for their various properties and
capabilities.
</para>
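    <para>
      As a small sketch of this generic property handling, the fragment below
      sets and queries properties on an element. The fakesrc element and its
      num-buffers property merely serve as an example here.
    </para>
    <programlisting>
#include &lt;gst/gst.h&gt;

int
main (int argc, char *argv[])
{
  GstElement *src;
  gchar *name;

  gst_init (&amp;argc, &amp;argv);

  src = gst_element_factory_make ("fakesrc", "source");
  g_return_val_if_fail (src != NULL, 1);

  /* any property can be set ... */
  g_object_set (G_OBJECT (src), "num-buffers", 16, NULL);

  /* ... and queried again through the same generic mechanism */
  g_object_get (G_OBJECT (src), "name", &amp;name, NULL);
  g_print ("the element is named %s\n", name);
  g_free (name);

  return 0;
}
    </programlisting>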
<para>
&GStreamer; intends to be similar in programming methodology to GTK+.
This applies to the object model, ownership of objects, reference
counting, ...
</para>
</sect2>
<sect2 id="section-goals-extensible">
<title>Extensible</title>
<para>
All &GStreamer; Objects can be extended using the GObject
inheritance methods.
</para>
<para>
All plugins are loaded dynamically and can be extended and upgraded
independently.
</para>
</sect2>
<sect2 id="section-goals-binary">
<title>Allow binary only plugins</title>
<para>
Plugins are shared libraries that are loaded at runtime. Since all
the properties of the plugin can be set using the GObject properties,
there is no need (and in fact no way) to have any header files
installed for the plugins.
</para>
<para>
      Special care has been taken to make plugins completely self-contained.
All relevant aspects of plugins can be queried at run-time.
</para>
</sect2>
<sect2 id="section-goals-performance">
<title>High performance</title>
<para>
High performance is obtained by:
</para>
<itemizedlist>
<listitem>
<para>
using GLib's <function>g_mem_chunk</function> and fast
non-blocking allocation algorithms where possible to
minimize dynamic memory allocation.
</para>
</listitem>
<listitem>
<para>
extremely light-weight links between plugins. Data can travel
the pipeline with minimal overhead. Data passing between
plugins only involves a pointer dereference in a typical
pipeline.
</para>
</listitem>
<listitem>
<para>
providing a mechanism to directly work on the target memory.
A plugin can for example directly write to the X server's
shared memory space. Buffers can also point to arbitrary
memory, such as a sound card's internal hardware buffer.
</para>
</listitem>
<listitem>
<para>
          refcounting and copy-on-write minimize usage of memcpy.
          Sub-buffers efficiently split buffers into manageable pieces
          (see the sketch after this list).
</para>
</listitem>
<listitem>
<para>
the use of cothreads to minimize the threading overhead.
Cothreads are a simple and fast user-space method for
switching between subtasks. Cothreads were measured to
          consume as little as 600 CPU cycles.
</para>
</listitem>
<listitem>
<para>
allowing hardware acceleration by using specialized plugins.
</para>
</listitem>
<listitem>
<para>
using a plugin registry with the specifications of the plugins so
that the plugin loading can be delayed until the plugin is actually
used.
</para>
</listitem>
<listitem>
<para>
all critical data passing is free of locks and mutexes.
</para>
</listitem>
</itemizedlist>
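    <para>
      The following fragment sketches the sub-buffer technique from the list
      above: the second buffer points into the first one, so no sample data
      is copied. It is an illustration only, not code from a real element.
    </para>
    <programlisting>
#include &lt;gst/gst.h&gt;

int
main (int argc, char *argv[])
{
  GstBuffer *parent, *piece;

  gst_init (&amp;argc, &amp;argv);

  /* one real allocation ... */
  parent = gst_buffer_new_and_alloc (4096);

  /* ... and a sub-buffer that merely references part of it */
  piece = gst_buffer_create_sub (parent, 0, 1024);

  /* both buffers share the same underlying memory */
  g_assert (GST_BUFFER_DATA (piece) == GST_BUFFER_DATA (parent));

  gst_buffer_unref (piece);
  gst_buffer_unref (parent);

  return 0;
}
    </programlisting>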
</sect2>
<sect2 id="section-goals-separation">
<title>Clean core/plugins separation</title>
<para>
The core of &GStreamer; is essentially media-agnostic. It only knows
about bytes and blocks, and only contains basic elements.
The core of &GStreamer; is functional enough to even implement
low-level system tools, like cp.
</para>
<para>
All of the media handling functionality is provided by plugins
external to the core. These tell the core how to handle specific
types of media.
</para>
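    <para>
      A short sketch of this separation: the core can tell you whether a
      plugin has registered an element factory under a given name, but knows
      nothing about the media format the element handles. The mad MP3 decoder
      is only an example of such a plugin.
    </para>
    <programlisting>
#include &lt;gst/gst.h&gt;

int
main (int argc, char *argv[])
{
  GstElementFactory *factory;

  gst_init (&amp;argc, &amp;argv);

  /* look up a factory in the registry; the MP3 knowledge
   * lives entirely in the plugin, not in the core */
  factory = gst_element_factory_find ("mad");
  if (factory == NULL)
    g_print ("the mad MP3 decoder plugin is not installed\n");
  else
    g_print ("the mad MP3 decoder plugin is available\n");

  return 0;
}
    </programlisting>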
</sect2>
<sect2 id="section-goals-testbed">
<title>Provide a framework for codec experimentation</title>
<para>
&GStreamer; also wants to be an easy framework where codec
developers can experiment with different algorithms, speeding up
the development of open and free multimedia codecs like <ulink
url="http://www.xiph.org/ogg/index.html" type="http">Theora and
Vorbis</ulink>.
</para>
</sect2>
</sect1>
</chapter>

<chapter id="chapter-intro">
  <title>Introduction</title>
<sect1 id="section-intro-what">
<title>What is &GStreamer;?</title>
<para>
&GStreamer; is a framework for creating streaming media applications.
The fundamental design comes from the video pipeline at Oregon Graduate
Institute, as well as some ideas from DirectShow.
</para>
<para>
&GStreamer;'s development framework makes it possible to write any
type of streaming multimedia application. The &GStreamer; framework
is designed to make it easy to write applications that handle audio
or video or both. It isn't restricted to audio and video, and can
process any kind of data flow.
The pipeline design is made to have little overhead above what the
applied filters induce. This makes &GStreamer; a good framework for
designing even high-end audio applications which put high demands on
latency.
</para>
<para>
      One of the most obvious uses of &GStreamer; is using it to build
      a media player. &GStreamer; already includes components for building a
      media player that can support a very wide variety of formats, including
      MP3, Ogg/Vorbis, MPEG-1/2, AVI, Quicktime, mod, and more. &GStreamer;,
however, is much more than just another media player. Its main advantages
are that the pluggable components can be mixed and matched into arbitrary
pipelines so that it's possible to write a full-fledged video or audio
      editing application.
    </para>
<para>
The &GStreamer; core function is to provide a framework for plugins,
data flow and media type handling/negotiation. It also provides an
API to write applications using the various plugins.
</para>
</sect1>
<sect1 id="section-intro-structure">
<title>Structure of this Manual</title>
<para>
This book is about &GStreamer; from a developer's point of view; it
describes how to write a &GStreamer; application using the &GStreamer;
libraries and tools. For an explanation about writing plugins, we
suggest the <ulink type="http"
url="http://gstreamer.freedesktop.org/data/doc/gstreamer/head/pwg/html/index.html">Plugin
Writers Guide</ulink>.
</para>
<para>
<xref linkend="part-overview"/> gives you an overview of &GStreamer;'s
      motivation and design goals.
</para>
<para>
<xref linkend="part-basics"/> rapidly covers the basics of &GStreamer;
      application programming. At the end of that part, you should be
      able to build your own audio player using &GStreamer;.
</para>
<para>
In <xref linkend="part-advanced"/>, we will move on to complicated
      subjects which make &GStreamer; stand out from its competitors. We
will discuss application-pipeline interaction using dynamic parameters
and interfaces, we will discuss threading and threaded pipelines,
scheduling and clocks (and synchronization). Most of those topics are
not just there to introduce you to their API, but primarily to give
a deeper insight in solving application programming problems with
&GStreamer; and understanding their concepts.
</para>
<para>
Next, in <xref linkend="part-highlevel"/>, we will go into higher-level
programming APIs for &GStreamer;. You don't exactly need to know all
the details from the previous parts to understand this, but you will
need to understand basic &GStreamer; concepts nevertheless. We will,
amongst others, discuss XML, playbin and autopluggers.
</para>
<para>
      In <xref linkend="part-appendices"/>, you will find assorted
information on integrating with GNOME, KDE, OS X or Windows, some
debugging help and general tips to improve and simplify &GStreamer;
programming.
</para>
<para>
In order to understand this manual, you will need to have a basic
understanding of the C language. Since &GStreamer; uses <ulink
url="http://developer.gnome.org/arch/gtk/glib.html" type="http">GLib
2.0</ulink>, the reader is assumed to understand the basics of the
<ulink url="http://developer.gnome.org/doc/API/2.0/gobject/index.html"
type="http">GObject object model</ulink>. It is recommended to have
skimmed through the introduction of the <ulink type="http"
url="http://www.le-hacker.org/papers/gobject/index.html">GObject
tutorial</ulink> before reading this. You may also want to have a look
at Eric Harlow's book <emphasis>Developing Linux Applications with
GTK+ and GDK</emphasis>.
</para>
</sect1>