Docs updates

Original commit message from CVS:
Docs updates
This commit is contained in:
Wim Taymans 2001-01-05 18:50:41 +00:00
parent f12fae5c3d
commit ded7ca6616
9 changed files with 212 additions and 85 deletions


@ -44,7 +44,8 @@
The basic main function of a chain-based element looks like this:
</para>
<programlisting>
static void chain_function (GstPad *pad, GstBuffer *buffer)
static void
chain_function (GstPad *pad, GstBuffer *buffer)
{
GstBuffer *outbuffer;
@ -55,6 +56,16 @@ static void chain_function (GstPad *pad, GstBuffer *buffer)
gst_pad_push (srcpad, outbuffer);
}
</programlisting>
<para>
Chain-based functions are mainly used for elements that have a one-to-one
relation between their input and output. An example of such an element is
a simple video blur filter: it takes a buffer in, performs the blur
operation on it and sends out the resulting buffer.
</para>
<para>
Another example is a volume filter: it takes audio samples as input, applies
the volume effect and sends out the resulting buffer, as sketched below.
</para>
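<para>
The following is a minimal sketch of what such a volume filter's chain
function could look like. The volume factor and the srcpad variable are
assumed to be part of the element's state, and 16-bit samples are assumed;
this is an illustration, not the actual volume plugin.
</para>
<programlisting>
/* hedged sketch: "volume" and "srcpad" are assumed element state,
 * and the buffer is assumed to hold 16-bit integer samples */
static void
volume_chain (GstPad *pad, GstBuffer *buffer)
{
  gint16 *samples = (gint16 *) GST_BUFFER_DATA (buffer);
  guint num_samples = GST_BUFFER_SIZE (buffer) / sizeof (gint16);
  guint i;

  /* scale every sample in place */
  for (i = 0; i &lt; num_samples; i++)
    samples[i] = samples[i] * volume;

  /* push the modified buffer out on the source pad */
  gst_pad_push (srcpad, buffer);
}
</programlisting>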
</sect1>
@ -71,6 +82,12 @@ static void chain_function (GstPad *pad, GstBuffer *buffer)
buffer = gst_pad_pull (sinkpad);
...
// process buffer, create outbuffer
while (!done) {
....
// optionally request another buffer
buffer = gst_pad_pull (sinkpad);
....
}
...
gst_pad_push (srcpad, outbuffer);
}
@ -87,6 +104,13 @@ static void chain_function (GstPad *pad, GstBuffer *buffer)
complex using cothreads when there are multiple input/output pads for the
loop-based element.
</para>
<para>
Loop-based functions are mainly used for more complex elements that need a
specific amount of data before they can start to produce output. An example
of such an element is the MPEG video decoder: the element pulls a buffer,
performs some decoding on it and optionally requests more buffers to decode;
when a complete video frame has been decoded, a buffer is sent out.
</para>
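<para>
A minimal sketch of such a loop function is shown below. FRAME_SIZE,
sinkpad, srcpad and decode_frame () are hypothetical placeholders that stand
in for real decoder state; the point is the pull-until-complete pattern, not
the decoding itself.
</para>
<programlisting>
/* hedged sketch: FRAME_SIZE, sinkpad, srcpad and decode_frame () are
 * hypothetical placeholders for real decoder state */
static void
loop_function (GstElement *element)
{
  GstBuffer *inbuffer, *outbuffer;
  guint collected = 0;

  while (collected &lt; FRAME_SIZE) {
    /* keep requesting buffers until a whole frame is available */
    inbuffer = gst_pad_pull (sinkpad);
    collected += GST_BUFFER_SIZE (inbuffer);
    /* store the data somewhere the decoder can reach it */
  }

  /* a complete frame has been gathered, decode it and send the result out */
  outbuffer = decode_frame ();
  gst_pad_push (srcpad, outbuffer);
}
</programlisting>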
<para>
There is no problem in putting cothreaded elements into a
<classname>GstThread</classname> to create even more complex pipelines with


@ -42,7 +42,7 @@
</sect1>
<sect1>
<title>MIME Types</title>
<title>More on MIME Types</title>
<para>
GStreamer uses MIME types to identify the different types of data
that can be handled by the elements. They are the high level
@ -77,6 +77,10 @@
This allows for very dynamic and extensible element creation as we
will see.
</para>
<para>
As we have seen in the previous chapter, the MIME types are added
to the Capability structure of a pad.
</para>
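<para>
As an illustration of such a capability, this is how the
<command>gstreamer-inspect</command> tool prints the capability of the sink
pad of the audiosink element: the MIME type together with its properties.
</para>
<screen>
'audiosink_sink':
  MIME type: 'audio/raw':
    format: Integer: 16
    depth: List:
      Integer: 8
      Integer: 16
    rate: Integer range: 8000 - 48000
    channels: Integer range: 1 - 2
</screen>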
<para>
In our helloworld example the elements we constructed would have the


@ -325,8 +325,8 @@ eos (GstSrc *src)
you can create a custom MP3 element with a higher-level API.
</para>
<para>
It should be clear from the example that we can vary easily replace the
disksrc element with a httpsrc, giving you instant network streaming.
It should be clear from the example that we can very easily replace the
disksrc element with an httpsrc, giving you instant network streaming.
An element could be built to handle icecast connections, for example.
</para>
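<para>
A hedged sketch of that replacement is shown below; the httpsrc element name
comes from the text above, but its "location" property is an assumption,
mirrored from the disksrc element used in the example.
</para>
<programlisting>
GstElement *src;

/* sketch: swap the disk source for a network source; the "location"
 * property of httpsrc is assumed here to mirror the one of disksrc */
src = gst_elementfactory_make ("httpsrc", "net_source");
gtk_object_set (GTK_OBJECT (src), "location",
                "http://example.com/some_song.mp3", NULL);
</programlisting>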
<para>


@ -7,7 +7,7 @@
</para>
<para>
In this chapter we will introduce you to autoplugging. Using the MIME
types of the elements GStreamer can automatically create a pipeline
types of the elements, <application>GStreamer</application> can automatically create a pipeline
for you.
</para>
@ -26,63 +26,63 @@
static gboolean playing;
/* eos will be called when the src element has an end of stream */
void eos(GstSrc *src)
void
eos (GstSrc *src)
{
g_print("have eos, quitting\n");
g_print ("have eos, quitting\n");
playing = FALSE;
}
int main(int argc,char *argv[])
int
main (int argc, char *argv[])
{
GstElement *disksrc, *audiosink;
GstElement *pipeline;
if (argc != 2) {
g_print("usage: %s &lt;filename&gt;\n", argv[0]);
exit(-1);
g_print ("usage: %s &lt;filename&gt;\n", argv[0]);
exit (-1);
}
gst_init(&amp;argc,&amp;argv);
gst_init (&amp;argc, &amp;argv);
/* create a new bin to hold the elements */
pipeline = gst_pipeline_new("pipeline");
pipeline = gst_pipeline_new ("pipeline");
/* create a disk reader */
disksrc = gst_elementfactory_make("disksrc", "disk_source");
gtk_object_set(GTK_OBJECT(disksrc),"location", argv[1],NULL);
gtk_signal_connect(GTK_OBJECT(disksrc),"eos",
GTK_SIGNAL_FUNC(eos),NULL);
disksrc = gst_elementfactory_make ("disksrc", "disk_source");
gtk_object_set (GTK_OBJECT (disksrc), "location", argv[1], NULL);
gtk_signal_connect (GTK_OBJECT (disksrc), "eos",
GTK_SIGNAL_FUNC (eos), NULL);
/* and an audio sink */
audiosink = gst_elementfactory_make("audiosink", "play_audio");
audiosink = gst_elementfactory_make ("audiosink", "play_audio");
/* add objects to the main pipeline */
gst_pipeline_add_src(GST_PIPELINE(pipeline), disksrc);
gst_pipeline_add_sink(GST_PIPELINE(pipeline), audiosink);
gst_pipeline_add_src (GST_PIPELINE (pipeline), disksrc);
gst_pipeline_add_sink (GST_PIPELINE (pipeline), audiosink);
if (!gst_pipeline_autoplug(GST_PIPELINE(pipeline))) {
g_print("unable to handle stream\n");
exit(-1);
if (!gst_pipeline_autoplug (GST_PIPELINE (pipeline))) {
g_print ("unable to handle stream\n");
exit (-1);
}
/* make it ready */
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY);
/* start playing */
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
playing = TRUE;
while (playing) {
gst_bin_iterate(GST_BIN(pipeline));
gst_bin_iterate (GST_BIN (pipeline));
}
/* stop the bin */
gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
gst_pipeline_destroy(pipeline);
gst_pipeline_destroy (pipeline);
exit(0);
exit (0);
}
</programlisting>
@ -99,9 +99,9 @@ int main(int argc,char *argv[])
<programlisting>
...
if (!gst_pipeline_autoplug(pipeline)) {
g_print("unable to handle stream\n");
exit(-1);
if (!gst_pipeline_autoplug (pipeline)) {
g_print ("unable to handle stream\n");
exit (-1);
}
...
@ -147,7 +147,7 @@ int main(int argc,char *argv[])
<para>
If you really want, you can use the GStreamer components to do the
autoplugging yourself.
autoplugging yourself. We will cover this topic in the dynamic pipeline chapter.
</para>
<para>
@ -165,9 +165,9 @@ int main(int argc,char *argv[])
</programlisting>
<para>
You can also try to use an AVI or MPEG file as its input. Using autoplugging,
GStreamer will automatically figure out how to handle the stream. Remember that
only the audio part will be played because we have only added an audiosink to
the pipeline.
<application>GStreamer</application> will automatically figure out how to
handle the stream. Remember that only the audio part will be played because
we have only added an audiosink to the pipeline.
</para>
<programlisting>
./helloworld2 mymovie.mpeg


@ -157,6 +157,6 @@ Single
4 0 0 50 0 0 12 0.0000 4 135 75 3675 5475 ?\001
4 0 0 50 0 0 12 0.0000 4 135 735 9825 5475 audio/raw\001
4 0 0 50 0 0 12 0.0000 4 180 855 4350 5325 audio/mpeg\001
4 0 0 50 0 0 12 0.0000 4 180 1395 5475 5625 audio/mpeg-frame\001
4 0 0 50 0 0 12 0.0000 4 135 735 8700 5625 audio/raw\001
4 0 0 50 0 0 12 0.0000 4 180 1395 6825 5325 audio/mpeg-frame\001
4 0 0 50 0 0 12 0.0000 4 180 855 5775 5625 audio/mpeg\001
4 0 0 50 0 0 12 0.0000 4 180 855 7125 5325 audio/mpeg\001


@ -116,6 +116,26 @@ Pads:
</itemizedlist>
</sect2>
<sect2 id="sec-pads-caps-use">
<title>What are the capabilities used for?</title>
<para>
Capabilities describe in great detail the type of media that is handled by the pads.
They are mostly used for:
</para>
<itemizedlist>
<listitem>
<para>
Autoplugging: automatically finding plugins for a set of capabilities
(a condensed sketch follows this list)
</para>
</listitem>
<listitem>
<para>
Compatibility detection: when two pads are connected, <application>GStreamer</application>
can verify whether the two pads are talking about the same media type.
</para>
</listitem>
</itemizedlist>
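<para>
As an example of the autoplugging case, this is the relevant fragment of the
autoplugging (helloworld2) example in this manual, condensed: the pipeline
looks at the capabilities of the source and sink pads and fills in whatever
elements are needed to connect them.
</para>
<programlisting>
/* condensed from the autoplugging example: gst_pipeline_autoplug ()
 * uses the pad capabilities to find and insert the missing elements */
gst_pipeline_add_src (GST_PIPELINE (pipeline), disksrc);
gst_pipeline_add_sink (GST_PIPELINE (pipeline), audiosink);

if (!gst_pipeline_autoplug (GST_PIPELINE (pipeline))) {
  g_print ("unable to handle stream\n");
  exit (-1);
}
</programlisting>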
</sect2>
</sect1>
</chapter>


@ -103,6 +103,88 @@ gstreamer-launch disksrc redpill.vob ! css-descramble ! private_stream_1.0 ! \
</para>
</sect1>
<sect1>
<title><command>gstreamer-inspect</command></title>
<para>
This is a tool to query a plugin or an element about its properties.
</para>
<para>
To query the information about the element mpg123, you would specify:
</para>
<screen>
gstreamer-inspect mpg123
</screen>
<para>
Below is the output of a query for the audiosink element:
</para>
<screen>
Factory Details:
  Long name: Audio Sink (OSS)
  Class: Sink/Audio
  Description: Output to a sound card via OSS
  Version: 0.1.0
  Author(s): Erik Walthinsen &lt;omega@cse.ogi.edu&gt;
  Copyright: (C) 1999
Pad Templates:
  SINK template: 'sink'
    Exists: Always
    Capabilities:
      'audiosink_sink':
        MIME type: 'audio/raw':
          format: Integer: 16
          depth: List:
            Integer: 8
            Integer: 16
          rate: Integer range: 8000 - 48000
          channels: Integer range: 1 - 2
Element Flags:
  GST_ELEMENT_THREADSUGGESTED
  no flags set
Element Implementation:
  No loopfunc(), must be chain-based or not configured yet
  Has change_state() function
Pads:
  SINK: 'sink'
    Implementation:
      Has chainfunc(): 0x4001cde8
      Has default eosfunc() gst_pad_eos_func()
    Pad Template: 'sink'
      Capabilities:
        'audiosink_sink':
          MIME type: 'audio/raw':
            format: Integer: 16
            depth: List:
              Integer: 8
              Integer: 16
            rate: Integer range: 8000 - 48000
            channels: Integer range: 1 - 2
Element Arguments:
  GstAudioSink::mute: Boolean
  GstAudioSink::format: Enum (default 16)
    (8): 8 Bits
    (16): 16 Bits
  GstAudioSink::channels: Enum (default 2)
    (1): Mono
    (2): Stereo
  GstAudioSink::frequency: Integer
</screen>
<para>
To query the information about a plugin, you would do:
</para>
<screen>
gstreamer-inspect gstelements
</screen>
</sect1>
<sect1>
<title><command>gstmediaplay</command></title>
<para>


@ -1,20 +1,20 @@
<chapter id="cha-queues">
<title>Queues</title>
<para>
A <classname>GstQueue</classname> is an implementation of a <classname>GstConnection</classname>.
A <classname>GstQueue</classname> is a filter element.
Queues can be used to connect two elements in such a way that the data can
be buffered.
</para>
<para>
A buffer that is sent to a queue will not automatically be pushed to the
next connected element but will be buffered. It will be pushed to the next
element as soon as gst_connection_push() is called.
element as soon as gst_connection_push () is called.
</para>
<para>
Queues are mostly used in conjunction with a <classname>GstThread</classname> to
provide an external connection for the thread elements. You could have one
thread feeding buffers into a <classname>GstQueue</classname> and another
thread repeatedly calling gst_connection_push() on the queue to feed its
thread repeatedly calling gst_connection_push () on the queue to feed its
internal elements.
</para>
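<para>
A hedged sketch of this setup follows, reusing the funky_src and cool_effect
elements from the threads example. The gst_element_get_pad () and
gst_pad_connect () calls are assumptions made here for illustration; the
point is that the queue sits on the boundary between the thread and the rest
of the pipeline.
</para>
<programlisting>
GstElement *queue;

/* the queue is an ordinary element, created through the element factory */
queue = gst_elementfactory_make ("queue", "buffer_queue");

/* the source and the queue live inside the thread */
gst_bin_add (GST_BIN (my_thread), GST_ELEMENT (funky_src));
gst_bin_add (GST_BIN (my_thread), queue);
gst_pad_connect (gst_element_get_pad (GST_ELEMENT (funky_src), "src"),
                 gst_element_get_pad (queue, "sink"));

/* the rest of the pipeline pulls the buffered data out of the queue */
gst_pad_connect (gst_element_get_pad (queue, "src"),
                 gst_element_get_pad (GST_ELEMENT (cool_effect), "sink"));
</programlisting>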


@ -14,21 +14,18 @@
GstElement *my_thread;
// create the thread object
my_thread = gst_thread_new("my_thread");
g_return_if_fail(audio_thread != NULL);
my_thread = gst_thread_new ("my_thread");
g_return_if_fail (my_thread != NULL);
// add some plugins
gst_bin_add(GST_BIN(my_thread),GST_ELEMENT(funky_src));
gst_bin_add(GST_BIN(my_thread),GST_ELEMENT(cool_effect));
gst_bin_add (GST_BIN (my_thread), GST_ELEMENT (funky_src));
gst_bin_add (GST_BIN (my_thread), GST_ELEMENT (cool_effect));
// connect the elements here...
...
// prepare the thread
gst_element_set_state(GST_ELEMENT(my_thread),GST_STATE_READY);
// start playing
gst_element_set_state(GST_ELEMENT(my_thread),GST_STATE_PLAYING);
gst_element_set_state (GST_ELEMENT (my_thread), GST_STATE_PLAYING);
</programlisting>
@ -39,8 +36,8 @@
<note>
<para>
The thread must contain at least one element of type <classname>GstSrc</classname>
or <classname>GstConnection</classname> in order to work.
A thread should normally contain a source element. Most often, the thread
is fed with data from a queue.
</para>
</note>
@ -60,82 +57,82 @@
#include &lt;gst/gst.h&gt;
/* eos will be called when the src element has an end of stream */
void eos(GstSrc *src, gpointer data)
void
eos (GstSrc *src, gpointer data)
{
GstThread *thread = GST_THREAD(data);
g_print("have eos, quitting\n");
GstThread *thread = GST_THREAD (data);
g_print ("have eos, quitting\n");
/* stop the bin */
gst_element_set_state(GST_ELEMENT(thread), GST_STATE_NULL);
gst_element_set_state (GST_ELEMENT (thread), GST_STATE_NULL);
gst_main_quit();
gst_main_quit ();
}
int main(int argc,char *argv[])
int
main (int argc, char *argv[])
{
GstElement *disksrc, *audiosink;
GstElement *pipeline;
GstElement *thread;
if (argc != 2) {
g_print("usage: %s &lt;filename&gt;\n", argv[0]);
exit(-1);
g_print ("usage: %s &lt;filename&gt;\n", argv[0]);
exit (-1);
}
gst_init(&amp;argc,&amp;argv);
gst_init (&amp;argc, &amp;argv);
/* create a new thread to hold the elements */
thread = gst_thread_new("thread");
g_assert(thread != NULL);
thread = gst_thread_new ("thread");
g_assert (thread != NULL);
/* create a new bin to hold the elements */
pipeline = gst_pipeline_new("pipeline");
g_assert(pipeline != NULL);
pipeline = gst_pipeline_new ("pipeline");
g_assert (pipeline != NULL);
/* create a disk reader */
disksrc = gst_elementfactory_make("disksrc", "disk_source");
g_assert(disksrc != NULL);
gtk_object_set(GTK_OBJECT(disksrc),"location", argv[1],NULL);
gtk_signal_connect(GTK_OBJECT(disksrc),"eos",
GTK_SIGNAL_FUNC(eos), thread);
disksrc = gst_elementfactory_make ("disksrc", "disk_source");
g_assert (disksrc != NULL);
gtk_object_set (GTK_OBJECT (disksrc), "location", argv[1], NULL);
gtk_signal_connect (GTK_OBJECT (disksrc), "eos",
GTK_SIGNAL_FUNC (eos), thread);
/* and an audio sink */
audiosink = gst_elementfactory_make("audiosink", "play_audio");
g_assert(audiosink != NULL);
audiosink = gst_elementfactory_make ("audiosink", "play_audio");
g_assert (audiosink != NULL);
/* add objects to the main pipeline */
gst_bin_add(GST_BIN(pipeline), disksrc);
gst_bin_add(GST_BIN(pipeline), audiosink);
gst_bin_add (GST_BIN (pipeline), disksrc);
gst_bin_add (GST_BIN (pipeline), audiosink);
/* automatically setup the pipeline */
if (!gst_pipeline_autoplug(GST_PIPELINE(pipeline))) {
g_print("unable to handle stream\n");
exit(-1);
if (!gst_pipeline_autoplug (GST_PIPELINE (pipeline))) {
g_print ("unable to handle stream\n");
exit (-1);
}
/* remove the source element from the pipeline */
gst_bin_remove(GST_BIN(pipeline), disksrc);
gst_bin_remove (GST_BIN (pipeline), disksrc);
/* insert the source element in the thread, remember a thread needs at
least one source or connection element */
gst_bin_add(GST_BIN(thread), disksrc);
gst_bin_add (GST_BIN (thread), disksrc);
/* add the pipeline to the thread too */
gst_bin_add(GST_BIN(thread), GST_ELEMENT(pipeline));
gst_bin_add (GST_BIN (thread), GST_ELEMENT (pipeline));
/* make it ready */
gst_element_set_state(GST_ELEMENT(thread), GST_STATE_READY);
/* start playing */
gst_element_set_state(GST_ELEMENT(thread), GST_STATE_PLAYING);
gst_element_set_state (GST_ELEMENT (thread), GST_STATE_PLAYING);
/* do whatever you want here, the thread will be playing */
...
gst_main();
gst_main ();
gst_pipeline_destroy(thread);
gst_pipeline_destroy (thread);
exit(0);
exit (0);
}
</programlisting>