From ded7ca661667d922aa8368955b8b485c467a6078 Mon Sep 17 00:00:00 2001 From: Wim Taymans Date: Fri, 5 Jan 2001 18:50:41 +0000 Subject: [PATCH] Docs updates Original commit message from CVS: Docs updates --- docs/manual/cothreads.sgml | 26 ++++++++++- docs/manual/factories.sgml | 6 ++- docs/manual/helloworld.sgml | 4 +- docs/manual/helloworld2.sgml | 64 +++++++++++++-------------- docs/manual/mime-world.fig | 4 +- docs/manual/pads.sgml | 20 +++++++++ docs/manual/programs.sgml | 82 ++++++++++++++++++++++++++++++++++ docs/manual/queues.sgml | 6 +-- docs/manual/threads.sgml | 85 +++++++++++++++++------------------- 9 files changed, 212 insertions(+), 85 deletions(-) diff --git a/docs/manual/cothreads.sgml b/docs/manual/cothreads.sgml index d97bab2a49..7fc9c67964 100644 --- a/docs/manual/cothreads.sgml +++ b/docs/manual/cothreads.sgml @@ -44,7 +44,8 @@ The basic main function of a chain-based element is like: -static void chain_function (GstPad *pad, GstBuffer *buffer) +static void +chain_function (GstPad *pad, GstBuffer *buffer) { GstBuffer *outbuffer; @@ -55,6 +56,16 @@ static void chain_function (GstPad *pad, GstBuffer *buffer) gst_pad_push (srcpad, outbuffer); } + + Chain based functions are mainly used for elements that have a one to one + relation between their input and output behaviour. An example of such an + element can be a simple video blur filter. The filter takes a buffer in, performs + the blur operation on it and sends out the resulting buffer. + + + Another element, for example, is a volume filter. The filter takes audio samples as + input, performs the volume effect and sends out the resulting buffer. + @@ -71,6 +82,12 @@ static void chain_function (GstPad *pad, GstBuffer *buffer) buffer = gst_pad_pull (sinkpad); ... // process buffer, create outbuffer + while (!done) { + .... + // optionally request another buffer + buffer = gst_pad_pull (sinkpad); + .... + } ... 
gst_pad_push (srcpad, outbuffer); } @@ -87,6 +104,13 @@ static void chain_function (GstPad *pad, GstBuffer *buffer) complex using cothreads when there are multiple input/output pads for the loop-based element. + + Loop based elements are mainly used for the more complex elements that need a + specific amount of data before they can start to produce output. An example + of such an element is the mpeg video decoder. The element will pull a buffer, + perform some decoding on it and optionally request more buffers to decode. When + a complete video frame has been decoded, a buffer is sent out. + There is no problem in putting cothreaded elements into a GstThread to create even more complex pipelines with diff --git a/docs/manual/factories.sgml b/docs/manual/factories.sgml index 810b5ef136..cba70f3491 100644 --- a/docs/manual/factories.sgml +++ b/docs/manual/factories.sgml @@ -42,7 +42,7 @@ - MIME Types + more on MIME Types GStreamer uses MIME types to indentify the different types of data that can be handled by the elements. They are the high level @@ -77,6 +77,10 @@ This allows for very dynamic and extensible element creation as we will see. + + As we have seen in the previous chapter, the MIME types are added + to the Capability structure of a pad. + In our helloworld example the elements we constructed would have the diff --git a/docs/manual/helloworld.sgml b/docs/manual/helloworld.sgml index ee71f4473f..f4acf28185 100644 --- a/docs/manual/helloworld.sgml +++ b/docs/manual/helloworld.sgml @@ -325,8 +325,8 @@ eos (GstSrc *src) you can create a custom MP3 element with a more high level API. - It should be clear from the example that we can vary easily replace the - disksrc element with a httpsrc, giving you instant network streaming. + It should be clear from the example that we can very easily replace the + disksrc element with an httpsrc, giving you instant network streaming. An element could be build to handle icecast connections, for example. 
diff --git a/docs/manual/helloworld2.sgml b/docs/manual/helloworld2.sgml index 5be1aabf08..98c2e96736 100644 --- a/docs/manual/helloworld2.sgml +++ b/docs/manual/helloworld2.sgml @@ -7,7 +7,7 @@ In this chapter we will introduce you to autoplugging. Using the MIME - types of the elements GStreamer can automatically create a pipeline + types of the elements GStreamer can automatically create a pipeline for you. @@ -26,63 +26,63 @@ static gboolean playing; /* eos will be called when the src element has an end of stream */ -void eos(GstSrc *src) +void +eos (GstSrc *src) { - g_print("have eos, quitting\n"); + g_print ("have eos, quitting\n"); playing = FALSE; } -int main(int argc,char *argv[]) +int +main (int argc, char *argv[]) { GstElement *disksrc, *audiosink; GstElement *pipeline; if (argc != 2) { - g_print("usage: %s <filename>\n", argv[0]); - exit(-1); + g_print ("usage: %s <filename>\n", argv[0]); + exit (-1); } - gst_init(&argc,&argv); + gst_init (&argc, &argv); /* create a new bin to hold the elements */ - pipeline = gst_pipeline_new("pipeline"); + pipeline = gst_pipeline_new ("pipeline"); /* create a disk reader */ - disksrc = gst_elementfactory_make("disksrc", "disk_source"); - gtk_object_set(GTK_OBJECT(disksrc),"location", argv[1],NULL); - gtk_signal_connect(GTK_OBJECT(disksrc),"eos", - GTK_SIGNAL_FUNC(eos),NULL); + disksrc = gst_elementfactory_make ("disksrc", "disk_source"); + gtk_object_set (GTK_OBJECT (disksrc), "location", argv[1], NULL); + gtk_signal_connect (GTK_OBJECT (disksrc), "eos", + GTK_SIGNAL_FUNC (eos), NULL); /* and an audio sink */ - audiosink = gst_elementfactory_make("audiosink", "play_audio"); + audiosink = gst_elementfactory_make ("audiosink", "play_audio"); /* add objects to the main pipeline */ - gst_pipeline_add_src(GST_PIPELINE(pipeline), disksrc); - gst_pipeline_add_sink(GST_PIPELINE(pipeline), audiosink); + gst_pipeline_add_src (GST_PIPELINE (pipeline), disksrc); + gst_pipeline_add_sink (GST_PIPELINE (pipeline), audiosink); - if 
(!gst_pipeline_autoplug(GST_PIPELINE(pipeline))) { - g_print("unable to handle stream\n"); - exit(-1); + if (!gst_pipeline_autoplug (GST_PIPELINE (pipeline))) { + g_print ("unable to handle stream\n"); + exit (-1); } - /* make it ready */ - gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY); /* start playing */ - gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING); + gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); playing = TRUE; while (playing) { - gst_bin_iterate(GST_BIN(pipeline)); + gst_bin_iterate (GST_BIN (pipeline)); } /* stop the bin */ - gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL); + gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL); - gst_pipeline_destroy(pipeline); + gst_pipeline_destroy (pipeline); - exit(0); + exit (0); } @@ -99,9 +99,9 @@ int main(int argc,char *argv[]) ... - if (!gst_pipeline_autoplug(pipeline)) { - g_print("unable to handle stream\n"); - exit(-1); + if (!gst_pipeline_autoplug (pipeline)) { + g_print ("unable to handle stream\n"); + exit (-1); } ... @@ -147,7 +147,7 @@ int main(int argc,char *argv[]) If you really want, you can use the GSteamer components to do the - autoplugging yourself. + autoplugging yourself. We will cover this topic in the dynamic pipeline chapter. @@ -165,9 +165,9 @@ int main(int argc,char *argv[]) You can also try to use an AVI or MPEG file as its input. Using autoplugging, - GStreamer will automatically figure out how to handle the stream. Remember that - only the audio part will be played because we have only added an audiosink to - the pipeline. + GStreamer will automatically figure out how to + handle the stream. Remember that only the audio part will be played because + we have only added an audiosink to the pipeline. 
./helloworld2 mymovie.mpeg diff --git a/docs/manual/mime-world.fig b/docs/manual/mime-world.fig index 61814797c8..0997636c91 100644 --- a/docs/manual/mime-world.fig +++ b/docs/manual/mime-world.fig @@ -157,6 +157,6 @@ Single 4 0 0 50 0 0 12 0.0000 4 135 75 3675 5475 ?\001 4 0 0 50 0 0 12 0.0000 4 135 735 9825 5475 audio/raw\001 4 0 0 50 0 0 12 0.0000 4 180 855 4350 5325 audio/mpeg\001 -4 0 0 50 0 0 12 0.0000 4 180 1395 5475 5625 audio/mpeg-frame\001 4 0 0 50 0 0 12 0.0000 4 135 735 8700 5625 audio/raw\001 -4 0 0 50 0 0 12 0.0000 4 180 1395 6825 5325 audio/mpeg-frame\001 +4 0 0 50 0 0 12 0.0000 4 180 855 5775 5625 audio/mpeg\001 +4 0 0 50 0 0 12 0.0000 4 180 855 7125 5325 audio/mpeg\001 diff --git a/docs/manual/pads.sgml b/docs/manual/pads.sgml index b802b6592f..5a7fbf050d 100644 --- a/docs/manual/pads.sgml +++ b/docs/manual/pads.sgml @@ -116,6 +116,26 @@ Pads: + + What are the capabilities used for? + + Capabilities describe in great detail the type of media that is handled by the pads. + They are mostly used for: + + + + + Autoplugging: automatically finding plugins for a set of capabilities + + + + + Compatibility detection: when two pads are connected, GStreamer + can verify if the two pads are talking about the same media types. + + + + diff --git a/docs/manual/programs.sgml b/docs/manual/programs.sgml index 6cb94f40c3..080dec661e 100644 --- a/docs/manual/programs.sgml +++ b/docs/manual/programs.sgml @@ -103,6 +103,88 @@ gstreamer-launch disksrc redpill.vob ! css-descramble ! private_stream_1.0 ! \ + + <command>gstreamer-inspect</command> + + This is a tool to query a plugin or an element about its properties. 
+ + + To query the information about the element mpg123, you would specify: + + + +gstreamer-inspect mpg123 + + + + Below is the output of a query for the audiosink element: + + + +Factory Details: + Long name: Audio Sink (OSS) + Class: Sink/Audio + Description: Output to a sound card via OSS + Version: 0.1.0 + Author(s): Erik Walthinsen <omega@cse.ogi.edu> + Copyright: (C) 1999 + +Pad Templates: + SINK template: 'sink' + Exists: Always + Capabilities: + 'audiosink_sink': + MIME type: 'audio/raw': + format: Integer: 16 + depth: List: + Integer: 8 + Integer: 16 + rate: Integer range: 8000 - 48000 + channels: Integer range: 1 - 2 + +Element Flags: + GST_ELEMENT_THREADSUGGESTED + no flags set + +Element Implementation: + No loopfunc(), must be chain-based or not configured yet + Has change_state() function + +Pads: + SINK: 'sink' + Implementation: + Has chainfunc(): 0x4001cde8 + Has default eosfunc() gst_pad_eos_func() + Pad Template: 'sink' + Capabilities: + 'audiosink_sink': + MIME type: 'audio/raw': + format: Integer: 16 + depth: List: + Integer: 8 + Integer: 16 + rate: Integer range: 8000 - 48000 + channels: Integer range: 1 - 2 + +Element Arguments: + GstAudioSink::mute: Boolean + GstAudioSink::format: Enum (default 16) + (8): 8 Bits + (16): 16 Bits + GstAudioSink::channels: Enum (default 2) + (1): Mono + (2): Stereo + GstAudioSink::frequency: Integer + + + + To query the information about a plugin, you would do: + + + +gstreamer-inspect gstelements + + <command>gstmediaplay</command> diff --git a/docs/manual/queues.sgml b/docs/manual/queues.sgml index aaf86cb8bb..93d25b60f4 100644 --- a/docs/manual/queues.sgml +++ b/docs/manual/queues.sgml @@ -1,20 +1,20 @@ Queues - A GstQueue is an implementation of a GstConnection. + A GstQueue is a filter element. Queues can be used to connect two elements in such way that the data can be buffered. A buffer that is sinked to a Queue will not automatically be pushed to the next connected element but will be buffered. 
It will be pushed to the next - element as soon as gst_connection_push() is called. + element as soon as gst_connection_push () is called. Queues are mostly used in conjunction with a GstThread to provide an external connection for the thread elements. You could have one thread feeding buffers into a GstQueue and another - thread repeadedly calling gst_connection_push() on the queue to feed its + thread repeatedly calling gst_connection_push () on the queue to feed its internal elements. diff --git a/docs/manual/threads.sgml b/docs/manual/threads.sgml index 6de7861af3..29908ade29 100644 --- a/docs/manual/threads.sgml +++ b/docs/manual/threads.sgml @@ -14,21 +14,18 @@ GstElement *my_thread; // create the thread object - my_thread = gst_thread_new("my_thread"); - g_return_if_fail(audio_thread != NULL); + my_thread = gst_thread_new ("my_thread"); + g_return_if_fail (my_thread != NULL); // add some plugins - gst_bin_add(GST_BIN(my_thread),GST_ELEMENT(funky_src)); - gst_bin_add(GST_BIN(my_thread),GST_ELEMENT(cool_effect)); + gst_bin_add (GST_BIN (my_thread), GST_ELEMENT (funky_src)); + gst_bin_add (GST_BIN (my_thread), GST_ELEMENT (cool_effect)); // connect the elements here... ... - // prepare the thread - gst_element_set_state(GST_ELEMENT(my_thread),GST_STATE_READY); - // start playing - gst_element_set_state(GST_ELEMENT(my_thread),GST_STATE_PLAYING); + gst_element_set_state (GST_ELEMENT (my_thread), GST_STATE_PLAYING); @@ -39,8 +36,8 @@ - The thread must contain at least one element of type GstSrc - or GstConnection in order to work. + A thread should normally contain a source element. Most often, the thread + is fed with data from a queue. 
@@ -60,82 +57,82 @@ #include <gst/gst.h> /* eos will be called when the src element has an end of stream */ -void eos(GstSrc *src, gpointer data) +void +eos (GstSrc *src, gpointer data) { - GstThread *thread = GST_THREAD(data); - g_print("have eos, quitting\n"); + GstThread *thread = GST_THREAD (data); + g_print ("have eos, quitting\n"); /* stop the bin */ - gst_element_set_state(GST_ELEMENT(thread), GST_STATE_NULL); + gst_element_set_state (GST_ELEMENT (thread), GST_STATE_NULL); - gst_main_quit(); + gst_main_quit (); } -int main(int argc,char *argv[]) +int +main (int argc, char *argv[]) { GstElement *disksrc, *audiosink; GstElement *pipeline; GstElement *thread; if (argc != 2) { - g_print("usage: %s <filename>\n", argv[0]); - exit(-1); + g_print ("usage: %s <filename>\n", argv[0]); + exit (-1); } - gst_init(&argc,&argv); + gst_init (&argc, &argv); /* create a new thread to hold the elements */ - thread = gst_thread_new("thread"); - g_assert(thread != NULL); + thread = gst_thread_new ("thread"); + g_assert (thread != NULL); /* create a new bin to hold the elements */ - pipeline = gst_pipeline_new("pipeline"); - g_assert(pipeline != NULL); + pipeline = gst_pipeline_new ("pipeline"); + g_assert (pipeline != NULL); /* create a disk reader */ - disksrc = gst_elementfactory_make("disksrc", "disk_source"); - g_assert(disksrc != NULL); - gtk_object_set(GTK_OBJECT(disksrc),"location", argv[1],NULL); - gtk_signal_connect(GTK_OBJECT(disksrc),"eos", - GTK_SIGNAL_FUNC(eos), thread); + disksrc = gst_elementfactory_make ("disksrc", "disk_source"); + g_assert (disksrc != NULL); + gtk_object_set (GTK_OBJECT (disksrc), "location", argv[1], NULL); + gtk_signal_connect (GTK_OBJECT (disksrc), "eos", + GTK_SIGNAL_FUNC (eos), thread); /* and an audio sink */ - audiosink = gst_elementfactory_make("audiosink", "play_audio"); - g_assert(audiosink != NULL); + audiosink = gst_elementfactory_make ("audiosink", "play_audio"); + g_assert (audiosink != NULL); /* add objects to the main pipeline 
*/ - gst_bin_add(GST_BIN(pipeline), disksrc); - gst_bin_add(GST_BIN(pipeline), audiosink); + gst_bin_add (GST_BIN (pipeline), disksrc); + gst_bin_add (GST_BIN (pipeline), audiosink); /* automatically setup the pipeline */ - if (!gst_pipeline_autoplug(GST_PIPELINE(pipeline))) { - g_print("unable to handle stream\n"); - exit(-1); + if (!gst_pipeline_autoplug (GST_PIPELINE (pipeline))) { + g_print ("unable to handle stream\n"); + exit (-1); } /* remove the source element from the pipeline */ - gst_bin_remove(GST_BIN(pipeline), disksrc); + gst_bin_remove (GST_BIN (pipeline), disksrc); /* insert the source element in the thread, remember a thread needs at least one source or connection element */ - gst_bin_add(GST_BIN(thread), disksrc); + gst_bin_add (GST_BIN (thread), disksrc); /* add the pipeline to the thread too */ - gst_bin_add(GST_BIN(thread), GST_ELEMENT(pipeline)); + gst_bin_add (GST_BIN (thread), GST_ELEMENT (pipeline)); - /* make it ready */ - gst_element_set_state(GST_ELEMENT(thread), GST_STATE_READY); /* start playing */ - gst_element_set_state(GST_ELEMENT(thread), GST_STATE_PLAYING); + gst_element_set_state (GST_ELEMENT (thread), GST_STATE_PLAYING); /* do whatever you want here, the thread will be playing */ ... - gst_main(); + gst_main (); - gst_pipeline_destroy(thread); + gst_pipeline_destroy (thread); - exit(0); + exit (0); }