diff --git a/docs/manual/dynamic.sgml b/docs/manual/dynamic.sgml
index 3f7990a660..4cfdcf6846 100644
--- a/docs/manual/dynamic.sgml
+++ b/docs/manual/dynamic.sgml
@@ -14,4 +14,195 @@
when a pad is created. We will use this to create our MPEG1 player.
+
+ We'll start with a simple main function:
+
+
+#include <string.h>
+#include <stdlib.h>
+
+#include <gnome.h>
+#include <gst/gst.h>
+
+// defined below: builds the audio or video branch when a new pad appears
+void new_pad_created (GstElement *parse, GstPad *pad, GstElement *pipeline);
+
+// called when the disksrc reaches the end of the stream
+void eof(GstElement *src) {
+ g_print("have eos, quitting\n");
+ exit(0);
+}
+
+gboolean
+idle_func (gpointer data)
+{
+ gst_bin_iterate (GST_BIN (data));
+ return TRUE;
+}
+
+int
+main(int argc, char *argv[])
+{
+ GstElement *pipeline, *src, *parse;
+
+ gst_init (&argc, &argv);
+ gnome_init ("MPEG1 Video player","0.0.1", argc, argv);
+
+ pipeline = gst_pipeline_new ("pipeline");
+ g_return_val_if_fail (pipeline != NULL, -1);
+
+ src = gst_elementfactory_make ("disksrc", "src");
+ g_return_val_if_fail (src != NULL, -1);
+ gtk_object_set (GTK_OBJECT (src), "location", argv[1], NULL);
+
+ parse = gst_elementfactory_make ("mpeg1parse", "parse");
+ g_return_val_if_fail (parse != NULL, -1);
+
+ gst_bin_add (GST_BIN (pipeline), GST_ELEMENT (src));
+ gst_bin_add (GST_BIN (pipeline), GST_ELEMENT (parse));
+
+ gtk_signal_connect (GTK_OBJECT (parse), "new_pad",
+ GTK_SIGNAL_FUNC (new_pad_created), pipeline);
+
+ gtk_signal_connect (GTK_OBJECT (src), "eos",
+ GTK_SIGNAL_FUNC (eof), NULL);
+
+ gst_pad_connect (gst_element_get_pad (src, "src"),
+ gst_element_get_pad (parse, "sink"));
+
+ gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
+
+ g_idle_add (idle_func, pipeline);
+
+ gdk_threads_enter ();
+ gst_main ();
+ gdk_threads_leave ();
+
+ return 0;
+}
+
+
+
+ We create two elements: a disksrc (the element that will read the
+ file from disk) and an mpeg1parse. We also connect to the EOS (End Of Stream)
+ signal of the disksrc so that we will be notified when the end of the file is reached.
+ There's nothing special about this piece of code except for the signal
+ 'new_pad' that we connected to the mpeg1parse using:
+
+
+ gtk_signal_connect (GTK_OBJECT (parse), "new_pad",
+ GTK_SIGNAL_FUNC (new_pad_created), pipeline);
+
+
+ When an elementary stream is detected in the system stream,
+ mpeg1parse creates a new pad that will provide the data of that
+ elementary stream. Our 'new_pad_created' function will be called
+ whenever such a pad is created:
+
+
+void
+new_pad_created (GstElement *parse, GstPad *pad, GstElement *pipeline)
+{
+ GstElement *parse_audio, *parse_video, *decode, *decode_video, *play, *videoscale, *show;
+ GstElement *audio_queue, *video_queue;
+ GstElement *audio_thread, *video_thread;
+
+ GtkWidget *appwindow;
+
+ g_print ("***** a new pad %s was created\n", gst_pad_get_name (pad));
+
+ gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);
+
+ // connect to audio pad
+ if (strncmp (gst_pad_get_name (pad), "audio_", 6) == 0) {
+
+ // construct internal pipeline elements
+ parse_audio = gst_elementfactory_make ("mp3parse", "parse_audio");
+ g_return_if_fail (parse_audio != NULL);
+ decode = gst_elementfactory_make ("mpg123", "decode_audio");
+ g_return_if_fail (decode != NULL);
+ play = gst_elementfactory_make ("audiosink", "play_audio");
+ g_return_if_fail (play != NULL);
+
+ // create the thread and pack stuff into it
+ audio_thread = gst_thread_new ("audio_thread");
+ g_return_if_fail (audio_thread != NULL);
+ gst_bin_add (GST_BIN (audio_thread), GST_ELEMENT (parse_audio));
+ gst_bin_add (GST_BIN (audio_thread), GST_ELEMENT (decode));
+ gst_bin_add (GST_BIN (audio_thread), GST_ELEMENT (play));
+
+ // set up pad connections
+ gst_element_add_ghost_pad (GST_ELEMENT (audio_thread),
+ gst_element_get_pad (parse_audio, "sink"));
+ gst_pad_connect (gst_element_get_pad (parse_audio,"src"),
+ gst_element_get_pad (decode,"sink"));
+ gst_pad_connect (gst_element_get_pad (decode,"src"),
+ gst_element_get_pad (play,"sink"));
+
+ // construct queue and connect everything in the main pipeline
+ audio_queue = gst_elementfactory_make ("queue", "audio_queue");
+
+ gst_bin_add (GST_BIN (pipeline), GST_ELEMENT (audio_queue));
+ gst_bin_add (GST_BIN (pipeline), GST_ELEMENT (audio_thread));
+
+ gst_pad_connect (pad,
+ gst_element_get_pad (audio_queue, "sink"));
+ gst_pad_connect (gst_element_get_pad (audio_queue, "src"),
+ gst_element_get_pad (audio_thread, "sink"));
+
+ // set up thread state and kick things off
+ g_print ("setting to READY state\n");
+ gst_element_set_state (GST_ELEMENT (audio_thread), GST_STATE_READY);
+
+ }
+ else if (strncmp (gst_pad_get_name (pad), "video_", 6) == 0) {
+
+ // construct internal pipeline elements
+ parse_video = gst_elementfactory_make ("mp1videoparse", "parse_video");
+ g_return_if_fail (parse_video != NULL);
+ decode_video = gst_elementfactory_make ("mpeg_play", "decode_video");
+ g_return_if_fail (decode_video != NULL);
+
+ show = gst_elementfactory_make ("videosink", "show");
+ g_return_if_fail (show != NULL);
+
+ appwindow = gnome_app_new ("MPEG1 player", "MPEG1 player");
+ gnome_app_set_contents (GNOME_APP (appwindow),
+ gst_util_get_widget_arg (GTK_OBJECT (show), "widget"));
+ gtk_widget_show_all (appwindow);
+
+ // create the thread and pack stuff into it
+ video_thread = gst_thread_new ("video_thread");
+ g_return_if_fail (video_thread != NULL);
+ gst_bin_add (GST_BIN (video_thread), GST_ELEMENT (parse_video));
+ gst_bin_add (GST_BIN (video_thread), GST_ELEMENT (decode_video));
+ gst_bin_add (GST_BIN (video_thread), GST_ELEMENT (show));
+
+ // set up pad connections
+ gst_element_add_ghost_pad (GST_ELEMENT (video_thread),
+ gst_element_get_pad (parse_video, "sink"));
+ gst_pad_connect (gst_element_get_pad (parse_video, "src"),
+ gst_element_get_pad (decode_video, "sink"));
+ gst_pad_connect (gst_element_get_pad (decode_video, "src"),
+ gst_element_get_pad (show, "sink"));
+
+ // construct queue and connect everything in the main pipeline
+ video_queue = gst_elementfactory_make ("queue", "video_queue");
+
+ gst_bin_add (GST_BIN (pipeline), GST_ELEMENT (video_queue));
+ gst_bin_add (GST_BIN (pipeline), GST_ELEMENT (video_thread));
+
+ gst_pad_connect (pad,
+ gst_element_get_pad (video_queue, "sink"));
+ gst_pad_connect (gst_element_get_pad (video_queue, "src"),
+ gst_element_get_pad (video_thread, "sink"));
+
+ // set up thread state and kick things off
+ gtk_object_set (GTK_OBJECT (video_thread), "create_thread", TRUE, NULL);
+ g_print ("setting to READY state\n");
+ gst_element_set_state (GST_ELEMENT (video_thread), GST_STATE_READY);
+ }
+ g_print("\n");
+ gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
+}
+
+
+ In the above example, we created new elements based on the name of
+ the newly created pad and added them to a new thread. There are other
+ ways to check the type of a pad, for example by using its MIME type and
+ the properties of the pad; a rough sketch of this approach follows below.
+
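+
+ Checking the pad by its capabilities rather than by its name could look
+ roughly like the sketch below. Note that this is only a sketch: it assumes
+ that gst_pad_get_caps () is available and that the MIME type can be read
+ from the returned GstCaps with gst_caps_get_mime (); check the API
+ reference of your GStreamer version before relying on these calls.
+
+
+void
+new_pad_created (GstElement *parse, GstPad *pad, GstElement *pipeline)
+{
+  GstCaps *caps;
+  const gchar *mime;
+
+  // ask the pad what kind of data it will provide
+  caps = gst_pad_get_caps (pad);
+  if (caps == NULL)
+    return;
+
+  // assumed accessor: retrieve the MIME type string from the caps
+  mime = gst_caps_get_mime (caps);
+
+  if (strncmp (mime, "audio/", 6) == 0) {
+    // build the audio thread and queue exactly as in the example above
+  }
+  else if (strncmp (mime, "video/", 6) == 0) {
+    // build the video thread and queue exactly as in the example above
+  }
+}
+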
diff --git a/docs/manual/programs.sgml b/docs/manual/programs.sgml
index e17c6965b4..d5fbb6b45f 100644
--- a/docs/manual/programs.sgml
+++ b/docs/manual/programs.sgml
@@ -102,8 +102,51 @@ video_00! (mpeg2dec ! videosink)
- Note that the parser isn't capable of more complex pipelines yet, including
- the VOB player above. The minor tweaks will be made post 0.1.0.
+
+ Note that the parser isn't capable of more complex pipelines yet, including
+ the VOB player above. The minor tweaks will be made post 0.1.0.
+
+
+ You can also use the parser in your own code. GStreamer
+ provides a function gst_parse_launch () that you can use to construct a pipeline.
+ The code of gstreamer-launch actually looks like this:
+
+
+#include <gst/gst.h>
+#include <string.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+int
+main(int argc, char *argv[])
+{
+ GstElement *pipeline;
+ char **argvn;
+ gchar *cmdline;
+ int i;
+
+ gst_init(&argc,&argv);
+
+ pipeline = gst_pipeline_new("launch");
+
+ // make a NULL-terminated copy of the arguments, skipping the program name
+ argvn = g_new0(char *,argc);
+ memcpy(argvn,argv+1,sizeof(char*)*(argc-1));
+ // join the argvs together
+ cmdline = g_strjoinv(" ",argvn);
+ // free the null-terminated argv
+ g_free(argvn);
+
+ gst_parse_launch(cmdline,pipeline);
+
+ fprintf(stderr,"RUNNING pipeline\n");
+ gst_element_set_state(pipeline,GST_STATE_PLAYING);
+
+ while (1)
+ gst_bin_iterate (GST_BIN (pipeline));
+
+ return 0;
+}
+
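+
+ If you do not need to build the command line from argv, you can also pass a
+ fixed pipeline description to gst_parse_launch (). The following is only a
+ small sketch: the element names and the file location are examples, any
+ pipeline description the parser understands will work.
+
+
+#include <gst/gst.h>
+
+int
+main(int argc, char *argv[])
+{
+  GstElement *pipeline;
+
+  gst_init(&argc,&argv);
+
+  pipeline = gst_pipeline_new("launch");
+
+  // let the parser create and connect the elements for us
+  gst_parse_launch("disksrc location=music.mp3 ! mp3parse ! mpg123 ! audiosink",
+                   pipeline);
+
+  gst_element_set_state(pipeline, GST_STATE_PLAYING);
+
+  while (1)
+    gst_bin_iterate (GST_BIN (pipeline));
+
+  return 0;
+}
+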
diff --git a/docs/manual/typedetection.sgml b/docs/manual/typedetection.sgml
index 1ec4f42794..cfbf97782b 100644
--- a/docs/manual/typedetection.sgml
+++ b/docs/manual/typedetection.sgml
@@ -1,6 +1,146 @@
Typedetection
+ Sometimes the capabilities of a pad are not specified. The disksrc, for
+ example, does not know what type of file it is reading. Before you can attach
+ an element to the pad of the disksrc, you need to determine the media type in
+ order to be able to choose a compatible element.
+
+
+ To solve this problem, a plugin can provide the GStreamer
+ core library with a typedefinition. The typedefinition
+ contains the following information:
+
+
+
+ The MIME type we are going to define.
+
+
+
+
+ An optional string with a list of possible file extensions this
+ type is usually associated with. The list entries are separated by
+ spaces, e.g. ".mp3 .mpa .mpg".
+
+
+
+
+ An optional typefind function.
+
+
+
+
+
+ The typefind functions give a meaning to the MIME types that are used
+ in GStreamer. A typefind function has the following definition:
+
+
+typedef GstCaps *(*GstTypeFindFunc) (GstBuffer *buf, gpointer priv);
+
+
+ This typefind function will inspect a GstBuffer with data and will output
+ a GstCaps structure describing the type. If the typefind function does not
+ understand the buffer contents, it will return NULL.
+
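+
+ As an illustration, a typefind function for MPEG audio could look roughly
+ like the sketch below. This is only a sketch: the gst_caps_new () call and
+ the "audio/mp3" MIME type are assumptions that may differ in your version
+ of GStreamer; the buffer is accessed through the usual GST_BUFFER_DATA ()
+ and GST_BUFFER_SIZE () macros.
+
+
+static GstCaps*
+mp3_typefind (GstBuffer *buf, gpointer priv)
+{
+  guchar *data = GST_BUFFER_DATA (buf);
+
+  /* not enough data to make a decision */
+  if (GST_BUFFER_SIZE (buf) < 2)
+    return NULL;
+
+  /* look for the MPEG audio sync marker (11 set bits) at the start */
+  if (data[0] != 0xff || (data[1] & 0xe0) != 0xe0)
+    return NULL;
+
+  /* assumed constructor: create caps with a name and a MIME type */
+  return gst_caps_new ("mp3_typefind", "audio/mp3", NULL);
+}
+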
+
+ GStreamer provides a typefind element among its core elements
+ that can be used to determine the type of a given pad.
+
+
+ The next example shows how a typefind element can be inserted into a pipeline
+ to detect the media type of a file. It will output the capabilities of the pad
+ as an XML representation.
+
+
+#include <stdlib.h>
+#include <gst/gst.h>
+
+void type_found (GstElement *typefind, GstCaps* caps);
+
+int
+main(int argc, char *argv[])
+{
+ GstElement *bin, *disksrc, *typefind;
+
+ gst_init(&argc,&argv);
+
+ if (argc != 2) {
+ g_print("usage: %s <filename>\n", argv[0]);
+ exit(-1);
+ }
+
+ /* create a new bin to hold the elements */
+ bin = gst_bin_new("bin");
+ g_assert(bin != NULL);
+
+ /* create a disk reader */
+ disksrc = gst_elementfactory_make("disksrc", "disk_source");
+ g_assert(disksrc != NULL);
+ gtk_object_set(GTK_OBJECT(disksrc),"location", argv[1],NULL);
+
+ /* create the typefind element */
+ typefind = gst_elementfactory_make("typefind", "typefind");
+ g_assert(typefind != NULL);
+
+ /* add objects to the main pipeline */
+ gst_bin_add(GST_BIN(bin), disksrc);
+ gst_bin_add(GST_BIN(bin), typefind);
+
+ gtk_signal_connect (GTK_OBJECT (typefind), "have_type",
+                     GTK_SIGNAL_FUNC (type_found), NULL);
+
+ gst_pad_connect(gst_element_get_pad(disksrc,"src"),
+ gst_element_get_pad(typefind,"sink"));
+
+ /* start playing */
+ gst_element_set_state(GST_ELEMENT(bin), GST_STATE_PLAYING);
+
+ gst_bin_iterate(GST_BIN(bin));
+
+ gst_element_set_state(GST_ELEMENT(bin), GST_STATE_NULL);
+
+ exit(0);
+}
+
+
+ We create a very simple pipeline with only a disksrc and the typefind element
+ in it. The sinkpad of the typefind element has been connected to the src pad
+ of the disksrc.
+
+
+ We connected to the 'have_type' signal of the typefind element, which will be
+ emitted when the type of the media stream has been detected.
+
+
+ The typefind element will loop over all the registered types and execute
+ each of their typefind functions. As soon as a function returns a GstCaps pointer,
+ the type_found function will be called:
+
+
+
+void
+type_found (GstElement *typefind, GstCaps* caps)
+{
+ xmlDocPtr doc;
+ xmlNodePtr parent;
+
+ doc = xmlNewDoc ("1.0");
+ doc->root = xmlNewDocNode (doc, NULL, "Capabilities", NULL);
+
+ parent = xmlNewChild (doc->root, NULL, "Caps1", NULL);
+ gst_caps_save_thyself (caps, parent);
+
+ xmlDocDump (stdout, doc);
+}
+
+
+ In the type_found function we can print or inspect the type that has been
+ detected using the GstCaps APIs. In this example, we just print out the
+ XML representation of the caps structure to stdout.
+
+
+ A more useful option would be to use the registry to look up an element
+ that can handle this particular caps structure, or to use the
+ autoplugger to connect this caps structure to, for example, a videosink.
diff --git a/docs/manual/utility.sgml b/docs/manual/utility.sgml
index aa3a0ee45e..c8247bc498 100644
--- a/docs/manual/utility.sgml
+++ b/docs/manual/utility.sgml
@@ -1,6 +1,81 @@
Utility functions
+ While you can use the regular gtk_object_getv () function to
+ query the value of an object property, GStreamer
+ provides some easy wrappers for this common operation.
+
+ Instead of writing the following Gtk+ code to query the GTK_STRING value
+ of an object:
+
+
+ GtkArg arg;
+ guchar *value;
+
+ arg.name = argname;
+ gtk_object_getv (GTK_OBJECT (object), 1, &arg);
+ value = GTK_VALUE_STRING (arg);
+
+
+ You can simply use:
+
+
+ value = gst_util_get_string_arg (object, argname);
+
+
+ These convenience functions exist for the following types:
+
+
+
+ gint: with gst_util_get_int_arg ();
+
+
+
+
+ gboolean: with gst_util_get_bool_arg ();
+
+
+
+
+ glong: with gst_util_get_long_arg ();
+
+
+
+
+ gfloat: with gst_util_get_float_arg ();
+
+
+
+
+ gdouble: with gst_util_get_double_arg ();
+
+
+
+
+ guchar*: with gst_util_get_string_arg ();
+
+
+
+
+ gpointer: with gst_util_get_pointer_arg ();
+
+
+
+
+ GtkWidget*: with gst_util_get_widget_arg ();
+
+
+
+
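+
+ For example, to read back the location property that was set on the
+ disksrc element used in the earlier examples (the element and property
+ names are taken from those examples):
+
+
+ guchar *location;
+
+ /* query the "location" string property of the disksrc */
+ location = gst_util_get_string_arg (GTK_OBJECT (disksrc), "location");
+ g_print ("reading from %s\n", location);
+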
+
+ There is also another utility function that can be used to dump a block
+ of memory to the console. This function is very useful for plugin
+ developers. The function will dump size bytes of the memory pointed
+ to by mem.
+
+
+ void gst_util_dump_mem(guchar *mem, guint size);
+
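+
+ For example, inside a plugin's chain function you could dump the start of
+ every buffer that comes in. The chain function below is only a sketch
+ (my_chain is a hypothetical name), but gst_util_dump_mem is used exactly
+ as declared above:
+
+
+static void
+my_chain (GstPad *pad, GstBuffer *buf)
+{
+  guint len;
+
+  /* dump at most the first 16 bytes of the incoming buffer on the console */
+  len = GST_BUFFER_SIZE (buf) < 16 ? GST_BUFFER_SIZE (buf) : 16;
+  gst_util_dump_mem (GST_BUFFER_DATA (buf), len);
+}
+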