<chapter id="cha-dynamic">
<title>Dynamic pipelines</title>
<para>
In this chapter we will see how you can create a dynamic pipeline. A dynamic
pipeline is a pipeline that is updated or created while media is flowing
through it. We will create a partial pipeline first and add more elements
while the pipeline is playing. Dynamic pipelines cause all sorts of
scheduling issues and will remain a topic of research for a long time in
GStreamer.
</para>
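<para>
Modifying a running pipeline follows one basic pattern, which the example
below applies each time the parser creates a new pad:
</para>
<programlisting>
// pause the pipeline before modifying it
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);

// ... create the new elements, gst_bin_add () them to the pipeline
// and gst_pad_connect () their pads to the existing ones ...

// resume playback with the new elements in place
gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
</programlisting>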
<para>
We will show how to create an MPEG1 video player using dynamic pipelines.
As you have seen in the pad section, we can connect a signal handler to an
element so that we are notified when it creates a new pad. We will use this
to create our MPEG1 player.
</para>
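<para>
The heart of the example is a callback connected to the "new_pad" signal of
the MPEG1 parser; its signature and the connection, taken from the code
below, look like this:
</para>
<programlisting>
// called by the parser each time it creates a new pad
void new_pad_created (GstElement *parse, GstPad *pad, GstElement *pipeline);

gtk_signal_connect (GTK_OBJECT (parse), "new_pad",
                    GTK_SIGNAL_FUNC (new_pad_created), pipeline);
</programlisting>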
<para>
We'll start with a simple main function:
</para>
<programlisting>
#include &lt;stdlib.h&gt;
#include &lt;gnome.h&gt;
#include &lt;gst/gst.h&gt;

// defined further on: called whenever the parser creates a new pad
void new_pad_created (GstElement *parse, GstPad *pad, GstElement *pipeline);

// called when the disksrc has reached the end of the file
void
eof (GstElement *src)
{
  g_print ("have eos, quitting\n");
  exit (0);
}

// iterate the pipeline once per idle cycle of the GTK main loop
gboolean
idle_func (gpointer data)
{
  gst_bin_iterate (GST_BIN (data));
  return TRUE;
}

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *src, *parse;

  gst_init (&amp;argc, &amp;argv);
  gnome_init ("MPEG1 Video player", "0.0.1", argc, argv);

  pipeline = gst_pipeline_new ("pipeline");
  g_return_val_if_fail (pipeline != NULL, -1);

  // the disksrc reads the file given on the command line
  src = gst_elementfactory_make ("disksrc", "src");
  g_return_val_if_fail (src != NULL, -1);
  gtk_object_set (GTK_OBJECT (src), "location", argv[1], NULL);

  parse = gst_elementfactory_make ("mpeg1parse", "parse");
  g_return_val_if_fail (parse != NULL, -1);

  gst_bin_add (GST_BIN (pipeline), GST_ELEMENT (src));
  gst_bin_add (GST_BIN (pipeline), GST_ELEMENT (parse));

  // notify us when the parser creates a pad for an elementary stream
  gtk_signal_connect (GTK_OBJECT (parse), "new_pad",
                      GTK_SIGNAL_FUNC (new_pad_created), pipeline);
  // notify us when the file has ended
  gtk_signal_connect (GTK_OBJECT (src), "eos",
                      GTK_SIGNAL_FUNC (eof), NULL);

  gst_pad_connect (gst_element_get_pad (src, "src"),
                   gst_element_get_pad (parse, "sink"));

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  g_idle_add (idle_func, pipeline);

  gdk_threads_enter ();
  gst_main ();
  gdk_threads_leave ();

  return 0;
}
</programlisting>
<para>
We create two elements: a disksrc (the element that reads the file from
disk) and an mpeg1parse. We also connect a handler to the EOS (End Of
Stream) signal of the disksrc so that we are notified when the file has
ended. There is nothing special about this piece of code except for the
'new_pad' signal that we connected to the mpeg1parse element using:
</para>
<programlisting>
gtk_signal_connect (GTK_OBJECT (parse), "new_pad",
                    GTK_SIGNAL_FUNC (new_pad_created), pipeline);
</programlisting>
<para>
When an elementary stream has been detected in the system stream,
mpeg1parse will create a new pad that provides the data of that
elementary stream. Our function 'new_pad_created' will be called each
time such a pad is created:
</para>
<programlisting>
void
new_pad_created (GstElement *parse, GstPad *pad, GstElement *pipeline)
{
  GstElement *parse_audio, *parse_video, *decode, *decode_video, *play, *videoscale, *show;
  GstElement *audio_queue, *video_queue;
  GstElement *audio_thread, *video_thread;
  GtkWidget *appwindow;

  g_print ("***** a new pad &percnt;s was created\n", gst_pad_get_name (pad));

  // pause the pipeline while we change it
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PAUSED);

  // connect to audio pad
  if (strncmp (gst_pad_get_name (pad), "audio_", 6) == 0) {
    // construct internal pipeline elements
    parse_audio = gst_elementfactory_make ("mp3parse", "parse_audio");
    g_return_if_fail (parse_audio != NULL);
    decode = gst_elementfactory_make ("mpg123", "decode_audio");
    g_return_if_fail (decode != NULL);
    play = gst_elementfactory_make ("audiosink", "play_audio");
    g_return_if_fail (play != NULL);

    // create the thread and pack stuff into it
    audio_thread = gst_thread_new ("audio_thread");
    g_return_if_fail (audio_thread != NULL);
    gst_bin_add (GST_BIN (audio_thread), GST_ELEMENT (parse_audio));
    gst_bin_add (GST_BIN (audio_thread), GST_ELEMENT (decode));
    gst_bin_add (GST_BIN (audio_thread), GST_ELEMENT (play));

    // set up pad connections
    gst_element_add_ghost_pad (GST_ELEMENT (audio_thread),
                               gst_element_get_pad (parse_audio, "sink"));
    gst_pad_connect (gst_element_get_pad (parse_audio, "src"),
                     gst_element_get_pad (decode, "sink"));
    gst_pad_connect (gst_element_get_pad (decode, "src"),
                     gst_element_get_pad (play, "sink"));

    // construct queue and connect everything in the main pipeline
    audio_queue = gst_elementfactory_make ("queue", "audio_queue");
    gst_bin_add (GST_BIN (pipeline), GST_ELEMENT (audio_queue));
    gst_bin_add (GST_BIN (pipeline), GST_ELEMENT (audio_thread));
    gst_pad_connect (pad,
                     gst_element_get_pad (audio_queue, "sink"));
    gst_pad_connect (gst_element_get_pad (audio_queue, "src"),
                     gst_element_get_pad (audio_thread, "sink"));

    // set up thread state and kick things off
    g_print ("setting to READY state\n");
    gst_element_set_state (GST_ELEMENT (audio_thread), GST_STATE_READY);
  }
  else if (strncmp (gst_pad_get_name (pad), "video_", 6) == 0) {
    // construct internal pipeline elements
    parse_video = gst_elementfactory_make ("mp1videoparse", "parse_video");
    g_return_if_fail (parse_video != NULL);
    decode_video = gst_elementfactory_make ("mpeg_play", "decode_video");
    g_return_if_fail (decode_video != NULL);
    show = gst_elementfactory_make ("videosink", "show");
    g_return_if_fail (show != NULL);

    // embed the video output widget in a GNOME window
    appwindow = gnome_app_new ("MPEG1 player", "MPEG1 player");
    gnome_app_set_contents (GNOME_APP (appwindow),
                            gst_util_get_widget_arg (GTK_OBJECT (show), "widget"));
    gtk_widget_show_all (appwindow);

    // create the thread and pack stuff into it
    video_thread = gst_thread_new ("video_thread");
    g_return_if_fail (video_thread != NULL);
    gst_bin_add (GST_BIN (video_thread), GST_ELEMENT (parse_video));
    gst_bin_add (GST_BIN (video_thread), GST_ELEMENT (decode_video));
    gst_bin_add (GST_BIN (video_thread), GST_ELEMENT (show));

    // set up pad connections
    gst_element_add_ghost_pad (GST_ELEMENT (video_thread),
                               gst_element_get_pad (parse_video, "sink"));
    gst_pad_connect (gst_element_get_pad (parse_video, "src"),
                     gst_element_get_pad (decode_video, "sink"));
    gst_pad_connect (gst_element_get_pad (decode_video, "src"),
                     gst_element_get_pad (show, "sink"));

    // construct queue and connect everything in the main pipeline
    video_queue = gst_elementfactory_make ("queue", "video_queue");
    gst_bin_add (GST_BIN (pipeline), GST_ELEMENT (video_queue));
    gst_bin_add (GST_BIN (pipeline), GST_ELEMENT (video_thread));
    gst_pad_connect (pad,
                     gst_element_get_pad (video_queue, "sink"));
    gst_pad_connect (gst_element_get_pad (video_queue, "src"),
                     gst_element_get_pad (video_thread, "sink"));

    // set up thread state and kick things off
    gtk_object_set (GTK_OBJECT (video_thread), "create_thread", TRUE, NULL);
    g_print ("setting to READY state\n");
    gst_element_set_state (GST_ELEMENT (video_thread), GST_STATE_READY);
  }
  g_print ("\n");

  // resume playback with the new elements in place
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
}
</programlisting>
<para>
In the above example, we created new elements based on the name of the
newly created pad and added them to the pipeline inside a new thread.
There are other ways to check the type of a pad, for example by using
the MIME type and the properties of the pad, as sketched below.
</para>
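<para>
As an illustration, a type check based on the MIME type of the pad could
look roughly like the fragment below. This is only a sketch: the caps
accessors used here (gst_pad_get_caps and gst_caps_get_mime) and the MIME
type strings are assumptions and may differ in your version of the API.
</para>
<programlisting>
void
new_pad_created (GstElement *parse, GstPad *pad, GstElement *pipeline)
{
  GstCaps *caps;
  const gchar *mime;

  // ask the pad for its capabilities (assumed accessor)
  caps = gst_pad_get_caps (pad);
  if (caps == NULL)
    return;

  // the MIME type identifies the kind of elementary stream
  // (gst_caps_get_mime and the strings below are assumptions)
  mime = gst_caps_get_mime (caps);

  if (strcmp (mime, "audio/mp3") == 0) {
    // build and connect the audio branch as shown above
  }
  else if (strcmp (mime, "video/mpeg") == 0) {
    // build and connect the video branch as shown above
  }
}
</programlisting>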
</chapter>