From b7810a363593e6324a8a2a097613d02238b8032d Mon Sep 17 00:00:00 2001
From: Wim Taymans
Date: Thu, 12 Oct 2000 19:41:30 +0000
Subject: [PATCH] Removed the hardcoded pipeline setup in gstplay in favor of
 the autoplugging features. Still in a rough shape...

Original commit message from CVS:
Removed the hardcoded pipeline setup in gstplay in favor of the
autoplugging features.
Still in a rough shape...
---
 editor/gsteditorconnection.c |   2 +-
 editor/gsteditorpad.c        |   2 +-
 gst/gstpipeline.c            |   4 +-
 gstplay/Makefile.am          |   3 -
 gstplay/avi.c                |  34 -------
 gstplay/gstplay.c            | 183 ++++++++---------------------
 gstplay/mpeg1.c              | 129 ------------------------
 gstplay/mpeg2.c              | 150 ----------------------------
 8 files changed, 47 insertions(+), 460 deletions(-)
 delete mode 100644 gstplay/avi.c
 delete mode 100644 gstplay/mpeg1.c
 delete mode 100644 gstplay/mpeg2.c

diff --git a/editor/gsteditorconnection.c b/editor/gsteditorconnection.c
index 95c8699eb0..1f7f28be97 100644
--- a/editor/gsteditorconnection.c
+++ b/editor/gsteditorconnection.c
@@ -179,7 +179,7 @@ static void gst_editor_connection_realize(GstEditorConnection *connection) {
   connection->line = gnome_canvas_item_new(
     GST_EDITOR_ELEMENT(connection->parent)->group,
     gnome_canvas_line_get_type(),
-    "points",connection->points,NULL);
+    "points",connection->points,"width_units",2.0, NULL);
 }
 
 static void gst_editor_connection_destroy(GtkObject *object) {
diff --git a/editor/gsteditorpad.c b/editor/gsteditorpad.c
index db7c544276..e2972aeca3 100644
--- a/editor/gsteditorpad.c
+++ b/editor/gsteditorpad.c
@@ -226,7 +226,7 @@ static void gst_editor_pad_realize(GstEditorPad *pad) {
 
   pad->border = gnome_canvas_item_new(pad->group,
     gnome_canvas_rect_get_type(),
-    "width_units",1.0,"fill_color","white","outline_color","black",
+    "width_units",1.0,"fill_color_rgba", 0xCCFFCC00,"outline_color","black",
     "x1",0.0,"y1",0.0,"x2",pad->width,"y2",pad->height,NULL);
   g_return_if_fail(pad->border != NULL);
   GST_EDITOR_SET_OBJECT(pad->border,pad);
diff --git a/gst/gstpipeline.c b/gst/gstpipeline.c
index b2fca3f9bf..b636204d69 100644
--- a/gst/gstpipeline.c
+++ b/gst/gstpipeline.c
@@ -419,6 +419,8 @@ differ:
     GstElement *thesrcelement = srcelement;
     GstElement *thebin = GST_ELEMENT(pipeline);
 
+    if (g_list_length(factories[i]) < 1) goto next;
+
     sinkelement = (GstElement *)elements->data;
 
     use_thread = have_common;
@@ -486,7 +488,7 @@ differ:
       // this element is now the new source element
       thesrcelement = element;
     }
-
+next:
     elements = g_list_next(elements);
     i++;
   }
diff --git a/gstplay/Makefile.am b/gstplay/Makefile.am
index 5147c0ff3a..b8783c8c6a 100644
--- a/gstplay/Makefile.am
+++ b/gstplay/Makefile.am
@@ -11,12 +11,9 @@ glade_DATA = gstplay.glade play.xpm stop.xpm pause.xpm
 
 gstplay_SOURCES = \
 	gstplay.c \
-	mpeg1.c mpeg2.c avi.c\
 	interface.c interface.h \
 	callbacks.c callbacks.h
 
-noinst_HEADERS = codecs.h
-
 CFLAGS += -O2 -Wall -DDATADIR=\""$(gladedir)/"\"
 
 gstplay_CFLAGS = $(shell gnome-config --cflags gnomeui) $(shell libglade-config --cflags gnome) \
diff --git a/gstplay/avi.c b/gstplay/avi.c
deleted file mode 100644
index 2b22bbfd7d..0000000000
--- a/gstplay/avi.c
+++ /dev/null
@@ -1,34 +0,0 @@
-
-#ifdef HAVE_CONFIG_H
-# include
-#endif
-
-#include
-#include
-
-extern GstElement *video_render_queue, *audio_render_queue;
-
-void avi_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
-{
-  g_print("***** a new pad %s was created\n", gst_pad_get_name(pad));
-  gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-
-  // connect to audio pad
-  //if (0) {
-  if (strncmp(gst_pad_get_name(pad), "audio_", 6) == 0) {
-
-    gst_bin_add(GST_BIN(pipeline), audio_render_queue);
-    gst_pad_connect(pad,
-                    gst_element_get_pad(audio_render_queue,"sink"));
-
-  } else if (strncmp(gst_pad_get_name(pad), "video_", 6) == 0) {
-  //} else if (0) {
-
-    gst_bin_add(GST_BIN(pipeline), video_render_queue);
-    gst_pad_connect(pad,
-                    gst_element_get_pad(video_render_queue,"sink"));
-  }
-  gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
-  g_print("\n");
-}
-
diff --git a/gstplay/gstplay.c b/gstplay/gstplay.c
index 8ffd8ffebe..5f54cf6978 100644
--- a/gstplay/gstplay.c
+++ b/gstplay/gstplay.c
@@ -15,36 +15,30 @@
 #include "callbacks.h"
 #include "interface.h"
-#include "codecs.h"
-
 #define MUTEX_STATUS() (g_mutex_trylock(gdk_threads_mutex)? g_mutex_unlock(gdk_threads_mutex), "was not locked" : "was locked")
-
 #define BUFFER 20
 
-extern gboolean _gst_plugin_spew;
-gboolean idle_func(gpointer data);
-GstElement *show, *video_render_queue;
-GstElement *audio_play, *audio_render_queue;
+static gboolean idle_func(gpointer data);
+static gint start_from_file(guchar *filename);
+
+GstElement *show;
+GstElement *audio_play;
 GstElement *src;
+GstElement *parse;
 GstElement *pipeline;
-GstElement *parse = NULL;
-GstElement *typefind;
-GstElement *video_render_thread;
-GstElement *audio_render_thread;
 GstPlayState state;
 
 gboolean picture_shown = FALSE;
 guchar statusline[200];
 guchar *statustext = "stopped";
 
 GtkWidget *status_area;
+GtkWidget *video;
 GtkAdjustment *adjustment;
 GtkWidget *play_button;
 GtkWidget *pause_button;
 GtkWidget *stop_button;
 GtkFileSelection *open_file_selection;
 
-gint start_from_file(guchar *filename);
-
 static void frame_displayed(GstSrc *asrc)
 {
   int size, time, frame_time = 0, src_pos;
@@ -79,9 +73,8 @@ static void frame_displayed(GstSrc *asrc)
   DEBUG("gstplay: frame displayed end %s\n", MUTEX_STATUS());
 }
-gboolean idle_func(gpointer data) {
+static gboolean idle_func(gpointer data) {
   DEBUG("idle start %s\n",MUTEX_STATUS());
-  //gst_src_push(GST_SRC(data));
   gst_bin_iterate(GST_BIN(data));
   DEBUG("idle stop %s\n",MUTEX_STATUS());
   return TRUE;
 }
@@ -103,7 +96,7 @@ void show_next_picture() {
   DEBUG("gstplay: next found %s\n", MUTEX_STATUS());
 }
 
-void mute_audio(gboolean mute) {
+static void mute_audio(gboolean mute) {
   gtk_object_set(GTK_OBJECT(audio_play),"mute",mute,NULL);
 }
 
@@ -135,7 +128,7 @@ on_exit_menu_activate (GtkMenuItem *menuitem,
   gdk_threads_leave();
   gstplay_tear_down();
   gdk_threads_enter();
-  gtk_main_quit();
+  gst_main_quit();
 }
 
 void on_ok_button1_clicked (GtkButton *button,
@@ -155,12 +148,6 @@ gint on_gstplay_delete_event(GtkWidget *widget, GdkEvent *event, gpointer data)
   return FALSE;
 }
 
-void gstplay_parse_pads_created(GstElement *element, gpointer data)
-{
-  printf("gstplay: element \"%s\" is ready\n", gst_element_get_name(element));
-  gst_clock_reset(gst_clock_get_system());
-}
-
 void change_state(GstPlayState new_state) {
   if (new_state == state) return;
 
@@ -169,6 +156,7 @@ void change_state(GstPlayState new_state) {
       mute_audio(FALSE);
       statustext = "playing";
       update_status_area(status_area);
+      gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_READY);
       gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
       gtk_idle_add(idle_func, pipeline);
       state = GSTPLAY_PLAYING;
@@ -178,12 +166,14 @@ void change_state(GstPlayState new_state) {
       statustext = "paused";
      update_status_area(status_area);
       if (state != GSTPLAY_STOPPED) gtk_idle_remove_by_data(pipeline);
+      //gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
       mute_audio(TRUE);
       state = GSTPLAY_PAUSE;
       update_buttons(1);
       break;
     case GSTPLAY_STOPPED:
       if (state != GSTPLAY_PAUSE) gtk_idle_remove_by_data(pipeline);
+      //gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_NULL);
       statustext = "stopped";
       update_status_area(status_area);
       mute_audio(TRUE);
@@ -191,104 +181,31 @@ void change_state(GstPlayState new_state) {
       gtk_object_set(GTK_OBJECT(src),"offset",0,NULL);
       update_buttons(2);
       update_slider(adjustment, 0.0);
-      show_next_picture();
+      //show_next_picture();
       break;
   }
 }
 
-static void have_type(GstSink *sink) {
-  gint type;
-  GstType *gsttype;
-
-  type = gst_util_get_int_arg(GTK_OBJECT(sink),"type");
-  gsttype = gst_type_find_by_id(type);
-
-  g_print("have type %d:%s\n", type, gsttype->mime);
-
-  gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_NULL);
-  gst_bin_remove(GST_BIN(pipeline), GST_ELEMENT(sink));
-
-  gst_pad_disconnect(gst_element_get_pad(src,"src"),
-                     gst_element_get_pad(GST_ELEMENT(sink),"sink"));
-
-  if (strstr(gsttype->mime, "mpeg1-system")) {
-    parse = gst_elementfactory_make("mpeg1parse","mpeg1_system_parse");
-    gtk_signal_connect(GTK_OBJECT(parse),"new_pad",
-                       GTK_SIGNAL_FUNC(mpeg1_new_pad_created),pipeline);
-    gtk_signal_connect(GTK_OBJECT(show),"frame_displayed",
-                       GTK_SIGNAL_FUNC(frame_displayed),NULL);
-  }
-  else if (strstr(gsttype->mime, "mpeg2-system")) {
-    parse = gst_elementfactory_make("mpeg2parse","mpeg2_system_parse");
-    gtk_signal_connect(GTK_OBJECT(parse),"new_pad",
-                       GTK_SIGNAL_FUNC(mpeg2_new_pad_created),pipeline);
-    gtk_signal_connect(GTK_OBJECT(show),"frame_displayed",
-                       GTK_SIGNAL_FUNC(frame_displayed),NULL);
-  }
-  else if (strstr(gsttype->mime, "avi")) {
-    parse = gst_elementfactory_make("parseavi","parse");
-    gtk_signal_connect(GTK_OBJECT(parse),"new_pad",
-                       GTK_SIGNAL_FUNC(avi_new_pad_created),pipeline);
-  }
-  else if (strstr(gsttype->mime, "mpeg1")) {
-    mpeg1_setup_video_thread(gst_element_get_pad(src,"src"), video_render_queue, GST_ELEMENT(pipeline));
-    gst_clock_reset(gst_clock_get_system());
-    gtk_signal_connect(GTK_OBJECT(show),"frame_displayed",
-                       GTK_SIGNAL_FUNC(frame_displayed),NULL);
-  }
-  else if (strstr(gsttype->mime, "mp3")) {
-    mpeg1_setup_audio_thread(gst_element_get_pad(src,"src"), audio_render_queue, GST_ELEMENT(pipeline));
-    gst_clock_reset(gst_clock_get_system());
-  }
-  else {
-    g_print("unknown media type\n");
-    exit(0);
-  }
-
-  if (parse) {
-    gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(parse));
-    gst_pad_connect(gst_element_get_pad(src,"src"),
-                    gst_element_get_pad(parse,"sink"));
-    gtk_signal_connect(GTK_OBJECT(parse),"pads_created",
-                       GTK_SIGNAL_FUNC(gstplay_parse_pads_created),pipeline);
-  }
-  gtk_object_set(GTK_OBJECT(src),"offset",0,NULL);
-
-  gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(video_render_thread));
-  gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(audio_render_thread));
-
-  g_print("setting to READY state\n");
-  gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_READY);
-  g_print("setting to PLAYING state\n");
-  gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
-  g_print("set to PLAYING state\n");
-
-}
-
-gint start_from_file(guchar *filename)
+static gint start_from_file(guchar *filename)
 {
-  src = gst_elementfactory_make("disksrc","disk_src");
+  src = gst_elementfactory_make("disksrc", "disk_src");
   g_return_val_if_fail(src != NULL, -1);
 
   g_print("should be using file '%s'\n",filename);
   gtk_object_set(GTK_OBJECT(src),"location",filename,NULL);
 
-  typefind = gst_elementfactory_make("typefind","typefind");
-  g_return_val_if_fail(typefind != NULL, -1);
-
-  gtk_signal_connect(GTK_OBJECT(typefind),"have_type",
-                     GTK_SIGNAL_FUNC(have_type),NULL);
-
-  gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(src));
-  gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(typefind));
-
+  gst_pipeline_add_src(GST_PIPELINE(pipeline),GST_ELEMENT(src));
   gtk_signal_connect(GTK_OBJECT(src),"eos",
                      GTK_SIGNAL_FUNC(eof),NULL);
 
-  gst_pad_connect(gst_element_get_pad(src,"src"),
-                  gst_element_get_pad(typefind,"sink"));
+  if (!gst_pipeline_autoplug(GST_PIPELINE(pipeline))) {
+    g_print("unable to handle stream\n");
+    exit(-1);
+  }
+  if (GST_PAD_CONNECTED(gst_element_get_pad(show, "sink"))) {
+    gtk_widget_show(video);
+  }
   g_print("setting to READY state\n");
-  gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_READY);
 
   state = GSTPLAY_STOPPED;
 
@@ -311,18 +228,14 @@ main (int argc, char *argv[])
   bindtextdomain (PACKAGE, PACKAGE_LOCALE_DIR);
   textdomain (PACKAGE);
 
-  g_thread_init(NULL);
-  gtk_init(&argc,&argv);
+  gst_init(&argc,&argv);
   gnome_init ("gstreamer", VERSION, argc, argv);
   glade_init();
   glade_gnome_init();
-  gst_init(&argc,&argv);
-  //gst_plugin_load_all();
 
   g_print("using %s\n", DATADIR"gstplay.glade");
 
   /* load the interface */
   xml = glade_xml_new(DATADIR "gstplay.glade", "gstplay");
-  /* connect the signals in the interface */
   status_area = glade_xml_get_widget(xml, "status_area");
   slider = glade_xml_get_widget(xml, "slider");
@@ -349,43 +262,33 @@ main (int argc, char *argv[])
                      GTK_SIGNAL_FUNC (target_drag_data_received),
                      NULL);
 
-  gst_plugin_load("videosink");
-
   g_snprintf(statusline, 200, "seeking");
 
-  pipeline = gst_pipeline_new("main_pipeline");
-  g_return_val_if_fail(pipeline != NULL, -1);
+  /* create a new bin to hold the elements */
+  pipeline = gst_pipeline_new("pipeline");
+  g_assert(pipeline != NULL);
 
-  video_render_thread = gst_thread_new("video_render_thread");
-  g_return_val_if_fail(video_render_thread != NULL, -1);
+  /* and an audio sink */
+  audio_play = gst_elementfactory_make("audiosink","play_audio");
+  g_return_val_if_fail(audio_play != NULL, -1);
+
+  /* and a video sink */
   show = gst_elementfactory_make("videosink","show");
   g_return_val_if_fail(show != NULL, -1);
   gtk_object_set(GTK_OBJECT(show),"xv_enabled",FALSE,NULL);
+  gtk_signal_connect(GTK_OBJECT(show),"frame_displayed",
+                     GTK_SIGNAL_FUNC(frame_displayed),NULL);
+  video = gst_util_get_widget_arg(GTK_OBJECT(show),"widget");
 
   gnome_dock_set_client_area(GNOME_DOCK(glade_xml_get_widget(xml, "dock1")),
-                   gst_util_get_widget_arg(GTK_OBJECT(show),"widget"));
-  gst_bin_add(GST_BIN(video_render_thread),GST_ELEMENT(show));
+                   video);
+  gst_pipeline_add_sink(GST_PIPELINE(pipeline), audio_play);
+  gst_pipeline_add_sink(GST_PIPELINE(pipeline), show);
+
+  /* connect the signals in the interface */
   glade_xml_signal_autoconnect(xml);
 
-  video_render_queue = gst_elementfactory_make("queue","video_render_queue");
-  gtk_object_set(GTK_OBJECT(video_render_queue),"max_level",BUFFER,NULL);
-  gst_pad_connect(gst_element_get_pad(video_render_queue,"src"),
-                  gst_element_get_pad(show,"sink"));
-  gtk_object_set(GTK_OBJECT(video_render_thread),"create_thread",TRUE,NULL);
-
-
-  audio_render_thread = gst_thread_new("audio_render_thread");
-  g_return_val_if_fail(audio_render_thread != NULL, -1);
-  audio_play = gst_elementfactory_make("audiosink","play_audio");
-  gst_bin_add(GST_BIN(audio_render_thread),GST_ELEMENT(audio_play));
-
-  audio_render_queue = gst_elementfactory_make("queue","audio_render_queue");
-  gtk_object_set(GTK_OBJECT(audio_render_queue),"max_level",BUFFER,NULL);
-  gst_pad_connect(gst_element_get_pad(audio_render_queue,"src"),
-                  gst_element_get_pad(audio_play,"sink"));
-  gtk_object_set(GTK_OBJECT(audio_render_thread),"create_thread",TRUE,NULL);
-
   if (argc > 1) {
     gint ret;
@@ -393,9 +296,7 @@ main (int argc, char *argv[])
     if (ret < 0) exit(ret);
   }
 
-  gdk_threads_enter();
-  gtk_main();
-  gdk_threads_leave();
+  gst_main();
 
   return 0;
 }
diff --git a/gstplay/mpeg1.c b/gstplay/mpeg1.c
deleted file mode 100644
index 4c04b5cd42..0000000000
--- a/gstplay/mpeg1.c
+++ /dev/null
@@ -1,129 +0,0 @@
-
-#define BUFFER 20
-#define VIDEO_DECODER "mpeg_play"
-
-#ifdef HAVE_CONFIG_H
-# include
-#endif
-
-#include
-#include
-
-#include "codecs.h"
-
-
-extern gboolean _gst_plugin_spew;
-extern GstElement *video_render_queue;
-extern GstElement *audio_render_queue;
-
-void mpeg1_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
-{
-
-  g_print("***** a new pad %s was created\n", gst_pad_get_name(pad));
-
-  // connect to audio pad
-  //if (0) {
-  if (strncmp(gst_pad_get_name(pad), "audio_", 6) == 0 && audio_render_queue) {
-    gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-    mpeg1_setup_audio_thread(pad, audio_render_queue, pipeline);
-
-  } else if (strncmp(gst_pad_get_name(pad), "video_", 6) == 0) {
-  //} else if (0) {
-    gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-    mpeg1_setup_video_thread(pad, video_render_queue, pipeline);
-  }
-  else return;
-
-  gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
-}
-
-void mpeg1_setup_audio_thread(GstPad *pad, GstElement *audio_render_queue, GstElement *pipeline)
-{
-  GstElement *parse_audio, *decode;
-  GstElement *audio_queue;
-  GstElement *audio_thread;
-
-  gst_plugin_load("mp3parse");
-  gst_plugin_load("mpg123");
-  // construct internal pipeline elements
-  parse_audio = gst_elementfactory_make("mp3parse","parse_audio");
-  g_return_if_fail(parse_audio != NULL);
-  decode = gst_elementfactory_make("mpg123","decode_audio");
-  g_return_if_fail(decode != NULL);
-
-  // create the thread and pack stuff into it
-  audio_thread = gst_thread_new("audio_thread");
-  g_return_if_fail(audio_thread != NULL);
-  gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(parse_audio));
-  gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(decode));
-  gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(audio_render_queue));
-
-  // set up pad connections
-  gst_element_add_ghost_pad(GST_ELEMENT(audio_thread),
-                            gst_element_get_pad(parse_audio,"sink"));
-  gst_pad_connect(gst_element_get_pad(parse_audio,"src"),
-                  gst_element_get_pad(decode,"sink"));
-  gst_pad_connect(gst_element_get_pad(decode,"src"),
-                  gst_element_get_pad(audio_render_queue,"sink"));
-
-  // construct queue and connect everything in the main pipelie
-  audio_queue = gst_elementfactory_make("queue","audio_queue");
-  gtk_object_set(GTK_OBJECT(audio_queue),"max_level",BUFFER,NULL);
-  gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(audio_queue));
-  gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(audio_thread));
-  gst_pad_connect(pad,
-                  gst_element_get_pad(audio_queue,"sink"));
-  gst_pad_connect(gst_element_get_pad(audio_queue,"src"),
-                  gst_element_get_pad(audio_thread,"sink"));
-
-  // set up thread state and kick things off
-  gtk_object_set(GTK_OBJECT(audio_thread),"create_thread",TRUE,NULL);
-  g_print("setting to READY state\n");
-  gst_element_set_state(GST_ELEMENT(audio_thread),GST_STATE_READY);
-}
-
-void mpeg1_setup_video_thread(GstPad *pad, GstElement *video_render_queue, GstElement *pipeline)
-{
-  GstElement *parse_video, *decode_video;
-  GstElement *video_queue;
-  GstElement *video_thread;
-
-  gst_plugin_load("mp1videoparse");
-  gst_plugin_load(VIDEO_DECODER);
-  // construct internal pipeline elements
-  parse_video = gst_elementfactory_make("mp1videoparse","parse_video");
-  g_return_if_fail(parse_video != NULL);
-  decode_video = gst_elementfactory_make(VIDEO_DECODER,"decode_video");
-  g_return_if_fail(decode_video != NULL);
-
-  // create the thread and pack stuff into it
-  video_thread = gst_thread_new("video_thread");
-  g_return_if_fail(video_thread != NULL);
-  gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(parse_video));
-  gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(decode_video));
-  gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(video_render_queue));
-
-  // set up pad connections
-  gst_element_add_ghost_pad(GST_ELEMENT(video_thread),
-                            gst_element_get_pad(parse_video,"sink"));
-  gst_pad_connect(gst_element_get_pad(parse_video,"src"),
-                  gst_element_get_pad(decode_video,"sink"));
-  gst_pad_connect(gst_element_get_pad(decode_video,"src"),
-                  gst_element_get_pad(video_render_queue,"sink"));
-
-  // construct queue and connect everything in the main pipeline
-  video_queue = gst_elementfactory_make("queue","video_queue");
-  gtk_object_set(GTK_OBJECT(video_queue),"max_level",BUFFER,NULL);
-  gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(video_queue));
-  gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(video_thread));
-  gst_pad_connect(pad,
-                  gst_element_get_pad(video_queue,"sink"));
-  gst_pad_connect(gst_element_get_pad(video_queue,"src"),
-                  gst_element_get_pad(video_thread,"sink"));
-
-  // set up thread state and kick things off
-  gtk_object_set(GTK_OBJECT(video_thread),"create_thread",TRUE,NULL);
-  g_print("setting to RUNNING state\n");
-  gst_element_set_state(GST_ELEMENT(video_thread),GST_STATE_READY);
-}
-
diff --git a/gstplay/mpeg2.c b/gstplay/mpeg2.c
deleted file mode 100644
index aefdd4a783..0000000000
--- a/gstplay/mpeg2.c
+++ /dev/null
@@ -1,150 +0,0 @@
-
-#define BUFFER 20
-#define VIDEO_DECODER "mpeg2play"
-
-#ifdef HAVE_CONFIG_H
-# include
-#endif
-
-#include
-#include
-
-#include "codecs.h"
-
-
-extern gboolean _gst_plugin_spew;
-extern GstElement *video_render_queue, *audio_render_queue;
-GstElement *merge_subtitles;
-
-void mpeg2_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
-{
-  GstElement *parse_audio, *decode;
-  GstElement *audio_queue;
-  GstElement *audio_thread;
-
-  g_print("***** a new pad %s was created\n", gst_pad_get_name(pad));
-
-  // connect to audio pad
-  if (strncmp(gst_pad_get_name(pad), "video_", 6) == 0) {
-    gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-    mpeg2_setup_video_thread(pad, video_render_queue, pipeline);
-    gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
-    return;
-  }
-  else if (strncmp(gst_pad_get_name(pad), "private_stream_1.0", 18) == 0) {
-    gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-    gst_plugin_load("ac3parse");
-    gst_plugin_load("ac3dec");
-    // construct internal pipeline elements
-    parse_audio = gst_elementfactory_make("ac3parse","parse_audio");
-    g_return_if_fail(parse_audio != NULL);
-    decode = gst_elementfactory_make("ac3dec","decode_audio");
-    g_return_if_fail(decode != NULL);
-  } else if (strncmp(gst_pad_get_name(pad), "subtitle_stream_4", 17) == 0) {
-    gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-    gst_pad_connect(pad,
-                    gst_element_get_pad(merge_subtitles,"subtitle"));
-    gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
-    return;
-  }
-  else if (strncmp(gst_pad_get_name(pad), "audio_", 6) == 0) {
-    gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-    gst_plugin_load("mp3parse");
-    gst_plugin_load("mpg123");
-    // construct internal pipeline elements
-    parse_audio = gst_elementfactory_make("mp3parse","parse_audio");
-    g_return_if_fail(parse_audio != NULL);
-    decode = gst_elementfactory_make("mpg123","decode_audio");
-    g_return_if_fail(decode != NULL);
-  }
-  else {
-    return;
-  }
-
-  // create the thread and pack stuff into it
-  audio_thread = gst_thread_new("audio_thread");
-  g_return_if_fail(audio_thread != NULL);
-  gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(parse_audio));
-  gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(decode));
-  gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(audio_render_queue));
-
-  // set up pad connections
-  gst_element_add_ghost_pad(GST_ELEMENT(audio_thread),
-                            gst_element_get_pad(parse_audio,"sink"));
-  gst_pad_connect(gst_element_get_pad(parse_audio,"src"),
-                  gst_element_get_pad(decode,"sink"));
-  gst_pad_connect(gst_element_get_pad(decode,"src"),
-                  gst_element_get_pad(audio_render_queue,"sink"));
-
-  // construct queue and connect everything in the main pipelie
-  audio_queue = gst_elementfactory_make("queue","audio_queue");
-  gtk_object_set(GTK_OBJECT(audio_queue),"max_level",30,NULL);
-  gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(audio_queue));
-  gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(audio_thread));
-  gst_pad_connect(pad,
-                  gst_element_get_pad(audio_queue,"sink"));
-  gst_pad_connect(gst_element_get_pad(audio_queue,"src"),
-                  gst_element_get_pad(audio_thread,"sink"));
-
-  // set up thread state and kick things off
-  gtk_object_set(GTK_OBJECT(audio_thread),"create_thread",TRUE,NULL);
-  g_print("setting to READY state\n");
-  gst_element_set_state(GST_ELEMENT(audio_thread),GST_STATE_READY);
-
-  gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
-}
-
-void mpeg2_setup_video_thread(GstPad *pad, GstElement *show, GstElement *pipeline)
-{
-  GstElement *parse_video, *decode_video;
-  GstElement *video_queue;
-  GstElement *video_thread;
-
-  gst_plugin_load("mp1videoparse");
-  gst_plugin_load(VIDEO_DECODER);
-  gst_plugin_load("mpeg2subt");
-  // construct internal pipeline elements
-  parse_video = gst_elementfactory_make("mp1videoparse","parse_video");
-  g_return_if_fail(parse_video != NULL);
-  decode_video = gst_elementfactory_make(VIDEO_DECODER,"decode_video");
-  g_return_if_fail(decode_video != NULL);
-  merge_subtitles = gst_elementfactory_make("mpeg2subt","merge_subtitles");
-  g_return_if_fail(merge_subtitles != NULL);
-
-  // create the thread and pack stuff into it
-  video_thread = gst_thread_new("video_thread");
-  g_return_if_fail(video_thread != NULL);
-  gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(parse_video));
-  gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(decode_video));
-  gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(merge_subtitles));
-  gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(video_render_queue));
-  gst_bin_use_cothreads(GST_BIN(video_thread), FALSE);
-
-  // set up pad connections
-  gst_element_add_ghost_pad(GST_ELEMENT(video_thread),
-                            gst_element_get_pad(parse_video,"sink"));
-  gst_pad_connect(gst_element_get_pad(parse_video,"src"),
-                  gst_element_get_pad(decode_video,"sink"));
-  gst_pad_connect(gst_element_get_pad(decode_video,"src"),
-                  gst_element_get_pad(merge_subtitles,"video"));
-  gst_pad_connect(gst_element_get_pad(merge_subtitles,"src"),
-                  gst_element_get_pad(video_render_queue,"sink"));
-
-  // construct queue and connect everything in the main pipeline
-  video_queue = gst_elementfactory_make("queue","video_queue");
-  gtk_object_set(GTK_OBJECT(video_queue),"max_level",BUFFER,NULL);
-  gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(video_queue));
-  gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(video_thread));
-  gst_pad_connect(pad,
-                  gst_element_get_pad(video_queue,"sink"));
-  gst_pad_connect(gst_element_get_pad(video_queue,"src"),
-                  gst_element_get_pad(video_thread,"sink"));
-
-  // set up thread state and kick things off
-  gtk_object_set(GTK_OBJECT(video_thread),"create_thread",TRUE,NULL);
-  g_print("setting to RUNNING state\n");
-  gst_element_set_state(GST_ELEMENT(video_thread),GST_STATE_READY);
-
-  g_print("\n");
-}
-
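
For reference, a minimal sketch of the autoplug-driven setup this patch moves gstplay to, using only the calls that appear in the added lines above (gst_pipeline_new, gst_pipeline_add_src, gst_pipeline_add_sink, gst_pipeline_autoplug, gst_bin_iterate). The command-line handling, the bare error handling and the plain iterate loop are illustrative assumptions rather than code from the tree, and the headers/behavior assumed are those of this 0.1-era API; gstplay itself drives iteration from a GTK idle handler and adds the GUI on top.

/* sketch: play a file by letting the pipeline autoplug itself */
#include <gtk/gtk.h>
#include <gst/gst.h>

int main(int argc, char *argv[])
{
  GstElement *pipeline, *src, *audio_sink, *video_sink;

  gst_init(&argc, &argv);
  if (argc < 2) return -1;                       /* assumed: file name on the command line */

  pipeline = gst_pipeline_new("pipeline");

  /* one source; typefinding is left to the autoplugger */
  src = gst_elementfactory_make("disksrc", "disk_src");
  gtk_object_set(GTK_OBJECT(src), "location", argv[1], NULL);
  gst_pipeline_add_src(GST_PIPELINE(pipeline), GST_ELEMENT(src));

  /* register the sinks; the autoplugger decides which ones end up connected */
  audio_sink = gst_elementfactory_make("audiosink", "play_audio");
  video_sink = gst_elementfactory_make("videosink", "show");
  gst_pipeline_add_sink(GST_PIPELINE(pipeline), audio_sink);
  gst_pipeline_add_sink(GST_PIPELINE(pipeline), video_sink);

  /* build the decoding chain between src and sinks from the registered factories */
  if (!gst_pipeline_autoplug(GST_PIPELINE(pipeline))) {
    g_print("unable to handle stream\n");
    return -1;
  }

  gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);

  /* gstplay iterates from a GTK idle handler and stops on the "eos" signal;
   * a plain loop is enough to show the data flow here */
  while (1)
    gst_bin_iterate(GST_BIN(pipeline));

  return 0;
}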