mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git

Original commit message from CVS:
Removed the hardcoded pipeline setup in gstplay in favor of the autoplugging features. Still in rough shape...

parent 7694a06b5d
commit b7810a3635

8 changed files with 47 additions and 460 deletions
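For context: the new gstplay no longer selects a demuxer by hand in a have_type callback. It registers the disk source and the audio/video sinks on the pipeline and asks the core to autoplug whatever sits in between. A minimal sketch of that flow, using only the 0.1-era calls that appear in the diff below (the media location, the variable names and the endless iterate loop are illustrative; the real gstplay drives gst_bin_iterate() from a GTK idle handler):

#include <gnome.h>
#include <gst/gst.h>

int main(int argc, char *argv[]) {
  GstElement *pipeline, *src, *audio_play, *show;

  gst_init(&argc, &argv);

  /* a pipeline to hold everything, plus a source and two sinks */
  pipeline = gst_pipeline_new("pipeline");
  src = gst_elementfactory_make("disksrc", "disk_src");
  gtk_object_set(GTK_OBJECT(src), "location", "/tmp/test.mpg", NULL);
  audio_play = gst_elementfactory_make("audiosink", "play_audio");
  show = gst_elementfactory_make("videosink", "show");

  /* register the endpoints; autoplugging fills in the middle */
  gst_pipeline_add_src(GST_PIPELINE(pipeline), src);
  gst_pipeline_add_sink(GST_PIPELINE(pipeline), audio_play);
  gst_pipeline_add_sink(GST_PIPELINE(pipeline), show);

  if (!gst_pipeline_autoplug(GST_PIPELINE(pipeline))) {
    g_print("unable to handle stream\n");
    return -1;
  }

  gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
  while (1)
    gst_bin_iterate(GST_BIN(pipeline));  /* gstplay schedules this via gtk_idle_add() */

  return 0;
}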
@@ -179,7 +179,7 @@ static void gst_editor_connection_realize(GstEditorConnection *connection) {
 connection->line = gnome_canvas_item_new(
 GST_EDITOR_ELEMENT(connection->parent)->group,
 gnome_canvas_line_get_type(),
-"points",connection->points,NULL);
+"points",connection->points,"width_units",2.0, NULL);
 }
 
 static void gst_editor_connection_destroy(GtkObject *object) {
@@ -226,7 +226,7 @@ static void gst_editor_pad_realize(GstEditorPad *pad) {
 
 pad->border = gnome_canvas_item_new(pad->group,
 gnome_canvas_rect_get_type(),
-"width_units",1.0,"fill_color","white","outline_color","black",
+"width_units",1.0,"fill_color_rgba", 0xCCFFCC00,"outline_color","black",
 "x1",0.0,"y1",0.0,"x2",pad->width,"y2",pad->height,NULL);
 g_return_if_fail(pad->border != NULL);
 GST_EDITOR_SET_OBJECT(pad->border,pad);
@@ -419,6 +419,8 @@ differ:
 GstElement *thesrcelement = srcelement;
 GstElement *thebin = GST_ELEMENT(pipeline);
 
+if (g_list_length(factories[i]) < 1) goto next;
+
 sinkelement = (GstElement *)elements->data;
 
 use_thread = have_common;
@@ -486,7 +488,7 @@ differ:
 // this element is now the new source element
 thesrcelement = element;
 }
-
+next:
 elements = g_list_next(elements);
 i++;
 }
@@ -11,12 +11,9 @@ glade_DATA = gstplay.glade play.xpm stop.xpm pause.xpm
 
 gstplay_SOURCES = \
 gstplay.c \
-mpeg1.c mpeg2.c avi.c\
 interface.c interface.h \
 callbacks.c callbacks.h
 
-noinst_HEADERS = codecs.h
-
 CFLAGS += -O2 -Wall -DDATADIR=\""$(gladedir)/"\"
 
 gstplay_CFLAGS = $(shell gnome-config --cflags gnomeui) $(shell libglade-config --cflags gnome) \
@@ -1,34 +0,0 @@
-
-#ifdef HAVE_CONFIG_H
-# include <config.h>
-#endif
-
-#include <gnome.h>
-#include <gst/gst.h>
-
-extern GstElement *video_render_queue, *audio_render_queue;
-
-void avi_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
-{
-g_print("***** a new pad %s was created\n", gst_pad_get_name(pad));
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-
-// connect to audio pad
-//if (0) {
-if (strncmp(gst_pad_get_name(pad), "audio_", 6) == 0) {
-
-gst_bin_add(GST_BIN(pipeline), audio_render_queue);
-gst_pad_connect(pad,
-gst_element_get_pad(audio_render_queue,"sink"));
-
-} else if (strncmp(gst_pad_get_name(pad), "video_", 6) == 0) {
-//} else if (0) {
-
-gst_bin_add(GST_BIN(pipeline), video_render_queue);
-gst_pad_connect(pad,
-gst_element_get_pad(video_render_queue,"sink"));
-}
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
-g_print("\n");
-}
-
@@ -15,36 +15,30 @@
 #include "callbacks.h"
 #include "interface.h"
 
-#include "codecs.h"
-
 #define MUTEX_STATUS() (g_mutex_trylock(gdk_threads_mutex)? g_mutex_unlock(gdk_threads_mutex), "was not locked" : "was locked")
 
 
 #define BUFFER 20
 
-extern gboolean _gst_plugin_spew;
-gboolean idle_func(gpointer data);
-GstElement *show, *video_render_queue;
-GstElement *audio_play, *audio_render_queue;
+static gboolean idle_func(gpointer data);
+static gint start_from_file(guchar *filename);
+GstElement *show;
+GstElement *audio_play;
 GstElement *src;
+GstElement *parse;
 GstElement *pipeline;
-GstElement *parse = NULL;
-GstElement *typefind;
-GstElement *video_render_thread;
-GstElement *audio_render_thread;
 GstPlayState state;
 gboolean picture_shown = FALSE;
 guchar statusline[200];
 guchar *statustext = "stopped";
 GtkWidget *status_area;
+GtkWidget *video;
 GtkAdjustment *adjustment;
 GtkWidget *play_button;
 GtkWidget *pause_button;
 GtkWidget *stop_button;
 GtkFileSelection *open_file_selection;
 
-gint start_from_file(guchar *filename);
-
 static void frame_displayed(GstSrc *asrc)
 {
 int size, time, frame_time = 0, src_pos;
@@ -79,9 +73,8 @@ static void frame_displayed(GstSrc *asrc)
 DEBUG("gstplay: frame displayed end %s\n", MUTEX_STATUS());
 }
 
-gboolean idle_func(gpointer data) {
+static gboolean idle_func(gpointer data) {
 DEBUG("idle start %s\n",MUTEX_STATUS());
-//gst_src_push(GST_SRC(data));
 gst_bin_iterate(GST_BIN(data));
 DEBUG("idle stop %s\n",MUTEX_STATUS());
 return TRUE;
@@ -103,7 +96,7 @@ void show_next_picture() {
 DEBUG("gstplay: next found %s\n", MUTEX_STATUS());
 }
 
-void mute_audio(gboolean mute) {
+static void mute_audio(gboolean mute) {
 gtk_object_set(GTK_OBJECT(audio_play),"mute",mute,NULL);
 }
 
@@ -135,7 +128,7 @@ on_exit_menu_activate (GtkMenuItem *menuitem,
 gdk_threads_leave();
 gstplay_tear_down();
 gdk_threads_enter();
-gtk_main_quit();
+gst_main_quit();
 }
 
 void on_ok_button1_clicked (GtkButton *button,
@@ -155,12 +148,6 @@ gint on_gstplay_delete_event(GtkWidget *widget, GdkEvent *event, gpointer data)
 return FALSE;
 }
 
-void gstplay_parse_pads_created(GstElement *element, gpointer data)
-{
-printf("gstplay: element \"%s\" is ready\n", gst_element_get_name(element));
-gst_clock_reset(gst_clock_get_system());
-}
-
 void change_state(GstPlayState new_state) {
 
 if (new_state == state) return;
@@ -169,6 +156,7 @@ void change_state(GstPlayState new_state) {
 mute_audio(FALSE);
 statustext = "playing";
 update_status_area(status_area);
+gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_READY);
 gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
 gtk_idle_add(idle_func, pipeline);
 state = GSTPLAY_PLAYING;
@@ -178,12 +166,14 @@ void change_state(GstPlayState new_state) {
 statustext = "paused";
 update_status_area(status_area);
 if (state != GSTPLAY_STOPPED) gtk_idle_remove_by_data(pipeline);
+//gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
 mute_audio(TRUE);
 state = GSTPLAY_PAUSE;
 update_buttons(1);
 break;
 case GSTPLAY_STOPPED:
 if (state != GSTPLAY_PAUSE) gtk_idle_remove_by_data(pipeline);
+//gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_NULL);
 statustext = "stopped";
 update_status_area(status_area);
 mute_audio(TRUE);
@@ -191,104 +181,31 @@ void change_state(GstPlayState new_state) {
 gtk_object_set(GTK_OBJECT(src),"offset",0,NULL);
 update_buttons(2);
 update_slider(adjustment, 0.0);
-show_next_picture();
+//show_next_picture();
 break;
 }
 }
 
-static void have_type(GstSink *sink) {
-gint type;
-GstType *gsttype;
-
-type = gst_util_get_int_arg(GTK_OBJECT(sink),"type");
-gsttype = gst_type_find_by_id(type);
-
-g_print("have type %d:%s\n", type, gsttype->mime);
-
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_NULL);
-gst_bin_remove(GST_BIN(pipeline), GST_ELEMENT(sink));
-
-gst_pad_disconnect(gst_element_get_pad(src,"src"),
-gst_element_get_pad(GST_ELEMENT(sink),"sink"));
-
-if (strstr(gsttype->mime, "mpeg1-system")) {
-parse = gst_elementfactory_make("mpeg1parse","mpeg1_system_parse");
-gtk_signal_connect(GTK_OBJECT(parse),"new_pad",
-GTK_SIGNAL_FUNC(mpeg1_new_pad_created),pipeline);
-gtk_signal_connect(GTK_OBJECT(show),"frame_displayed",
-GTK_SIGNAL_FUNC(frame_displayed),NULL);
-}
-else if (strstr(gsttype->mime, "mpeg2-system")) {
-parse = gst_elementfactory_make("mpeg2parse","mpeg2_system_parse");
-gtk_signal_connect(GTK_OBJECT(parse),"new_pad",
-GTK_SIGNAL_FUNC(mpeg2_new_pad_created),pipeline);
-gtk_signal_connect(GTK_OBJECT(show),"frame_displayed",
-GTK_SIGNAL_FUNC(frame_displayed),NULL);
-}
-else if (strstr(gsttype->mime, "avi")) {
-parse = gst_elementfactory_make("parseavi","parse");
-gtk_signal_connect(GTK_OBJECT(parse),"new_pad",
-GTK_SIGNAL_FUNC(avi_new_pad_created),pipeline);
-}
-else if (strstr(gsttype->mime, "mpeg1")) {
-mpeg1_setup_video_thread(gst_element_get_pad(src,"src"), video_render_queue, GST_ELEMENT(pipeline));
-gst_clock_reset(gst_clock_get_system());
-gtk_signal_connect(GTK_OBJECT(show),"frame_displayed",
-GTK_SIGNAL_FUNC(frame_displayed),NULL);
-}
-else if (strstr(gsttype->mime, "mp3")) {
-mpeg1_setup_audio_thread(gst_element_get_pad(src,"src"), audio_render_queue, GST_ELEMENT(pipeline));
-gst_clock_reset(gst_clock_get_system());
-}
-else {
-g_print("unknown media type\n");
-exit(0);
-}
-
-if (parse) {
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(parse));
-gst_pad_connect(gst_element_get_pad(src,"src"),
-gst_element_get_pad(parse,"sink"));
-gtk_signal_connect(GTK_OBJECT(parse),"pads_created",
-GTK_SIGNAL_FUNC(gstplay_parse_pads_created),pipeline);
-}
-gtk_object_set(GTK_OBJECT(src),"offset",0,NULL);
-
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(video_render_thread));
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(audio_render_thread));
-
-g_print("setting to READY state\n");
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_READY);
-g_print("setting to PLAYING state\n");
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
-g_print("set to PLAYING state\n");
-
-}
-
-gint start_from_file(guchar *filename)
+static gint start_from_file(guchar *filename)
 {
-src = gst_elementfactory_make("disksrc","disk_src");
+src = gst_elementfactory_make("disksrc", "disk_src");
 g_return_val_if_fail(src != NULL, -1);
 g_print("should be using file '%s'\n",filename);
 gtk_object_set(GTK_OBJECT(src),"location",filename,NULL);
 
-typefind = gst_elementfactory_make("typefind","typefind");
-g_return_val_if_fail(typefind != NULL, -1);
-
-gtk_signal_connect(GTK_OBJECT(typefind),"have_type",
-GTK_SIGNAL_FUNC(have_type),NULL);
-
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(src));
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(typefind));
-
+gst_pipeline_add_src(GST_PIPELINE(pipeline),GST_ELEMENT(src));
 gtk_signal_connect(GTK_OBJECT(src),"eos",
 GTK_SIGNAL_FUNC(eof),NULL);
 
-gst_pad_connect(gst_element_get_pad(src,"src"),
-gst_element_get_pad(typefind,"sink"));
+if (!gst_pipeline_autoplug(GST_PIPELINE(pipeline))) {
+g_print("unable to handle stream\n");
+exit(-1);
+}
 
+if (GST_PAD_CONNECTED(gst_element_get_pad(show, "sink"))) {
+gtk_widget_show(video);
+}
 g_print("setting to READY state\n");
-
 gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_READY);
 
 state = GSTPLAY_STOPPED;
@@ -311,18 +228,14 @@ main (int argc, char *argv[])
 bindtextdomain (PACKAGE, PACKAGE_LOCALE_DIR);
 textdomain (PACKAGE);
 
-g_thread_init(NULL);
-gtk_init(&argc,&argv);
+gst_init(&argc,&argv);
 gnome_init ("gstreamer", VERSION, argc, argv);
 glade_init();
 glade_gnome_init();
-gst_init(&argc,&argv);
-//gst_plugin_load_all();
 
 g_print("using %s\n", DATADIR"gstplay.glade");
 /* load the interface */
 xml = glade_xml_new(DATADIR "gstplay.glade", "gstplay");
-/* connect the signals in the interface */
 
 status_area = glade_xml_get_widget(xml, "status_area");
 slider = glade_xml_get_widget(xml, "slider");
@@ -349,43 +262,33 @@ main (int argc, char *argv[])
 GTK_SIGNAL_FUNC (target_drag_data_received),
 NULL);
 
-gst_plugin_load("videosink");
-
 g_snprintf(statusline, 200, "seeking");
 
-pipeline = gst_pipeline_new("main_pipeline");
-g_return_val_if_fail(pipeline != NULL, -1);
+/* create a new bin to hold the elements */
+pipeline = gst_pipeline_new("pipeline");
+g_assert(pipeline != NULL);
 
-video_render_thread = gst_thread_new("video_render_thread");
-g_return_val_if_fail(video_render_thread != NULL, -1);
+/* and an audio sink */
+audio_play = gst_elementfactory_make("audiosink","play_audio");
+g_return_val_if_fail(audio_play != NULL, -1);
+
+/* and a video sink */
 show = gst_elementfactory_make("videosink","show");
 g_return_val_if_fail(show != NULL, -1);
 gtk_object_set(GTK_OBJECT(show),"xv_enabled",FALSE,NULL);
+gtk_signal_connect(GTK_OBJECT(show),"frame_displayed",
+GTK_SIGNAL_FUNC(frame_displayed),NULL);
+
+video = gst_util_get_widget_arg(GTK_OBJECT(show),"widget");
 gnome_dock_set_client_area(GNOME_DOCK(glade_xml_get_widget(xml, "dock1")),
-gst_util_get_widget_arg(GTK_OBJECT(show),"widget"));
-gst_bin_add(GST_BIN(video_render_thread),GST_ELEMENT(show));
-
+video);
 
+gst_pipeline_add_sink(GST_PIPELINE(pipeline), audio_play);
+gst_pipeline_add_sink(GST_PIPELINE(pipeline), show);
+/* connect the signals in the interface */
 glade_xml_signal_autoconnect(xml);
 
-video_render_queue = gst_elementfactory_make("queue","video_render_queue");
-gtk_object_set(GTK_OBJECT(video_render_queue),"max_level",BUFFER,NULL);
-gst_pad_connect(gst_element_get_pad(video_render_queue,"src"),
-gst_element_get_pad(show,"sink"));
-gtk_object_set(GTK_OBJECT(video_render_thread),"create_thread",TRUE,NULL);
-
-
-audio_render_thread = gst_thread_new("audio_render_thread");
-g_return_val_if_fail(audio_render_thread != NULL, -1);
-audio_play = gst_elementfactory_make("audiosink","play_audio");
-gst_bin_add(GST_BIN(audio_render_thread),GST_ELEMENT(audio_play));
-
-audio_render_queue = gst_elementfactory_make("queue","audio_render_queue");
-gtk_object_set(GTK_OBJECT(audio_render_queue),"max_level",BUFFER,NULL);
-gst_pad_connect(gst_element_get_pad(audio_render_queue,"src"),
-gst_element_get_pad(audio_play,"sink"));
-gtk_object_set(GTK_OBJECT(audio_render_thread),"create_thread",TRUE,NULL);
 
 if (argc > 1) {
 gint ret;
 
@@ -393,9 +296,7 @@ main (int argc, char *argv[])
 if (ret < 0) exit(ret);
 }
 
-gdk_threads_enter();
-gtk_main();
-gdk_threads_leave();
+gst_main();
 return 0;
 }
 
gstplay/mpeg1.c
@@ -1,129 +0,0 @@
-
-#define BUFFER 20
-#define VIDEO_DECODER "mpeg_play"
-
-#ifdef HAVE_CONFIG_H
-# include <config.h>
-#endif
-
-#include <gnome.h>
-#include <gst/gst.h>
-
-#include "codecs.h"
-
-
-extern gboolean _gst_plugin_spew;
-extern GstElement *video_render_queue;
-extern GstElement *audio_render_queue;
-
-void mpeg1_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
-{
-
-g_print("***** a new pad %s was created\n", gst_pad_get_name(pad));
-
-// connect to audio pad
-//if (0) {
-if (strncmp(gst_pad_get_name(pad), "audio_", 6) == 0 && audio_render_queue) {
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-mpeg1_setup_audio_thread(pad, audio_render_queue, pipeline);
-
-} else if (strncmp(gst_pad_get_name(pad), "video_", 6) == 0) {
-//} else if (0) {
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-mpeg1_setup_video_thread(pad, video_render_queue, pipeline);
-}
-else return;
-
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
-}
-
-void mpeg1_setup_audio_thread(GstPad *pad, GstElement *audio_render_queue, GstElement *pipeline)
-{
-GstElement *parse_audio, *decode;
-GstElement *audio_queue;
-GstElement *audio_thread;
-
-gst_plugin_load("mp3parse");
-gst_plugin_load("mpg123");
-// construct internal pipeline elements
-parse_audio = gst_elementfactory_make("mp3parse","parse_audio");
-g_return_if_fail(parse_audio != NULL);
-decode = gst_elementfactory_make("mpg123","decode_audio");
-g_return_if_fail(decode != NULL);
-
-// create the thread and pack stuff into it
-audio_thread = gst_thread_new("audio_thread");
-g_return_if_fail(audio_thread != NULL);
-gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(parse_audio));
-gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(decode));
-gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(audio_render_queue));
-
-// set up pad connections
-gst_element_add_ghost_pad(GST_ELEMENT(audio_thread),
-gst_element_get_pad(parse_audio,"sink"));
-gst_pad_connect(gst_element_get_pad(parse_audio,"src"),
-gst_element_get_pad(decode,"sink"));
-gst_pad_connect(gst_element_get_pad(decode,"src"),
-gst_element_get_pad(audio_render_queue,"sink"));
-
-// construct queue and connect everything in the main pipelie
-audio_queue = gst_elementfactory_make("queue","audio_queue");
-gtk_object_set(GTK_OBJECT(audio_queue),"max_level",BUFFER,NULL);
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(audio_queue));
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(audio_thread));
-gst_pad_connect(pad,
-gst_element_get_pad(audio_queue,"sink"));
-gst_pad_connect(gst_element_get_pad(audio_queue,"src"),
-gst_element_get_pad(audio_thread,"sink"));
-
-// set up thread state and kick things off
-gtk_object_set(GTK_OBJECT(audio_thread),"create_thread",TRUE,NULL);
-g_print("setting to READY state\n");
-gst_element_set_state(GST_ELEMENT(audio_thread),GST_STATE_READY);
-}
-
-void mpeg1_setup_video_thread(GstPad *pad, GstElement *video_render_queue, GstElement *pipeline)
-{
-GstElement *parse_video, *decode_video;
-GstElement *video_queue;
-GstElement *video_thread;
-
-gst_plugin_load("mp1videoparse");
-gst_plugin_load(VIDEO_DECODER);
-// construct internal pipeline elements
-parse_video = gst_elementfactory_make("mp1videoparse","parse_video");
-g_return_if_fail(parse_video != NULL);
-decode_video = gst_elementfactory_make(VIDEO_DECODER,"decode_video");
-g_return_if_fail(decode_video != NULL);
-
-// create the thread and pack stuff into it
-video_thread = gst_thread_new("video_thread");
-g_return_if_fail(video_thread != NULL);
-gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(parse_video));
-gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(decode_video));
-gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(video_render_queue));
-
-// set up pad connections
-gst_element_add_ghost_pad(GST_ELEMENT(video_thread),
-gst_element_get_pad(parse_video,"sink"));
-gst_pad_connect(gst_element_get_pad(parse_video,"src"),
-gst_element_get_pad(decode_video,"sink"));
-gst_pad_connect(gst_element_get_pad(decode_video,"src"),
-gst_element_get_pad(video_render_queue,"sink"));
-
-// construct queue and connect everything in the main pipeline
-video_queue = gst_elementfactory_make("queue","video_queue");
-gtk_object_set(GTK_OBJECT(video_queue),"max_level",BUFFER,NULL);
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(video_queue));
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(video_thread));
-gst_pad_connect(pad,
-gst_element_get_pad(video_queue,"sink"));
-gst_pad_connect(gst_element_get_pad(video_queue,"src"),
-gst_element_get_pad(video_thread,"sink"));
-
-// set up thread state and kick things off
-gtk_object_set(GTK_OBJECT(video_thread),"create_thread",TRUE,NULL);
-g_print("setting to RUNNING state\n");
-gst_element_set_state(GST_ELEMENT(video_thread),GST_STATE_READY);
-}
-
gstplay/mpeg2.c
@@ -1,150 +0,0 @@
-
-#define BUFFER 20
-#define VIDEO_DECODER "mpeg2play"
-
-#ifdef HAVE_CONFIG_H
-# include <config.h>
-#endif
-
-#include <gnome.h>
-#include <gst/gst.h>
-
-#include "codecs.h"
-
-
-extern gboolean _gst_plugin_spew;
-extern GstElement *video_render_queue, *audio_render_queue;
-GstElement *merge_subtitles;
-
-void mpeg2_new_pad_created(GstElement *parse,GstPad *pad,GstElement *pipeline)
-{
-GstElement *parse_audio, *decode;
-GstElement *audio_queue;
-GstElement *audio_thread;
-
-g_print("***** a new pad %s was created\n", gst_pad_get_name(pad));
-
-// connect to audio pad
-if (strncmp(gst_pad_get_name(pad), "video_", 6) == 0) {
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-mpeg2_setup_video_thread(pad, video_render_queue, pipeline);
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
-return;
-}
-else if (strncmp(gst_pad_get_name(pad), "private_stream_1.0", 18) == 0) {
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-gst_plugin_load("ac3parse");
-gst_plugin_load("ac3dec");
-// construct internal pipeline elements
-parse_audio = gst_elementfactory_make("ac3parse","parse_audio");
-g_return_if_fail(parse_audio != NULL);
-decode = gst_elementfactory_make("ac3dec","decode_audio");
-g_return_if_fail(decode != NULL);
-} else if (strncmp(gst_pad_get_name(pad), "subtitle_stream_4", 17) == 0) {
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-gst_pad_connect(pad,
-gst_element_get_pad(merge_subtitles,"subtitle"));
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
-return;
-}
-else if (strncmp(gst_pad_get_name(pad), "audio_", 6) == 0) {
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PAUSED);
-gst_plugin_load("mp3parse");
-gst_plugin_load("mpg123");
-// construct internal pipeline elements
-parse_audio = gst_elementfactory_make("mp3parse","parse_audio");
-g_return_if_fail(parse_audio != NULL);
-decode = gst_elementfactory_make("mpg123","decode_audio");
-g_return_if_fail(decode != NULL);
-}
-else {
-return;
-}
-
-// create the thread and pack stuff into it
-audio_thread = gst_thread_new("audio_thread");
-g_return_if_fail(audio_thread != NULL);
-gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(parse_audio));
-gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(decode));
-gst_bin_add(GST_BIN(audio_thread),GST_ELEMENT(audio_render_queue));
-
-// set up pad connections
-gst_element_add_ghost_pad(GST_ELEMENT(audio_thread),
-gst_element_get_pad(parse_audio,"sink"));
-gst_pad_connect(gst_element_get_pad(parse_audio,"src"),
-gst_element_get_pad(decode,"sink"));
-gst_pad_connect(gst_element_get_pad(decode,"src"),
-gst_element_get_pad(audio_render_queue,"sink"));
-
-// construct queue and connect everything in the main pipelie
-audio_queue = gst_elementfactory_make("queue","audio_queue");
-gtk_object_set(GTK_OBJECT(audio_queue),"max_level",30,NULL);
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(audio_queue));
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(audio_thread));
-gst_pad_connect(pad,
-gst_element_get_pad(audio_queue,"sink"));
-gst_pad_connect(gst_element_get_pad(audio_queue,"src"),
-gst_element_get_pad(audio_thread,"sink"));
-
-// set up thread state and kick things off
-gtk_object_set(GTK_OBJECT(audio_thread),"create_thread",TRUE,NULL);
-g_print("setting to READY state\n");
-gst_element_set_state(GST_ELEMENT(audio_thread),GST_STATE_READY);
-
-gst_element_set_state(GST_ELEMENT(pipeline),GST_STATE_PLAYING);
-}
-
-void mpeg2_setup_video_thread(GstPad *pad, GstElement *show, GstElement *pipeline)
-{
-GstElement *parse_video, *decode_video;
-GstElement *video_queue;
-GstElement *video_thread;
-
-gst_plugin_load("mp1videoparse");
-gst_plugin_load(VIDEO_DECODER);
-gst_plugin_load("mpeg2subt");
-// construct internal pipeline elements
-parse_video = gst_elementfactory_make("mp1videoparse","parse_video");
-g_return_if_fail(parse_video != NULL);
-decode_video = gst_elementfactory_make(VIDEO_DECODER,"decode_video");
-g_return_if_fail(decode_video != NULL);
-merge_subtitles = gst_elementfactory_make("mpeg2subt","merge_subtitles");
-g_return_if_fail(merge_subtitles != NULL);
-
-// create the thread and pack stuff into it
-video_thread = gst_thread_new("video_thread");
-g_return_if_fail(video_thread != NULL);
-gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(parse_video));
-gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(decode_video));
-gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(merge_subtitles));
-gst_bin_add(GST_BIN(video_thread),GST_ELEMENT(video_render_queue));
-gst_bin_use_cothreads(GST_BIN(video_thread), FALSE);
-
-// set up pad connections
-gst_element_add_ghost_pad(GST_ELEMENT(video_thread),
-gst_element_get_pad(parse_video,"sink"));
-gst_pad_connect(gst_element_get_pad(parse_video,"src"),
-gst_element_get_pad(decode_video,"sink"));
-gst_pad_connect(gst_element_get_pad(decode_video,"src"),
-gst_element_get_pad(merge_subtitles,"video"));
-gst_pad_connect(gst_element_get_pad(merge_subtitles,"src"),
-gst_element_get_pad(video_render_queue,"sink"));
-
-// construct queue and connect everything in the main pipeline
-video_queue = gst_elementfactory_make("queue","video_queue");
-gtk_object_set(GTK_OBJECT(video_queue),"max_level",BUFFER,NULL);
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(video_queue));
-gst_bin_add(GST_BIN(pipeline),GST_ELEMENT(video_thread));
-gst_pad_connect(pad,
-gst_element_get_pad(video_queue,"sink"));
-gst_pad_connect(gst_element_get_pad(video_queue,"src"),
-gst_element_get_pad(video_thread,"sink"));
-
-// set up thread state and kick things off
-gtk_object_set(GTK_OBJECT(video_thread),"create_thread",TRUE,NULL);
-g_print("setting to RUNNING state\n");
-gst_element_set_state(GST_ELEMENT(video_thread),GST_STATE_READY);
-
-g_print("\n");
-}
-