Initial commit

unknown 2012-04-17 12:50:15 +02:00
commit 27c6ed059b
10 changed files with 4375 additions and 0 deletions

View file

@@ -0,0 +1,28 @@
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *pipeline;
GstBus *bus;
GstMessage *msg;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Build the pipeline */
pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
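
Basic tutorial 1 deliberately skips error handling. A minimal sketch of the same program with errors checked, assuming nothing beyond GStreamer 0.10 and the playbin2 element used above (the GError argument of gst_parse_launch and the type of the final bus message are the only additions):

#include <gst/gst.h>

int main (int argc, char *argv[]) {
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  GError *error = NULL;

  gst_init (&argc, &argv);

  /* gst_parse_launch reports parsing/plugin problems through a GError */
  pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", &error);
  if (error != NULL) {
    g_printerr ("Could not build pipeline: %s\n", error->message);
    g_clear_error (&error);
    if (pipeline)
      gst_object_unref (pipeline);
    return -1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Block until an error or end-of-stream message arrives, then report which */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL) {
    if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR)
      g_printerr ("An error occurred during playback.\n");
    gst_message_unref (msg);
  }

  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}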

View file

@@ -0,0 +1,76 @@
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *sink;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("videotestsrc", "source");
sink = gst_element_factory_make ("autovideosink", "sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
if (gst_element_link (source, sink) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
/* Modify the source's properties */
g_object_set (source, "pattern", 0, NULL);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
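
A natural extension of this pipeline is a filter between the source and the sink. A minimal sketch of how the element-creation and linking section above could grow, assuming the vertigotv effect and the ffmpegcolorspace converter from the 0.10 plugin set are installed; the rest of the program stays unchanged:

  GstElement *filter, *convert;

  /* vertigotv is a video effect; ffmpegcolorspace converts between raw video
   * formats so that the chain can negotiate */
  filter = gst_element_factory_make ("vertigotv", "filter");
  convert = gst_element_factory_make ("ffmpegcolorspace", "convert");
  if (!pipeline || !source || !filter || !convert || !sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Add everything to the pipeline and link the whole chain in one call */
  gst_bin_add_many (GST_BIN (pipeline), source, filter, convert, sink, NULL);
  if (!gst_element_link_many (source, filter, convert, sink, NULL)) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }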

View file

@@ -0,0 +1,149 @@
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
GstElement *source;
GstElement *convert;
GstElement *sink;
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("uridecodebin", "source");
data.convert = gst_element_factory_make ("audioconvert", "convert");
data.sink = gst_element_factory_make ("autoaudiosink", "sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
* point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert, data.sink, NULL);
if (!gst_element_link (data.convert, data.sink)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Set the URI to play */
g_object_set (data.source, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
GstPadLinkReturn ret;
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print (" We are already linked. Ignoring.\n");
goto exit;
}
/* Check the new pad's type */
new_pad_caps = gst_pad_get_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);
if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
g_print (" It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
goto exit;
}
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
g_print (" Type is '%s' but link failed.\n", new_pad_type);
} else {
g_print (" Link succeeded (type '%s').\n", new_pad_type);
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);
}
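
Besides "pad-added", uridecodebin (like every GstElement) also emits "no-more-pads" once it has exposed all the pads it will ever create, which is a convenient point to check that the expected streams showed up. A minimal sketch of such a handler, connected next to the pad-added one; the signal itself is standard, the handler body is only illustrative:

  /* Called once the source has created all of its pads */
  static void no_more_pads_handler (GstElement *src, CustomData *data) {
    g_print ("'%s' will not emit any more pads.\n", GST_ELEMENT_NAME (src));
  }

  /* In main(), next to the existing pad-added connection: */
  g_signal_connect (data.source, "no-more-pads", G_CALLBACK (no_more_pads_handler), &data);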

View file

@@ -0,0 +1,157 @@
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
GstElement *playbin2; /* Our one and only element */
gboolean playing; /* Are we in the PLAYING state? */
gboolean terminate; /* Should we terminate execution? */
gboolean seek_enabled; /* Is seeking enabled for this media? */
gboolean seek_done; /* Have we performed the seek already? */
gint64 duration; /* How long does this media last, in nanoseconds */
} CustomData;
/* Forward definition of the message processing function */
static void handle_message (CustomData *data, GstMessage *msg);
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
data.playing = FALSE;
data.terminate = FALSE;
data.seek_enabled = FALSE;
data.seek_done = FALSE;
data.duration = GST_CLOCK_TIME_NONE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.playbin2 = gst_element_factory_make ("playbin2", "playbin2");
if (!data.playbin2) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set the URI to play */
g_object_set (data.playbin2, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Start playing */
ret = gst_element_set_state (data.playbin2, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.playbin2);
return -1;
}
/* Listen to the bus */
bus = gst_element_get_bus (data.playbin2);
do {
msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION);
/* Parse message */
if (msg != NULL) {
handle_message (&data, msg);
} else {
/* We got no message, this means the timeout expired */
if (data.playing) {
GstFormat fmt = GST_FORMAT_TIME;
gint64 current = -1;
/* Query the current position of the stream */
if (!gst_element_query_position (data.playbin2, &fmt, &current)) {
g_printerr ("Could not query current position.\n");
}
/* If we didn't know it yet, query the stream duration */
if (!GST_CLOCK_TIME_IS_VALID (data.duration)) {
if (!gst_element_query_duration (data.playbin2, &fmt, &data.duration)) {
g_printerr ("Could not query current duration.\n");
}
}
/* Print current position and total duration */
g_print ("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
GST_TIME_ARGS (current), GST_TIME_ARGS (data.duration));
/* If seeking is enabled, we have not done it yet, and the time is right, seek */
if (data.seek_enabled && !data.seek_done && current > 10 * GST_SECOND) {
g_print ("\nReached 10s, performing seek...\n");
gst_element_seek_simple (data.playbin2, GST_FORMAT_TIME,
GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, 30 * GST_SECOND);
data.seek_done = TRUE;
}
}
}
} while (!data.terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.playbin2, GST_STATE_NULL);
gst_object_unref (data.playbin2);
return 0;
}
static void handle_message (CustomData *data, GstMessage *msg) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
data->terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
data->terminate = TRUE;
break;
case GST_MESSAGE_DURATION:
/* The duration has changed, mark the current one as invalid */
data->duration = GST_CLOCK_TIME_NONE;
break;
case GST_MESSAGE_STATE_CHANGED: {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin2)) {
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
/* Remember whether we are in the PLAYING state or not */
data->playing = (new_state == GST_STATE_PLAYING);
if (data->playing) {
/* We just moved to PLAYING. Check if seeking is possible */
GstQuery *query;
gint64 start, end;
query = gst_query_new_seeking (GST_FORMAT_TIME);
if (gst_element_query (data->playbin2, query)) {
gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end);
if (data->seek_enabled) {
g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
GST_TIME_ARGS (start), GST_TIME_ARGS (end));
} else {
g_print ("Seeking is DISABLED for this stream.\n");
}
}
else {
g_printerr ("Seeking query failed.");
}
gst_query_unref (query);
}
}
} break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
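
gst_element_seek_simple() is enough for the jump above; when the playback rate or an explicit stop position matters, the full gst_element_seek() call is used instead. A minimal sketch of the same 30-second seek written that way, under the same GStreamer 0.10 assumptions:

  /* Same jump with the full seek API: normal rate, flush and snap to a key
   * unit, start at 30s, keep playing until the end of the stream */
  if (!gst_element_seek (data.playbin2, 1.0, GST_FORMAT_TIME,
      GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
      GST_SEEK_TYPE_SET, 30 * GST_SECOND,
      GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE)) {
    g_printerr ("Seek failed.\n");
  }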

View file

@@ -0,0 +1,248 @@
#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gdk/gdk.h>
#if defined (GDK_WINDOWING_X11)
#include <gdk/gdkx.h>
#elif defined (GDK_WINDOWING_WIN32)
#include <gdk/gdkwin32.h>
#elif defined (GDK_WINDOWING_QUARTZ)
#include <gdk/gdkquartzwindow.h>
#endif
#include <gst/interfaces/xoverlay.h>
#include <string.h> /* memset() */
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
GstElement *playbin2; /* Our one and only pipeline */
GtkWidget *main_window;
GtkWidget *video_window;
GtkWidget *slider;
GstState state;
gint64 duration;
guintptr embed_xid;
} CustomData;
/* Forward definition of the message processing function */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
static GstBusSyncReply bus_sync_handler (GstBus *bus, GstMessage *msg, CustomData *data);
static void realize_cb (GtkWidget *widget, CustomData *data) {
GdkWindow *window = gtk_widget_get_window (widget);
/* This is here just for pedagogical purposes; GDK_WINDOW_XID will call it
* as well:
* if (!gdk_window_ensure_native (window))
*   g_error ("Couldn't create native window needed for GstXOverlay!");
*/
#if defined (GDK_WINDOWING_WIN32)
data->embed_xid = (guintptr)GDK_WINDOW_HWND (window);
#elif defined (GDK_WINDOWING_QUARTZ)
data->embed_xid = gdk_quartz_window_get_nsview (window);
#elif defined (GDK_WINDOWING_X11)
data->embed_xid = GDK_WINDOW_XID (window);
#endif
}
static void play_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin2, GST_STATE_PLAYING);
}
static void pause_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin2, GST_STATE_PAUSED);
}
static void stop_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin2, GST_STATE_READY);
}
static void delete_event_cb (GtkWidget *widget, GdkEvent *event, CustomData *data) {
stop_cb (NULL, data);
gtk_main_quit ();
}
static gboolean draw_cb (GtkWidget *widget, GdkEventExpose *event, CustomData *data) {
if (data->state < GST_STATE_PAUSED) {
GtkAllocation allocation;
GdkWindow *window = gtk_widget_get_window (widget);
cairo_t *cr;
gtk_widget_get_allocation (widget, &allocation);
cr = gdk_cairo_create (window);
cairo_set_source_rgb (cr, 0, 0, 0);
cairo_rectangle (cr, 0, 0, allocation.width, allocation.height);
cairo_fill (cr);
cairo_destroy (cr);
}
/*
if (app->xoverlay_element)
gst_x_overlay_expose (GST_X_OVERLAY (app->xoverlay_element));
*/
return FALSE;
}
static void create_ui (CustomData *data) {
GtkWidget *controls, *main_box;
GtkWidget *play_button, *pause_button, *stop_button;
data->main_window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
g_signal_connect (G_OBJECT (data->main_window), "delete-event", G_CALLBACK (delete_event_cb), data);
data->video_window = gtk_drawing_area_new ();
gtk_widget_set_double_buffered (data->video_window, FALSE);
g_signal_connect (data->video_window, "realize", G_CALLBACK (realize_cb), data);
g_signal_connect (data->video_window, "expose_event", G_CALLBACK (draw_cb), data);
play_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PLAY);
g_signal_connect (G_OBJECT (play_button), "clicked", G_CALLBACK (play_cb), data);
pause_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PAUSE);
g_signal_connect (G_OBJECT (pause_button), "clicked", G_CALLBACK (pause_cb), data);
stop_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_STOP);
g_signal_connect (G_OBJECT (stop_button), "clicked", G_CALLBACK (stop_cb), data);
data->slider = gtk_hscale_new_with_range (0, 100, 1);
gtk_scale_set_draw_value (GTK_SCALE (data->slider), 0);
controls = gtk_hbox_new (FALSE, 0);
gtk_box_pack_start (GTK_BOX (controls), play_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), pause_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), stop_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), data->slider, TRUE, TRUE, 2);
main_box = gtk_vbox_new (FALSE, 0);
gtk_box_pack_start (GTK_BOX (main_box), data->video_window, TRUE, TRUE, 0);
gtk_box_pack_start (GTK_BOX (main_box), controls, FALSE, FALSE, 0);
gtk_container_add (GTK_CONTAINER (data->main_window), main_box);
gtk_window_set_default_size (GTK_WINDOW (data->main_window), 640, 480);
gtk_widget_show_all (data->main_window);
gtk_widget_realize (data->main_window);
}
static gboolean refresh_ui (CustomData *data) {
GstFormat fmt = GST_FORMAT_TIME;
gint64 current = -1;
/* We do not want to update anything unless we are in the PLAYING state */
if (data->state != GST_STATE_PLAYING) return TRUE;
/* If we didn't know it yet, query the stream duration */
if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
if (!gst_element_query_duration (data->playbin2, &fmt, &data->duration)) {
g_printerr ("Could not query current duration.\n");
} else {
gtk_range_set_range (GTK_RANGE (data->slider), 0, data->duration / GST_SECOND);
}
}
if (gst_element_query_position (data->playbin2, &fmt, &current)) {
gtk_range_set_value (GTK_RANGE (data->slider), current / GST_SECOND);
}
return TRUE;
}
int main(int argc, char *argv[]) {
CustomData data;
GstStateChangeReturn ret;
GstBus *bus;
/* Initialize GTK */
gtk_init (&argc, &argv);
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Initialize our data structure */
memset (&data, 0, sizeof (data));
data.duration = GST_CLOCK_TIME_NONE;
/* Create the elements */
data.playbin2 = gst_element_factory_make ("playbin2", "playbin2");
if (!data.playbin2) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set the URI to play */
// g_object_set (data.playbin2, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
g_object_set (data.playbin2, "uri", "file:///f:/media/big_buck_bunny_480p.H264.mov", NULL);
create_ui (&data);
bus = gst_element_get_bus (data.playbin2);
gst_bus_set_sync_handler (bus, (GstBusSyncHandler)bus_sync_handler, &data);
gst_bus_add_watch (bus, (GstBusFunc)handle_message, &data);
gst_object_unref (bus);
/* Start playing */
ret = gst_element_set_state (data.playbin2, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.playbin2);
return -1;
}
g_timeout_add (500, (GSourceFunc)refresh_ui, &data);
// add timeout to refresh UI: query position and duration (if unknown), gtk_range_set_value() on the slider
gtk_main ();
/* Free resources */
gst_element_set_state (data.playbin2, GST_STATE_NULL);
gst_object_unref (data.playbin2);
return 0;
}
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
gtk_main_quit ();
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
gst_element_set_state (data->playbin2, GST_STATE_READY);
break;
case GST_MESSAGE_STATE_CHANGED: {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin2)) {
data->state = new_state;
g_print ("State set to %s\n", gst_element_state_get_name (new_state));
}
} break;
}
return TRUE;
}
static GstBusSyncReply bus_sync_handler (GstBus *bus, GstMessage *msg, CustomData *data) {
/*ignore anything but 'prepare-xwindow-id' element messages */
if (GST_MESSAGE_TYPE (msg) != GST_MESSAGE_ELEMENT)
return GST_BUS_PASS;
if (!gst_structure_has_name (msg->structure, "prepare-xwindow-id"))
return GST_BUS_PASS;
if (data->embed_xid != 0) {
/* GST_MESSAGE_SRC (message) will be the video sink element */
GstXOverlay *xoverlay = GST_X_OVERLAY (GST_MESSAGE_SRC (msg));
gst_x_overlay_set_window_handle (xoverlay, data->embed_xid);
} else {
g_warning ("Should have obtained an xid by now!");
}
gst_message_unref (msg);
return GST_BUS_DROP;
}
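
The slider built in create_ui() only displays the position; it does not seek yet. A minimal sketch of the callback that would make it interactive, assuming it is wired up with g_signal_connect (data->slider, "value-changed", G_CALLBACK (slider_cb), data) right after the widget is created:

  /* Seek to the position the user dragged the slider to (slider units: seconds) */
  static void slider_cb (GtkRange *range, CustomData *data) {
    gdouble value = gtk_range_get_value (GTK_RANGE (data->slider));
    gst_element_seek_simple (data->playbin2, GST_FORMAT_TIME,
        GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, (gint64)(value * GST_SECOND));
  }

Note that refresh_ui() moves the slider programmatically every 500 ms, so in practice the handler id would be stored and g_signal_handler_block()/g_signal_handler_unblock() wrapped around gtk_range_set_value() to avoid triggering a seek on every refresh.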

File diff suppressed because it is too large

View file

@@ -0,0 +1,218 @@
#include <gst/gst.h>
#include <stdio.h>  /* fileno() */
#include <stdlib.h> /* atoi() */
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
GstElement *playbin2; /* Our one and only element */
gint n_video; /* Number of embedded video streams */
gint n_audio; /* Number of embedded audio streams */
gint n_text; /* Number of embedded subtitle streams */
gint current_video; /* Currently playing video stream */
gint current_audio; /* Currently playing audio stream */
gint current_text; /* Currently playing subtitle stream */
GMainLoop *main_loop; /* GLib's Main Loop */
} CustomData;
/* playbin2 flags */
typedef enum {
GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
GST_PLAY_FLAG_AUDIO = (1 << 1), /* We want audio output */
GST_PLAY_FLAG_TEXT = (1 << 2) /* We want subtitle output */
} GstPlayFlags;
/* Forward definition for the message and keyboard processing functions */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data);
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstStateChangeReturn ret;
gint flags;
GIOChannel *io_stdin;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.playbin2 = gst_element_factory_make ("playbin2", "playbin2");
if (!data.playbin2) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set the URI to play */
g_object_set (data.playbin2, "uri", "http://docs.gstreamer.com/media/sintel_cropped_multilingual.webm", NULL);
/* Set flags to show Audio and Video but ignore Subtitles */
g_object_get (data.playbin2, "flags", &flags, NULL);
flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO;
flags &= ~GST_PLAY_FLAG_TEXT;
g_object_set (data.playbin2, "flags", flags, NULL);
/* Set connection speed. This will affect some internal decisions of playbin2 */
g_object_set (data.playbin2, "connection-speed", 56000, NULL);
/* Add a bus watch, so we get notified when a message arrives */
bus = gst_element_get_bus (data.playbin2);
gst_bus_add_watch (bus, (GstBusFunc)handle_message, &data);
/* Add a keyboard watch so we get notified of keystrokes */
#ifdef _WIN32
io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
#else
io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
/* Start playing */
ret = gst_element_set_state (data.playbin2, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.playbin2);
return -1;
}
/* Create a GLib Main Loop and set it to run */
data.main_loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (data.main_loop);
/* Free resources */
g_main_loop_unref (data.main_loop);
g_io_channel_unref (io_stdin);
gst_object_unref (bus);
gst_element_set_state (data.playbin2, GST_STATE_NULL);
gst_object_unref (data.playbin2);
return 0;
}
/* Extract some metadata from the streams and print it on the screen */
static void analyze_streams (CustomData *data) {
gint i;
GstTagList *tags;
gchar *str;
guint rate;
/* Read some properties */
g_object_get (data->playbin2, "n-video", &data->n_video, NULL);
g_object_get (data->playbin2, "n-audio", &data->n_audio, NULL);
g_object_get (data->playbin2, "n-text", &data->n_text, NULL);
g_print ("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
data->n_video, data->n_audio, data->n_text);
g_print ("\n");
for (i = 0; i < data->n_video; i++) {
tags = NULL;
/* Retrieve the stream's video tags */
g_signal_emit_by_name (data->playbin2, "get-video-tags", i, &tags);
if (tags) {
g_print ("video stream %d:\n", i);
if (gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str)) {
g_print (" codec: %s\n", str);
g_free (str);
} else {
g_print (" codec: unknown\n");
}
gst_tag_list_free (tags);
}
}
g_print ("\n");
for (i = 0; i < data->n_audio; i++) {
tags = NULL;
/* Retrieve the stream's audio tags */
g_signal_emit_by_name (data->playbin2, "get-audio-tags", i, &tags);
if (tags) {
g_print ("audio stream %d:\n", i);
if (gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &str)) {
g_print (" codec: %s\n", str);
g_free (str);
}
if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
g_print (" language: %s\n", str);
g_free (str);
}
if (gst_tag_list_get_uint (tags, GST_TAG_BITRATE, &rate)) {
g_print (" bitrate: %d\n", rate);
}
gst_tag_list_free (tags);
}
}
g_print ("\n");
for (i = 0; i < data->n_text; i++) {
tags = NULL;
/* Retrieve the stream's subtitle tags */
g_signal_emit_by_name (data->playbin2, "get-text-tags", i, &tags);
if (tags) {
g_print ("subtitle stream %d:\n", i);
if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
g_print (" language: %s\n", str);
g_free (str);
}
gst_tag_list_free (tags);
}
}
g_object_get (data->playbin2, "current-video", &data->current_video, NULL);
g_object_get (data->playbin2, "current-audio", &data->current_audio, NULL);
g_object_get (data->playbin2, "current-text", &data->current_text, NULL);
g_print ("\n");
g_print ("Currently playing video stream %d, audio stream %d and text stream %d\n",
data->current_video, data->current_audio, data->current_text);
g_print ("Type any number and hit ENTER to select a different audio stream\n");
}
/* Process messages from GStreamer */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
g_main_loop_quit (data->main_loop);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
g_main_loop_quit (data->main_loop);
break;
case GST_MESSAGE_STATE_CHANGED: {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin2)) {
if (new_state == GST_STATE_PLAYING) {
/* Once we are in the playing state, analyze the streams */
analyze_streams (data);
}
}
} break;
}
/* We want to keep receiving messages */
return TRUE;
}
/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
gchar *str = NULL;
if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
int index = atoi (str);
if (index < 0 || index >= data->n_audio) {
g_printerr ("Index out of bounds\n");
} else {
/* If the input was a valid audio stream index, set the current audio stream */
g_print ("Setting current audio stream to %d\n", index);
g_object_set (data->playbin2, "current-audio", index, NULL);
}
}
g_free (str);
return TRUE;
}
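
The same properties extend to the other stream types. A minimal sketch, under the same playbin2 assumptions, of how subtitles (disabled in main() via GST_PLAY_FLAG_TEXT) could be switched on and a particular subtitle stream selected:

  gint flags;

  /* Re-enable the subtitle flag that main() cleared */
  g_object_get (data->playbin2, "flags", &flags, NULL);
  flags |= GST_PLAY_FLAG_TEXT;
  g_object_set (data->playbin2, "flags", flags, NULL);

  /* Select the first subtitle stream, if the media exposes any */
  if (data->n_text > 0)
    g_object_set (data->playbin2, "current-text", 0, NULL);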

View file

@@ -0,0 +1,84 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{9DEC64B5-E1FA-4A55-9174-58FF94BDF432}</ProjectGuid>
<Keyword>Win32Proj</Keyword>
<RootNamespace>basictutorial1</RootNamespace>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\..\libs\gstreamer-0.10.props" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
<Import Project="..\..\libs\gstreamer-0.10.props" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<LinkIncremental>true</LinkIncremental>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<LinkIncremental>false</LinkIncremental>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<PrecompiledHeader>
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
</ClCompile>
<Link>
<SubSystem>Console</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<PrecompiledHeader>
</PrecompiledHeader>
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
</ClCompile>
<Link>
<SubSystem>Console</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
</Link>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="..\..\..\..\gst-sdk\tutorials\basic-tutorial-1.c" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

View file

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<ClCompile Include="..\..\..\..\gst-sdk\tutorials\basic-tutorial-1.c" />
</ItemGroup>
</Project>

View file

@@ -0,0 +1,20 @@

Microsoft Visual Studio Solution File, Format Version 11.00
# Visual C++ Express 2010
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "basic-tutorial-1", "basic-tutorial-1\basic-tutorial-1.vcxproj", "{9DEC64B5-E1FA-4A55-9174-58FF94BDF432}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Win32 = Debug|Win32
Release|Win32 = Release|Win32
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{9DEC64B5-E1FA-4A55-9174-58FF94BDF432}.Debug|Win32.ActiveCfg = Debug|Win32
{9DEC64B5-E1FA-4A55-9174-58FF94BDF432}.Debug|Win32.Build.0 = Debug|Win32
{9DEC64B5-E1FA-4A55-9174-58FF94BDF432}.Release|Win32.ActiveCfg = Release|Win32
{9DEC64B5-E1FA-4A55-9174-58FF94BDF432}.Release|Win32.Build.0 = Release|Win32
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal