Split out documentation into subfolders.

https://bugzilla.gnome.org/show_bug.cgi?id=773976
Authored by Mathieu Duponchelle on 2016-11-05 09:18:49 +01:00; committed by Tim-Philipp Müller
parent 870a116ccb
commit 09f71131fb
513 changed files with 5906 additions and 5995 deletions

.gitmodules

@@ -1,3 +1,3 @@
 [submodule "hotdoc_bootstrap_theme"]
-	path = hotdoc_bootstrap_theme
+	path = theme/hotdoc_bootstrap_theme
 	url = git@github.com:hotdoc/hotdoc_bootstrap_theme.git
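For context (not part of this commit): the .gitmodules change above is what relocating a submodule looks like. A minimal sketch of how such a move is typically reproduced with stock Git commands; the exact invocation and commit message below are assumptions, not a record of what was actually run here:

    # git mv on a submodule moves the working tree entry and rewrites the path in .gitmodules
    git mv hotdoc_bootstrap_theme theme/hotdoc_bootstrap_theme
    # re-sync .git/config with the updated .gitmodules
    git submodule sync
    git commit -m "Move hotdoc_bootstrap_theme submodule under theme/"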


@@ -26,18 +26,18 @@ gst_docs_HOTDOC_FLAGS = \
 	--conf-file hotdoc.json \
 	$(NULL)
-theme.stamp: less/variables.less
+theme/theme.stamp: theme/less/variables.less
-	+make -C hotdoc_bootstrap_theme LESS_INCLUDE_PATH=$$PWD/less
+	+make -C theme/hotdoc_bootstrap_theme LESS_INCLUDE_PATH=$$PWD/theme/less
 	@rm -rf hotdoc-private*
-	@touch theme.stamp
+	@touch theme/theme.stamp
 clean_theme:
-	rm -f theme.stamp
+	rm -f theme/theme.stamp
-	+make -C hotdoc_bootstrap_theme clean
+	+make -C theme/hotdoc_bootstrap_theme clean
 clean: clean_theme
-gst_docs_HOTDOC_EXTRA_DEPS = theme.stamp
+gst_docs_HOTDOC_EXTRA_DEPS = theme/theme.stamp
 .PHONY: all install clean


@@ -5,10 +5,10 @@ gstreamer.com content to hotdoc
 Pages to review:
 - [installing]
-- installing-on-windows.md
+- installing/on-windows.md
 For-later pages:
-- tutorial-qt-tutorials.md [tpm: this should all be rewritten from scratch with qmlglsink; QtGStreamer is outdated and unmaintained, we should not promote it]
+- tutorials/qt-tutorials.md [tpm: this should all be rewritten from scratch with qmlglsink; QtGStreamer is outdated and unmaintained, we should not promote it]
 - basic-media-player.md
 - qt-gstreamer-vs-c-gstreamer.md
 - using-appsink-appsrc-in-qt.md



@@ -14,4 +14,4 @@
            </intent-filter>
        </activity>
    </application>
</manifest>


@@ -1,14 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical" >
    <TextView
        android:id="@+id/textview_info"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:gravity="center_vertical|center_horizontal"
        android:textAppearance="?android:attr/textAppearanceLarge" />
</LinearLayout>


@@ -20,7 +20,7 @@ public class Tutorial1 extends Activity {
            GStreamer.init(this);
        } catch (Exception e) {
            Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
            finish();
            return;
        }


@@ -14,4 +14,4 @@
            </intent-filter>
        </activity>
    </application>
</manifest>


@@ -1,38 +1,38 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:gravity="center_vertical"
    android:orientation="vertical" >
    <TextView
        android:id="@+id/textview_message"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal" />
    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:gravity="center_horizontal"
        android:orientation="horizontal" >
        <ImageButton
            android:id="@+id/button_play"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_play"
            android:src="@android:drawable/ic_media_play"
            android:text="@string/button_play" />
        <ImageButton
            android:id="@+id/button_stop"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_stop"
            android:src="@android:drawable/ic_media_pause"
            android:text="@string/button_stop" />
    </LinearLayout>
</LinearLayout>


@@ -32,7 +32,7 @@ public class Tutorial2 extends Activity {
            GStreamer.init(this);
        } catch (Exception e) {
            Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
            finish();
            return;
        }


@@ -15,4 +15,4 @@
            </intent-filter>
        </activity>
    </application>
</manifest>


@@ -1,45 +1,45 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:gravity="center_vertical"
    android:orientation="vertical" >
    <TextView
        android:id="@+id/textview_message"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal" />
    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal"
        android:orientation="horizontal" >
        <ImageButton
            android:id="@+id/button_play"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_play"
            android:src="@android:drawable/ic_media_play"
            android:text="@string/button_play" />
        <ImageButton
            android:id="@+id/button_stop"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_stop"
            android:src="@android:drawable/ic_media_pause"
            android:text="@string/button_stop" />
    </LinearLayout>
    <org.freedesktop.gstreamer.tutorials.tutorial_3.GStreamerSurfaceView
        android:id="@+id/surface_video"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_gravity="center_vertical|center_horizontal" />
</LinearLayout>


@@ -36,7 +36,7 @@ public class Tutorial3 extends Activity implements SurfaceHolder.Callback {
            GStreamer.init(this);
        } catch (Exception e) {
            Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
            finish();
            return;
        }


@@ -16,4 +16,4 @@
            </intent-filter>
        </activity>
    </application>
</manifest>


@@ -1,69 +1,69 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:gravity="center_vertical"
    android:orientation="vertical" >
    <TextView
        android:id="@+id/textview_message"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal" />
    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal"
        android:orientation="horizontal" >
        <ImageButton
            android:id="@+id/button_play"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_play"
            android:src="@android:drawable/ic_media_play"
            android:text="@string/button_play" />
        <ImageButton
            android:id="@+id/button_stop"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_stop"
            android:src="@android:drawable/ic_media_pause"
            android:text="@string/button_stop" />
    </LinearLayout>
    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal"
        android:orientation="horizontal" >
        <TextView
            android:id="@+id/textview_time"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_gravity="center_vertical"
            android:layout_marginLeft="5dip"
            android:layout_marginRight="5dip" />
        <SeekBar
            android:id="@+id/seek_bar"
            android:layout_width="0dip"
            android:layout_height="wrap_content"
            android:layout_gravity="center_vertical"
            android:layout_weight="1"
            android:indeterminate="false" />
    </LinearLayout>
    <com.gst_sdk_tutorials.tutorial_4.GStreamerSurfaceView
        android:id="@+id/surface_video"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_gravity="center_vertical|center_horizontal" />
</LinearLayout>


@@ -51,7 +51,7 @@ public class Tutorial4 extends Activity implements SurfaceHolder.Callback, OnSee
            GStreamer.init(this);
        } catch (Exception e) {
            Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
            finish();
            return;
        }


@@ -1,2 +1 @@
APP_ABI = armeabi armeabi-v7a arm64-v8a x86 x86_64



@@ -1,77 +1,77 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:gravity="center_vertical"
    android:orientation="vertical" >
    <TextView
        android:id="@+id/textview_message"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal" />
    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal"
        android:orientation="horizontal" >
        <ImageButton
            android:id="@+id/button_play"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_play"
            android:src="@android:drawable/ic_media_play"
            android:text="@string/button_play" />
        <ImageButton
            android:id="@+id/button_stop"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_stop"
            android:src="@android:drawable/ic_media_pause"
            android:text="@string/button_stop" />
        <ImageButton
            android:id="@+id/button_select"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_select"
            android:src="@android:drawable/ic_media_next"
            android:text="@string/button_select" />
    </LinearLayout>
    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal"
        android:orientation="horizontal" >
        <TextView
            android:id="@+id/textview_time"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_gravity="center_vertical"
            android:layout_marginLeft="5dip"
            android:layout_marginRight="5dip" />
        <SeekBar
            android:id="@+id/seek_bar"
            android:layout_width="0dip"
            android:layout_height="wrap_content"
            android:layout_gravity="center_vertical"
            android:layout_weight="1"
            android:indeterminate="false" />
    </LinearLayout>
    <com.gst_sdk_tutorials.tutorial_5.GStreamerSurfaceView
        android:id="@+id/surface_video"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_gravity="center_vertical|center_horizontal" />
</LinearLayout>


@@ -62,7 +62,7 @@ public class Tutorial5 extends Activity implements SurfaceHolder.Callback, OnSee
            GStreamer.init(this);
        } catch (Exception e) {
            Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
            finish();
            return;
        }


@@ -2,10 +2,10 @@
//
// Copyright (c) 2011, 2012, Alexander Ponomarev <alexander.ponomarev.1@gmail.com>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice, this list
// of conditions and the following disclaimer. Redistributions in binary form must
// reproduce the above copyright notice, this list of conditions and the following
@@ -49,9 +49,9 @@ import com.gst_sdk_tutorials.tutorial_5.R;
/**
 * Activity for choosing files/directories.
 *
 * @author android
 *
 */
public class FileDialog extends ListActivity {


@@ -1,28 +1,28 @@
#include <gst/gst.h>

int main(int argc, char *argv[]) {
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Build the pipeline */
  pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
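Not part of the diff: for reference, these walkthrough sources are normally built against gstreamer-1.0 via pkg-config, along the lines of the sketch below (the source and output file names are assumed for illustration):

    # compile and run the first walkthrough tutorial (file name assumed)
    gcc basic-tutorial-1.c -o basic-tutorial-1 `pkg-config --cflags --libs gstreamer-1.0`
    ./basic-tutorial-1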


@@ -1,99 +1,99 @@
#include <gst/gst.h>
#include <string.h>

typedef struct _CustomData {
  gboolean is_live;
  GstElement *pipeline;
  GMainLoop *loop;
} CustomData;

static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR: {
      GError *err;
      gchar *debug;

      gst_message_parse_error (msg, &err, &debug);
      g_print ("Error: %s\n", err->message);
      g_error_free (err);
      g_free (debug);
      gst_element_set_state (data->pipeline, GST_STATE_READY);
      g_main_loop_quit (data->loop);
      break;
    }
    case GST_MESSAGE_EOS:
      /* end-of-stream */
      gst_element_set_state (data->pipeline, GST_STATE_READY);
      g_main_loop_quit (data->loop);
      break;
    case GST_MESSAGE_BUFFERING: {
      gint percent = 0;

      /* If the stream is live, we do not care about buffering. */
      if (data->is_live) break;

      gst_message_parse_buffering (msg, &percent);
      g_print ("Buffering (%3d%%)\r", percent);
      /* Wait until buffering is complete before start/resume playing */
      if (percent < 100)
        gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
      else
        gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
      break;
    }
    case GST_MESSAGE_CLOCK_LOST:
      /* Get a new clock */
      gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
      gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
      break;
    default:
      /* Unhandled message */
      break;
  }
}

int main(int argc, char *argv[]) {
  GstElement *pipeline;
  GstBus *bus;
  GstStateChangeReturn ret;
  GMainLoop *main_loop;
  CustomData data;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Initialize our data structure */
  memset (&data, 0, sizeof (data));

  /* Build the pipeline */
  pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
  bus = gst_element_get_bus (pipeline);

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  } else if (ret == GST_STATE_CHANGE_NO_PREROLL) {
    data.is_live = TRUE;
  }

  main_loop = g_main_loop_new (NULL, FALSE);
  data.loop = main_loop;
  data.pipeline = pipeline;

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (cb_message), &data);

  g_main_loop_run (main_loop);

  /* Free resources */
  g_main_loop_unref (main_loop);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}


@@ -1,27 +1,27 @@
#include <string.h>
#include <stdio.h>
#include <gst/gst.h>

typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *video_sink;
  GMainLoop *loop;
  gboolean playing;  /* Playing or Paused */
  gdouble rate;      /* Current playback rate (can be negative) */
} CustomData;

/* Send seek event to change rate */
static void send_seek_event (CustomData *data) {
  gint64 position;
  GstEvent *seek_event;

  /* Obtain the current position, needed for the seek event */
  if (!gst_element_query_position (data->pipeline, GST_FORMAT_TIME, &position)) {
    g_printerr ("Unable to retrieve current position.\n");
    return;
  }

  /* Create the seek event */
  if (data->rate > 0) {
    seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
@@ -30,26 +30,26 @@ static void send_seek_event (CustomData *data) {
    seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
        GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, position);
  }

  if (data->video_sink == NULL) {
    /* If we have not done so, obtain the sink through which we will send the seek events */
    g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
  }

  /* Send the event */
  gst_element_send_event (data->video_sink, seek_event);
  g_print ("Current rate: %g\n", data->rate);
}

/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
  gchar *str = NULL;

  if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) != G_IO_STATUS_NORMAL) {
    return TRUE;
  }

  switch (g_ascii_tolower (str[0])) {
    case 'p':
      data->playing = !data->playing;
@@ -73,7 +73,7 @@ static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomDa
        /* If we have not done so, obtain the sink through which we will send the step events */
        g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
      }
      gst_element_send_event (data->video_sink,
          gst_event_new_step (GST_FORMAT_BUFFERS, 1, data->rate, TRUE, FALSE));
      g_print ("Stepping one frame\n");
@@ -84,23 +84,23 @@ static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomDa
    default:
      break;
  }

  g_free (str);
  return TRUE;
}

int main(int argc, char *argv[]) {
  CustomData data;
  GstStateChangeReturn ret;
  GIOChannel *io_stdin;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Initialize our data structure */
  memset (&data, 0, sizeof (data));

  /* Print usage map */
  g_print (
    "USAGE: Choose one of the following options, then press enter:\n"
@@ -109,10 +109,10 @@ int main(int argc, char *argv[]) {
    " 'D' to toggle playback direction\n"
    " 'N' to move to next frame (in the current direction, better in PAUSE)\n"
    " 'Q' to quit\n");

  /* Build the pipeline */
  data.pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

  /* Add a keyboard watch so we get notified of keystrokes */
#ifdef G_OS_WIN32
  io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
@@ -120,7 +120,7 @@ int main(int argc, char *argv[]) {
  io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
  g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);

  /* Start playing */
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
@@ -130,11 +130,11 @@ int main(int argc, char *argv[]) {
  }
  data.playing = TRUE;
  data.rate = 1.0;

  /* Create a GLib Main Loop and set it to run */
  data.loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.loop);

  /* Free resources */
  g_main_loop_unref (data.loop);
  g_io_channel_unref (io_stdin);


@@ -1,29 +1,29 @@
#include <clutter-gst/clutter-gst.h>

/* Setup the video texture once its size is known */
void size_change (ClutterActor *texture, gint width, gint height, gpointer user_data) {
  ClutterActor *stage;
  gfloat new_x, new_y, new_width, new_height;
  gfloat stage_width, stage_height;
  ClutterAnimation *animation = NULL;

  stage = clutter_actor_get_stage (texture);
  if (stage == NULL)
    return;

  clutter_actor_get_size (stage, &stage_width, &stage_height);

  /* Center video on window and calculate new size preserving aspect ratio */
  new_height = (height * stage_width) / width;
  if (new_height <= stage_height) {
    new_width = stage_width;
    new_x = 0;
    new_y = (stage_height - new_height) / 2;
  } else {
    new_width = (width * stage_height) / height;
    new_height = stage_height;
    new_x = (stage_width - new_width) / 2;
    new_y = 0;
  }
@@ -34,31 +34,31 @@ void size_change (ClutterActor *texture, gint width, gint height, gpointer user_
  animation = clutter_actor_animate (texture, CLUTTER_LINEAR, 10000, "rotation-angle-y", 360.0, NULL);
  clutter_animation_set_loop (animation, TRUE);
}

int main(int argc, char *argv[]) {
  GstElement *pipeline, *sink;
  ClutterTimeline *timeline;
  ClutterActor *stage, *texture;

  /* clutter-gst takes care of initializing Clutter and GStreamer */
  if (clutter_gst_init (&argc, &argv) != CLUTTER_INIT_SUCCESS) {
    g_error ("Failed to initialize clutter\n");
    return -1;
  }

  stage = clutter_stage_get_default ();

  /* Make a timeline */
  timeline = clutter_timeline_new (1000);
  g_object_set(timeline, "loop", TRUE, NULL);

  /* Create new texture and disable slicing so the video is properly mapped onto it */
  texture = CLUTTER_ACTOR (g_object_new (CLUTTER_TYPE_TEXTURE, "disable-slicing", TRUE, NULL));
  g_signal_connect (texture, "size-change", G_CALLBACK (size_change), NULL);

  /* Build the GStreamer pipeline */
  pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

  /* Instantiate the Clutter sink */
  sink = gst_element_factory_make ("autocluttersink", NULL);
  if (sink == NULL) {
@@ -69,25 +69,25 @@ int main(int argc, char *argv[]) {
    g_printerr ("Unable to find a Clutter sink.\n");
    return -1;
  }

  /* Link GStreamer with Clutter by passing the Clutter texture to the Clutter sink*/
  g_object_set (sink, "texture", texture, NULL);

  /* Add the Clutter sink to the pipeline */
  g_object_set (pipeline, "video-sink", sink, NULL);

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* start the timeline */
  clutter_timeline_start (timeline);

  /* Add texture to the stage, and show it */
  clutter_group_add (CLUTTER_GROUP (stage), texture);
  clutter_actor_show_all (stage);

  clutter_main();

  /* Free resources */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);


@@ -1,76 +1,76 @@
#include <gst/gst.h>

int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *sink;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  source = gst_element_factory_make ("videotestsrc", "source");
  sink = gst_element_factory_make ("autovideosink", "sink");

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !source || !sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
  if (gst_element_link (source, sink) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Modify the source's properties */
  g_object_set (source, "pattern", 0, NULL);

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}


@@ -1,149 +1,149 @@
#include <gst/gst.h>

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *source;
  GstElement *convert;
  GstElement *sink;
} CustomData;

/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);

int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;
  gboolean terminate = FALSE;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  data.source = gst_element_factory_make ("uridecodebin", "source");
  data.convert = gst_element_factory_make ("audioconvert", "convert");
  data.sink = gst_element_factory_make ("autoaudiosink", "sink");

  /* Create the empty pipeline */
  data.pipeline = gst_pipeline_new ("test-pipeline");

  if (!data.pipeline || !data.source || !data.convert || !data.sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build the pipeline. Note that we are NOT linking the source at this
   * point. We will do it later. */
  gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.sink, NULL);
  if (!gst_element_link (data.convert, data.sink)) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Set the URI to play */
  g_object_set (data.source, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

  /* Connect to the pad-added signal */
  g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);

  /* Start playing */
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Listen to the bus */
  bus = gst_element_get_bus (data.pipeline);
  do {
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    /* Parse message */
    if (msg != NULL) {
      GError *err;
      gchar *debug_info;

      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_ERROR:
          gst_message_parse_error (msg, &err, &debug_info);
          g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
          g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
          g_clear_error (&err);
          g_free (debug_info);
          terminate = TRUE;
          break;
        case GST_MESSAGE_EOS:
          g_print ("End-Of-Stream reached.\n");
          terminate = TRUE;
          break;
        case GST_MESSAGE_STATE_CHANGED:
          /* We are only interested in state-changed messages from the pipeline */
          if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
            g_print ("Pipeline state changed from %s to %s:\n",
                gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
          }
          break;
        default:
          /* We should not reach here */
          g_printerr ("Unexpected message received.\n");
          break;
      }
      gst_message_unref (msg);
    }
  } while (!terminate);

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}

/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
  GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
  GstPadLinkReturn ret;
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  const gchar *new_pad_type = NULL;

  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

  /* If our converter is already linked, we have nothing to do here */
  if (gst_pad_is_linked (sink_pad)) {
    g_print (" We are already linked. Ignoring.\n");
    goto exit;
  }

  /* Check the new pad's type */
  new_pad_caps = gst_pad_query_caps (new_pad, NULL);
  new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
  new_pad_type = gst_structure_get_name (new_pad_struct);
  if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
    g_print (" It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
    goto exit;
  }

  /* Attempt the link */
  ret = gst_pad_link (new_pad, sink_pad);
  if (GST_PAD_LINK_FAILED (ret)) {
    g_print (" Type is '%s' but link failed.\n", new_pad_type);
  } else {
    g_print (" Link succeeded (type '%s').\n", new_pad_type);
  }

exit:
  /* Unreference the new pad's caps, if we got them */
  if (new_pad_caps != NULL)
    gst_caps_unref (new_pad_caps);

  /* Unreference the sink pad */
  gst_object_unref (sink_pad);
}


@ -1,156 +1,156 @@
#include <gst/gst.h> #include <gst/gst.h>
/* Structure to contain all our information, so we can pass it around */ /* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData { typedef struct _CustomData {
GstElement *playbin; /* Our one and only element */ GstElement *playbin; /* Our one and only element */
gboolean playing; /* Are we in the PLAYING state? */ gboolean playing; /* Are we in the PLAYING state? */
gboolean terminate; /* Should we terminate execution? */ gboolean terminate; /* Should we terminate execution? */
gboolean seek_enabled; /* Is seeking enabled for this media? */ gboolean seek_enabled; /* Is seeking enabled for this media? */
gboolean seek_done; /* Have we performed the seek already? */ gboolean seek_done; /* Have we performed the seek already? */
gint64 duration; /* How long does this media last, in nanoseconds */ gint64 duration; /* How long does this media last, in nanoseconds */
} CustomData; } CustomData;
/* Forward definition of the message processing function */ /* Forward definition of the message processing function */
static void handle_message (CustomData *data, GstMessage *msg); static void handle_message (CustomData *data, GstMessage *msg);
int main(int argc, char *argv[]) { int main(int argc, char *argv[]) {
CustomData data; CustomData data;
GstBus *bus; GstBus *bus;
GstMessage *msg; GstMessage *msg;
GstStateChangeReturn ret; GstStateChangeReturn ret;
data.playing = FALSE; data.playing = FALSE;
data.terminate = FALSE; data.terminate = FALSE;
data.seek_enabled = FALSE; data.seek_enabled = FALSE;
data.seek_done = FALSE; data.seek_done = FALSE;
data.duration = GST_CLOCK_TIME_NONE; data.duration = GST_CLOCK_TIME_NONE;
/* Initialize GStreamer */ /* Initialize GStreamer */
gst_init (&argc, &argv); gst_init (&argc, &argv);
/* Create the elements */ /* Create the elements */
data.playbin = gst_element_factory_make ("playbin", "playbin"); data.playbin = gst_element_factory_make ("playbin", "playbin");
if (!data.playbin) { if (!data.playbin) {
g_printerr ("Not all elements could be created.\n"); g_printerr ("Not all elements could be created.\n");
return -1; return -1;
} }
/* Set the URI to play */ /* Set the URI to play */
g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL); g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
/* Start playing */ /* Start playing */
ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING); ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) { if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n"); g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.playbin); gst_object_unref (data.playbin);
return -1; return -1;
} }
/* Listen to the bus */ /* Listen to the bus */
bus = gst_element_get_bus (data.playbin); bus = gst_element_get_bus (data.playbin);
do { do {
msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND, msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION); GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION);
/* Parse message */ /* Parse message */
if (msg != NULL) { if (msg != NULL) {
handle_message (&data, msg); handle_message (&data, msg);
} else { } else {
/* We got no message, this means the timeout expired */ /* We got no message, this means the timeout expired */
if (data.playing) { if (data.playing) {
gint64 current = -1; gint64 current = -1;
/* Query the current position of the stream */ /* Query the current position of the stream */
if (!gst_element_query_position (data.playbin, GST_FORMAT_TIME, &current)) { if (!gst_element_query_position (data.playbin, GST_FORMAT_TIME, &current)) {
g_printerr ("Could not query current position.\n"); g_printerr ("Could not query current position.\n");
} }
/* If we didn't know it yet, query the stream duration */ /* If we didn't know it yet, query the stream duration */
if (!GST_CLOCK_TIME_IS_VALID (data.duration)) { if (!GST_CLOCK_TIME_IS_VALID (data.duration)) {
if (!gst_element_query_duration (data.playbin, GST_FORMAT_TIME, &data.duration)) { if (!gst_element_query_duration (data.playbin, GST_FORMAT_TIME, &data.duration)) {
g_printerr ("Could not query current duration.\n"); g_printerr ("Could not query current duration.\n");
} }
} }
/* Print current position and total duration */ /* Print current position and total duration */
g_print ("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r", g_print ("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
GST_TIME_ARGS (current), GST_TIME_ARGS (data.duration)); GST_TIME_ARGS (current), GST_TIME_ARGS (data.duration));
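/* Illustrative note: with GST_TIME_FORMAT/GST_TIME_ARGS the line above prints something like
 * "Position 0:00:10.123456789 / 0:00:52.250000000" (h:mm:ss.nanoseconds), and the trailing '\r'
 * rewrites the same console line on each ~100ms update instead of scrolling. */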
/* If seeking is enabled, we have not done it yet, and the time is right, seek */ /* If seeking is enabled, we have not done it yet, and the time is right, seek */
if (data.seek_enabled && !data.seek_done && current > 10 * GST_SECOND) { if (data.seek_enabled && !data.seek_done && current > 10 * GST_SECOND) {
g_print ("\nReached 10s, performing seek...\n"); g_print ("\nReached 10s, performing seek...\n");
gst_element_seek_simple (data.playbin, GST_FORMAT_TIME, gst_element_seek_simple (data.playbin, GST_FORMAT_TIME,
GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, 30 * GST_SECOND); GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, 30 * GST_SECOND);
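/* Note: GST_SEEK_FLAG_FLUSH discards data already queued in the pipeline so the jump takes effect
 * immediately, and GST_SEEK_FLAG_KEY_UNIT snaps the seek to the nearest keyframe, which is fast
 * but slightly less precise than GST_SEEK_FLAG_ACCURATE. */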
data.seek_done = TRUE; data.seek_done = TRUE;
} }
} }
} }
} while (!data.terminate); } while (!data.terminate);
/* Free resources */ /* Free resources */
gst_object_unref (bus); gst_object_unref (bus);
gst_element_set_state (data.playbin, GST_STATE_NULL); gst_element_set_state (data.playbin, GST_STATE_NULL);
gst_object_unref (data.playbin); gst_object_unref (data.playbin);
return 0; return 0;
} }
static void handle_message (CustomData *data, GstMessage *msg) { static void handle_message (CustomData *data, GstMessage *msg) {
GError *err; GError *err;
gchar *debug_info; gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) { switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR: case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info); gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none"); g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err); g_clear_error (&err);
g_free (debug_info); g_free (debug_info);
data->terminate = TRUE; data->terminate = TRUE;
break; break;
case GST_MESSAGE_EOS: case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n"); g_print ("End-Of-Stream reached.\n");
data->terminate = TRUE; data->terminate = TRUE;
break; break;
case GST_MESSAGE_DURATION_CHANGED: case GST_MESSAGE_DURATION_CHANGED:
/* The duration has changed, mark the current one as invalid */ /* The duration has changed, mark the current one as invalid */
data->duration = GST_CLOCK_TIME_NONE; data->duration = GST_CLOCK_TIME_NONE;
break; break;
case GST_MESSAGE_STATE_CHANGED: { case GST_MESSAGE_STATE_CHANGED: {
GstState old_state, new_state, pending_state; GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state); gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) { if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
g_print ("Pipeline state changed from %s to %s:\n", g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state)); gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
/* Remember whether we are in the PLAYING state or not */ /* Remember whether we are in the PLAYING state or not */
data->playing = (new_state == GST_STATE_PLAYING); data->playing = (new_state == GST_STATE_PLAYING);
if (data->playing) { if (data->playing) {
/* We just moved to PLAYING. Check if seeking is possible */ /* We just moved to PLAYING. Check if seeking is possible */
GstQuery *query; GstQuery *query;
gint64 start, end; gint64 start, end;
query = gst_query_new_seeking (GST_FORMAT_TIME); query = gst_query_new_seeking (GST_FORMAT_TIME);
if (gst_element_query (data->playbin, query)) { if (gst_element_query (data->playbin, query)) {
gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end); gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end);
if (data->seek_enabled) { if (data->seek_enabled) {
g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n", g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
GST_TIME_ARGS (start), GST_TIME_ARGS (end)); GST_TIME_ARGS (start), GST_TIME_ARGS (end));
} else { } else {
g_print ("Seeking is DISABLED for this stream.\n"); g_print ("Seeking is DISABLED for this stream.\n");
} }
} }
else { else {
g_printerr ("Seeking query failed."); g_printerr ("Seeking query failed.");
} }
gst_query_unref (query); gst_query_unref (query);
} }
} }
} break; } break;
default: default:
/* We should not reach here */ /* We should not reach here */
g_printerr ("Unexpected message received.\n"); g_printerr ("Unexpected message received.\n");
break; break;
} }
gst_message_unref (msg); gst_message_unref (msg);
} }


@ -1,9 +1,9 @@
#include <string.h> #include <string.h>
#include <gtk/gtk.h> #include <gtk/gtk.h>
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/video/videooverlay.h> #include <gst/video/videooverlay.h>
#include <gdk/gdk.h> #include <gdk/gdk.h>
#if defined (GDK_WINDOWING_X11) #if defined (GDK_WINDOWING_X11)
#include <gdk/gdkx.h> #include <gdk/gdkx.h>
@ -12,29 +12,29 @@
#elif defined (GDK_WINDOWING_QUARTZ) #elif defined (GDK_WINDOWING_QUARTZ)
#include <gdk/gdkquartz.h> #include <gdk/gdkquartz.h>
#endif #endif
/* Structure to contain all our information, so we can pass it around */ /* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData { typedef struct _CustomData {
GstElement *playbin; /* Our one and only pipeline */ GstElement *playbin; /* Our one and only pipeline */
GtkWidget *slider; /* Slider widget to keep track of current position */ GtkWidget *slider; /* Slider widget to keep track of current position */
GtkWidget *streams_list; /* Text widget to display info about the streams */ GtkWidget *streams_list; /* Text widget to display info about the streams */
gulong slider_update_signal_id; /* Signal ID for the slider update signal */ gulong slider_update_signal_id; /* Signal ID for the slider update signal */
GstState state; /* Current state of the pipeline */ GstState state; /* Current state of the pipeline */
gint64 duration; /* Duration of the clip, in nanoseconds */ gint64 duration; /* Duration of the clip, in nanoseconds */
} CustomData; } CustomData;
/* This function is called when the GUI toolkit creates the physical window that will hold the video. /* This function is called when the GUI toolkit creates the physical window that will hold the video.
* At this point we can retrieve its handle (which has a different meaning depending on the windowing system) * At this point we can retrieve its handle (which has a different meaning depending on the windowing system)
* and pass it to GStreamer through the GstVideoOverlay interface. */ * and pass it to GStreamer through the GstVideoOverlay interface. */
static void realize_cb (GtkWidget *widget, CustomData *data) { static void realize_cb (GtkWidget *widget, CustomData *data) {
GdkWindow *window = gtk_widget_get_window (widget); GdkWindow *window = gtk_widget_get_window (widget);
guintptr window_handle; guintptr window_handle;
if (!gdk_window_ensure_native (window)) if (!gdk_window_ensure_native (window))
g_error ("Couldn't create native window needed for GstXOverlay!"); g_error ("Couldn't create native window needed for GstXOverlay!");
/* Retrieve window handle from GDK */ /* Retrieve window handle from GDK */
#if defined (GDK_WINDOWING_WIN32) #if defined (GDK_WINDOWING_WIN32)
window_handle = (guintptr)GDK_WINDOW_HWND (window); window_handle = (guintptr)GDK_WINDOW_HWND (window);
@ -46,35 +46,35 @@ static void realize_cb (GtkWidget *widget, CustomData *data) {
/* Pass it to playbin, which implements GstVideoOverlay and will forward it to the video sink */ /* Pass it to playbin, which implements GstVideoOverlay and will forward it to the video sink */
gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->playbin), window_handle); gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->playbin), window_handle);
} }
/* This function is called when the PLAY button is clicked */ /* This function is called when the PLAY button is clicked */
static void play_cb (GtkButton *button, CustomData *data) { static void play_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin, GST_STATE_PLAYING); gst_element_set_state (data->playbin, GST_STATE_PLAYING);
} }
/* This function is called when the PAUSE button is clicked */ /* This function is called when the PAUSE button is clicked */
static void pause_cb (GtkButton *button, CustomData *data) { static void pause_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin, GST_STATE_PAUSED); gst_element_set_state (data->playbin, GST_STATE_PAUSED);
} }
/* This function is called when the STOP button is clicked */ /* This function is called when the STOP button is clicked */
static void stop_cb (GtkButton *button, CustomData *data) { static void stop_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin, GST_STATE_READY); gst_element_set_state (data->playbin, GST_STATE_READY);
} }
/* This function is called when the main window is closed */ /* This function is called when the main window is closed */
static void delete_event_cb (GtkWidget *widget, GdkEvent *event, CustomData *data) { static void delete_event_cb (GtkWidget *widget, GdkEvent *event, CustomData *data) {
stop_cb (NULL, data); stop_cb (NULL, data);
gtk_main_quit (); gtk_main_quit ();
} }
/* This function is called every time the video window needs to be redrawn (due to damage/exposure, /* This function is called every time the video window needs to be redrawn (due to damage/exposure,
* rescaling, etc). GStreamer takes care of this in the PAUSED and PLAYING states, otherwise, * rescaling, etc). GStreamer takes care of this in the PAUSED and PLAYING states, otherwise,
* we simply draw a black rectangle to avoid garbage showing up. */ * we simply draw a black rectangle to avoid garbage showing up. */
static gboolean draw_cb (GtkWidget *widget, cairo_t *cr, CustomData *data) { static gboolean draw_cb (GtkWidget *widget, cairo_t *cr, CustomData *data) {
if (data->state < GST_STATE_PAUSED) { if (data->state < GST_STATE_PAUSED) {
GtkAllocation allocation; GtkAllocation allocation;
/* Cairo is a 2D graphics library which we use here to clean the video window. /* Cairo is a 2D graphics library which we use here to clean the video window.
* It is used by GStreamer for other reasons, so it will always be available to us. */ * It is used by GStreamer for other reasons, so it will always be available to us. */
gtk_widget_get_allocation (widget, &allocation); gtk_widget_get_allocation (widget, &allocation);
@ -82,10 +82,10 @@ static gboolean draw_cb (GtkWidget *widget, cairo_t *cr, CustomData *data) {
cairo_rectangle (cr, 0, 0, allocation.width, allocation.height); cairo_rectangle (cr, 0, 0, allocation.width, allocation.height);
cairo_fill (cr); cairo_fill (cr);
} }
return FALSE; return FALSE;
} }
/* This function is called when the slider changes its position. We perform a seek to the /* This function is called when the slider changes its position. We perform a seek to the
* new position here. */ * new position here. */
static void slider_cb (GtkRange *range, CustomData *data) { static void slider_cb (GtkRange *range, CustomData *data) {
@ -93,7 +93,7 @@ static void slider_cb (GtkRange *range, CustomData *data) {
gst_element_seek_simple (data->playbin, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, gst_element_seek_simple (data->playbin, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
(gint64)(value * GST_SECOND)); (gint64)(value * GST_SECOND));
} }
/* This creates all the GTK+ widgets that compose our application, and registers the callbacks */ /* This creates all the GTK+ widgets that compose our application, and registers the callbacks */
static void create_ui (CustomData *data) { static void create_ui (CustomData *data) {
GtkWidget *main_window; /* The uppermost window, containing all other windows */ GtkWidget *main_window; /* The uppermost window, containing all other windows */
@ -102,58 +102,58 @@ static void create_ui (CustomData *data) {
GtkWidget *main_hbox; /* HBox to hold the video_window and the stream info text widget */ GtkWidget *main_hbox; /* HBox to hold the video_window and the stream info text widget */
GtkWidget *controls; /* HBox to hold the buttons and the slider */ GtkWidget *controls; /* HBox to hold the buttons and the slider */
GtkWidget *play_button, *pause_button, *stop_button; /* Buttons */ GtkWidget *play_button, *pause_button, *stop_button; /* Buttons */
main_window = gtk_window_new (GTK_WINDOW_TOPLEVEL); main_window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
g_signal_connect (G_OBJECT (main_window), "delete-event", G_CALLBACK (delete_event_cb), data); g_signal_connect (G_OBJECT (main_window), "delete-event", G_CALLBACK (delete_event_cb), data);
video_window = gtk_drawing_area_new (); video_window = gtk_drawing_area_new ();
gtk_widget_set_double_buffered (video_window, FALSE); gtk_widget_set_double_buffered (video_window, FALSE);
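/* Double buffering is disabled so that GTK does not paint its own back buffer over the frames
 * GStreamer renders directly onto this widget's native window. */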
g_signal_connect (video_window, "realize", G_CALLBACK (realize_cb), data); g_signal_connect (video_window, "realize", G_CALLBACK (realize_cb), data);
g_signal_connect (video_window, "draw", G_CALLBACK (draw_cb), data); g_signal_connect (video_window, "draw", G_CALLBACK (draw_cb), data);
play_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PLAY); play_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PLAY);
g_signal_connect (G_OBJECT (play_button), "clicked", G_CALLBACK (play_cb), data); g_signal_connect (G_OBJECT (play_button), "clicked", G_CALLBACK (play_cb), data);
pause_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PAUSE); pause_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PAUSE);
g_signal_connect (G_OBJECT (pause_button), "clicked", G_CALLBACK (pause_cb), data); g_signal_connect (G_OBJECT (pause_button), "clicked", G_CALLBACK (pause_cb), data);
stop_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_STOP); stop_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_STOP);
g_signal_connect (G_OBJECT (stop_button), "clicked", G_CALLBACK (stop_cb), data); g_signal_connect (G_OBJECT (stop_button), "clicked", G_CALLBACK (stop_cb), data);
data->slider = gtk_scale_new_with_range (GTK_ORIENTATION_HORIZONTAL, 0, 100, 1); data->slider = gtk_scale_new_with_range (GTK_ORIENTATION_HORIZONTAL, 0, 100, 1);
gtk_scale_set_draw_value (GTK_SCALE (data->slider), 0); gtk_scale_set_draw_value (GTK_SCALE (data->slider), 0);
data->slider_update_signal_id = g_signal_connect (G_OBJECT (data->slider), "value-changed", G_CALLBACK (slider_cb), data); data->slider_update_signal_id = g_signal_connect (G_OBJECT (data->slider), "value-changed", G_CALLBACK (slider_cb), data);
data->streams_list = gtk_text_view_new (); data->streams_list = gtk_text_view_new ();
gtk_text_view_set_editable (GTK_TEXT_VIEW (data->streams_list), FALSE); gtk_text_view_set_editable (GTK_TEXT_VIEW (data->streams_list), FALSE);
controls = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 0); controls = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 0);
gtk_box_pack_start (GTK_BOX (controls), play_button, FALSE, FALSE, 2); gtk_box_pack_start (GTK_BOX (controls), play_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), pause_button, FALSE, FALSE, 2); gtk_box_pack_start (GTK_BOX (controls), pause_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), stop_button, FALSE, FALSE, 2); gtk_box_pack_start (GTK_BOX (controls), stop_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), data->slider, TRUE, TRUE, 2); gtk_box_pack_start (GTK_BOX (controls), data->slider, TRUE, TRUE, 2);
main_hbox = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 0); main_hbox = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 0);
gtk_box_pack_start (GTK_BOX (main_hbox), video_window, TRUE, TRUE, 0); gtk_box_pack_start (GTK_BOX (main_hbox), video_window, TRUE, TRUE, 0);
gtk_box_pack_start (GTK_BOX (main_hbox), data->streams_list, FALSE, FALSE, 2); gtk_box_pack_start (GTK_BOX (main_hbox), data->streams_list, FALSE, FALSE, 2);
main_box = gtk_box_new (GTK_ORIENTATION_VERTICAL, 0); main_box = gtk_box_new (GTK_ORIENTATION_VERTICAL, 0);
gtk_box_pack_start (GTK_BOX (main_box), main_hbox, TRUE, TRUE, 0); gtk_box_pack_start (GTK_BOX (main_box), main_hbox, TRUE, TRUE, 0);
gtk_box_pack_start (GTK_BOX (main_box), controls, FALSE, FALSE, 0); gtk_box_pack_start (GTK_BOX (main_box), controls, FALSE, FALSE, 0);
gtk_container_add (GTK_CONTAINER (main_window), main_box); gtk_container_add (GTK_CONTAINER (main_window), main_box);
gtk_window_set_default_size (GTK_WINDOW (main_window), 640, 480); gtk_window_set_default_size (GTK_WINDOW (main_window), 640, 480);
gtk_widget_show_all (main_window); gtk_widget_show_all (main_window);
} }
/* This function is called periodically to refresh the GUI */ /* This function is called periodically to refresh the GUI */
static gboolean refresh_ui (CustomData *data) { static gboolean refresh_ui (CustomData *data) {
gint64 current = -1; gint64 current = -1;
/* We do not want to update anything unless we are in the PAUSED or PLAYING states */ /* We do not want to update anything unless we are in the PAUSED or PLAYING states */
if (data->state < GST_STATE_PAUSED) if (data->state < GST_STATE_PAUSED)
return TRUE; return TRUE;
/* If we didn't know it yet, query the stream duration */ /* If we didn't know it yet, query the stream duration */
if (!GST_CLOCK_TIME_IS_VALID (data->duration)) { if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
if (!gst_element_query_duration (data->playbin, GST_FORMAT_TIME, &data->duration)) { if (!gst_element_query_duration (data->playbin, GST_FORMAT_TIME, &data->duration)) {
@ -163,7 +163,7 @@ static gboolean refresh_ui (CustomData *data) {
gtk_range_set_range (GTK_RANGE (data->slider), 0, (gdouble)data->duration / GST_SECOND); gtk_range_set_range (GTK_RANGE (data->slider), 0, (gdouble)data->duration / GST_SECOND);
} }
} }
if (gst_element_query_position (data->playbin, GST_FORMAT_TIME, &current)) { if (gst_element_query_position (data->playbin, GST_FORMAT_TIME, &current)) {
/* Block the "value-changed" signal, so the slider_cb function is not called /* Block the "value-changed" signal, so the slider_cb function is not called
* (which would trigger a seek the user has not requested) */ * (which would trigger a seek the user has not requested) */
@ -175,7 +175,7 @@ static gboolean refresh_ui (CustomData *data) {
} }
return TRUE; return TRUE;
} }
/* This function is called when new metadata is discovered in the stream */ /* This function is called when new metadata is discovered in the stream */
static void tags_cb (GstElement *playbin, gint stream, CustomData *data) { static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
/* We are possibly in a GStreamer working thread, so we notify the main /* We are possibly in a GStreamer working thread, so we notify the main
@ -184,30 +184,30 @@ static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
gst_message_new_application (GST_OBJECT (playbin), gst_message_new_application (GST_OBJECT (playbin),
gst_structure_new_empty ("tags-changed"))); gst_structure_new_empty ("tags-changed")));
} }
/* This function is called when an error message is posted on the bus */ /* This function is called when an error message is posted on the bus */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) { static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err; GError *err;
gchar *debug_info; gchar *debug_info;
/* Print error details on the screen */ /* Print error details on the screen */
gst_message_parse_error (msg, &err, &debug_info); gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none"); g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err); g_clear_error (&err);
g_free (debug_info); g_free (debug_info);
/* Set the pipeline to READY (which stops playback) */ /* Set the pipeline to READY (which stops playback) */
gst_element_set_state (data->playbin, GST_STATE_READY); gst_element_set_state (data->playbin, GST_STATE_READY);
} }
/* This function is called when an End-Of-Stream message is posted on the bus. /* This function is called when an End-Of-Stream message is posted on the bus.
* We just set the pipeline to READY (which stops playback) */ * We just set the pipeline to READY (which stops playback) */
static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *data) { static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
g_print ("End-Of-Stream reached.\n"); g_print ("End-Of-Stream reached.\n");
gst_element_set_state (data->playbin, GST_STATE_READY); gst_element_set_state (data->playbin, GST_STATE_READY);
} }
/* This function is called when the pipeline changes states. We use it to /* This function is called when the pipeline changes states. We use it to
* keep track of the current state. */ * keep track of the current state. */
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) { static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
@ -222,7 +222,7 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
} }
} }
} }
/* Extract metadata from all the streams and write it to the text widget in the GUI */ /* Extract metadata from all the streams and write it to the text widget in the GUI */
static void analyze_streams (CustomData *data) { static void analyze_streams (CustomData *data) {
gint i; gint i;
@ -231,16 +231,16 @@ static void analyze_streams (CustomData *data) {
guint rate; guint rate;
gint n_video, n_audio, n_text; gint n_video, n_audio, n_text;
GtkTextBuffer *text; GtkTextBuffer *text;
/* Clean current contents of the widget */ /* Clean current contents of the widget */
text = gtk_text_view_get_buffer (GTK_TEXT_VIEW (data->streams_list)); text = gtk_text_view_get_buffer (GTK_TEXT_VIEW (data->streams_list));
gtk_text_buffer_set_text (text, "", -1); gtk_text_buffer_set_text (text, "", -1);
/* Read some properties */ /* Read some properties */
g_object_get (data->playbin, "n-video", &n_video, NULL); g_object_get (data->playbin, "n-video", &n_video, NULL);
g_object_get (data->playbin, "n-audio", &n_audio, NULL); g_object_get (data->playbin, "n-audio", &n_audio, NULL);
g_object_get (data->playbin, "n-text", &n_text, NULL); g_object_get (data->playbin, "n-text", &n_text, NULL);
for (i = 0; i < n_video; i++) { for (i = 0; i < n_video; i++) {
tags = NULL; tags = NULL;
/* Retrieve the stream's video tags */ /* Retrieve the stream's video tags */
@ -257,7 +257,7 @@ static void analyze_streams (CustomData *data) {
gst_tag_list_free (tags); gst_tag_list_free (tags);
} }
} }
for (i = 0; i < n_audio; i++) { for (i = 0; i < n_audio; i++) {
tags = NULL; tags = NULL;
/* Retrieve the stream's audio tags */ /* Retrieve the stream's audio tags */
@ -286,7 +286,7 @@ static void analyze_streams (CustomData *data) {
gst_tag_list_free (tags); gst_tag_list_free (tags);
} }
} }
for (i = 0; i < n_text; i++) { for (i = 0; i < n_text; i++) {
tags = NULL; tags = NULL;
/* Retrieve the stream's subtitle tags */ /* Retrieve the stream's subtitle tags */
@ -305,7 +305,7 @@ static void analyze_streams (CustomData *data) {
} }
} }
} }
/* This function is called when an "application" message is posted on the bus. /* This function is called when an "application" message is posted on the bus.
* Here we retrieve the message posted by the tags_cb callback */ * Here we retrieve the message posted by the tags_cb callback */
static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) { static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
@ -315,41 +315,41 @@ static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
analyze_streams (data); analyze_streams (data);
} }
} }
int main(int argc, char *argv[]) { int main(int argc, char *argv[]) {
CustomData data; CustomData data;
GstStateChangeReturn ret; GstStateChangeReturn ret;
GstBus *bus; GstBus *bus;
/* Initialize GTK */ /* Initialize GTK */
gtk_init (&argc, &argv); gtk_init (&argc, &argv);
/* Initialize GStreamer */ /* Initialize GStreamer */
gst_init (&argc, &argv); gst_init (&argc, &argv);
/* Initialize our data structure */ /* Initialize our data structure */
memset (&data, 0, sizeof (data)); memset (&data, 0, sizeof (data));
data.duration = GST_CLOCK_TIME_NONE; data.duration = GST_CLOCK_TIME_NONE;
/* Create the elements */ /* Create the elements */
data.playbin = gst_element_factory_make ("playbin", "playbin"); data.playbin = gst_element_factory_make ("playbin", "playbin");
if (!data.playbin) { if (!data.playbin) {
g_printerr ("Not all elements could be created.\n"); g_printerr ("Not all elements could be created.\n");
return -1; return -1;
} }
/* Set the URI to play */ /* Set the URI to play */
g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL); g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
/* Connect to interesting signals in playbin */ /* Connect to interesting signals in playbin */
g_signal_connect (G_OBJECT (data.playbin), "video-tags-changed", (GCallback) tags_cb, &data); g_signal_connect (G_OBJECT (data.playbin), "video-tags-changed", (GCallback) tags_cb, &data);
g_signal_connect (G_OBJECT (data.playbin), "audio-tags-changed", (GCallback) tags_cb, &data); g_signal_connect (G_OBJECT (data.playbin), "audio-tags-changed", (GCallback) tags_cb, &data);
g_signal_connect (G_OBJECT (data.playbin), "text-tags-changed", (GCallback) tags_cb, &data); g_signal_connect (G_OBJECT (data.playbin), "text-tags-changed", (GCallback) tags_cb, &data);
/* Create the GUI */ /* Create the GUI */
create_ui (&data); create_ui (&data);
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */ /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
bus = gst_element_get_bus (data.playbin); bus = gst_element_get_bus (data.playbin);
gst_bus_add_signal_watch (bus); gst_bus_add_signal_watch (bus);
@ -358,7 +358,7 @@ int main(int argc, char *argv[]) {
g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, &data); g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, &data);
g_signal_connect (G_OBJECT (bus), "message::application", (GCallback)application_cb, &data); g_signal_connect (G_OBJECT (bus), "message::application", (GCallback)application_cb, &data);
gst_object_unref (bus); gst_object_unref (bus);
/* Start playing */ /* Start playing */
ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING); ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) { if (ret == GST_STATE_CHANGE_FAILURE) {
@ -366,13 +366,13 @@ int main(int argc, char *argv[]) {
gst_object_unref (data.playbin); gst_object_unref (data.playbin);
return -1; return -1;
} }
/* Register a function that GLib will call every second */ /* Register a function that GLib will call every second */
g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data); g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);
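/* refresh_ui returns TRUE, so GLib keeps invoking it once per second until the main loop quits. */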
/* Start the GTK main loop. We will not regain control until gtk_main_quit is called. */ /* Start the GTK main loop. We will not regain control until gtk_main_quit is called. */
gtk_main (); gtk_main ();
/* Free resources */ /* Free resources */
gst_element_set_state (data.playbin, GST_STATE_NULL); gst_element_set_state (data.playbin, GST_STATE_NULL);
gst_object_unref (data.playbin); gst_object_unref (data.playbin);


@ -1,207 +1,207 @@
#include <gst/gst.h> #include <gst/gst.h>
/* Functions below print the Capabilities in a human-friendly format */ /* Functions below print the Capabilities in a human-friendly format */
static gboolean print_field (GQuark field, const GValue * value, gpointer pfx) { static gboolean print_field (GQuark field, const GValue * value, gpointer pfx) {
gchar *str = gst_value_serialize (value); gchar *str = gst_value_serialize (value);
g_print ("%s %15s: %s\n", (gchar *) pfx, g_quark_to_string (field), str); g_print ("%s %15s: %s\n", (gchar *) pfx, g_quark_to_string (field), str);
g_free (str); g_free (str);
return TRUE; return TRUE;
} }
static void print_caps (const GstCaps * caps, const gchar * pfx) { static void print_caps (const GstCaps * caps, const gchar * pfx) {
guint i; guint i;
g_return_if_fail (caps != NULL); g_return_if_fail (caps != NULL);
if (gst_caps_is_any (caps)) { if (gst_caps_is_any (caps)) {
g_print ("%sANY\n", pfx); g_print ("%sANY\n", pfx);
return; return;
} }
if (gst_caps_is_empty (caps)) { if (gst_caps_is_empty (caps)) {
g_print ("%sEMPTY\n", pfx); g_print ("%sEMPTY\n", pfx);
return; return;
} }
for (i = 0; i < gst_caps_get_size (caps); i++) { for (i = 0; i < gst_caps_get_size (caps); i++) {
GstStructure *structure = gst_caps_get_structure (caps, i); GstStructure *structure = gst_caps_get_structure (caps, i);
g_print ("%s%s\n", pfx, gst_structure_get_name (structure)); g_print ("%s%s\n", pfx, gst_structure_get_name (structure));
gst_structure_foreach (structure, print_field, (gpointer) pfx); gst_structure_foreach (structure, print_field, (gpointer) pfx);
} }
} }
/* Prints information about a Pad Template, including its Capabilities */ /* Prints information about a Pad Template, including its Capabilities */
static void print_pad_templates_information (GstElementFactory * factory) { static void print_pad_templates_information (GstElementFactory * factory) {
const GList *pads; const GList *pads;
GstStaticPadTemplate *padtemplate; GstStaticPadTemplate *padtemplate;
g_print ("Pad Templates for %s:\n", gst_element_factory_get_longname (factory)); g_print ("Pad Templates for %s:\n", gst_element_factory_get_longname (factory));
if (!gst_element_factory_get_num_pad_templates (factory)) { if (!gst_element_factory_get_num_pad_templates (factory)) {
g_print (" none\n"); g_print (" none\n");
return; return;
} }
pads = gst_element_factory_get_static_pad_templates (factory); pads = gst_element_factory_get_static_pad_templates (factory);
while (pads) { while (pads) {
padtemplate = pads->data; padtemplate = pads->data;
pads = g_list_next (pads); pads = g_list_next (pads);
if (padtemplate->direction == GST_PAD_SRC) if (padtemplate->direction == GST_PAD_SRC)
g_print (" SRC template: '%s'\n", padtemplate->name_template); g_print (" SRC template: '%s'\n", padtemplate->name_template);
else if (padtemplate->direction == GST_PAD_SINK) else if (padtemplate->direction == GST_PAD_SINK)
g_print (" SINK template: '%s'\n", padtemplate->name_template); g_print (" SINK template: '%s'\n", padtemplate->name_template);
else else
g_print (" UNKNOWN!!! template: '%s'\n", padtemplate->name_template); g_print (" UNKNOWN!!! template: '%s'\n", padtemplate->name_template);
if (padtemplate->presence == GST_PAD_ALWAYS) if (padtemplate->presence == GST_PAD_ALWAYS)
g_print (" Availability: Always\n"); g_print (" Availability: Always\n");
else if (padtemplate->presence == GST_PAD_SOMETIMES) else if (padtemplate->presence == GST_PAD_SOMETIMES)
g_print (" Availability: Sometimes\n"); g_print (" Availability: Sometimes\n");
else if (padtemplate->presence == GST_PAD_REQUEST) { else if (padtemplate->presence == GST_PAD_REQUEST) {
g_print (" Availability: On request\n"); g_print (" Availability: On request\n");
} else } else
g_print (" Availability: UNKNOWN!!!\n"); g_print (" Availability: UNKNOWN!!!\n");
if (padtemplate->static_caps.string) { if (padtemplate->static_caps.string) {
GstCaps *caps; GstCaps *caps;
g_print (" Capabilities:\n"); g_print (" Capabilities:\n");
caps = gst_static_caps_get (&padtemplate->static_caps); caps = gst_static_caps_get (&padtemplate->static_caps);
print_caps (caps, " "); print_caps (caps, " ");
gst_caps_unref (caps); gst_caps_unref (caps);
} }
g_print ("\n"); g_print ("\n");
} }
} }
/* Shows the CURRENT capabilities of the requested pad in the given element */ /* Shows the CURRENT capabilities of the requested pad in the given element */
static void print_pad_capabilities (GstElement *element, gchar *pad_name) { static void print_pad_capabilities (GstElement *element, gchar *pad_name) {
GstPad *pad = NULL; GstPad *pad = NULL;
GstCaps *caps = NULL; GstCaps *caps = NULL;
/* Retrieve pad */ /* Retrieve pad */
pad = gst_element_get_static_pad (element, pad_name); pad = gst_element_get_static_pad (element, pad_name);
if (!pad) { if (!pad) {
g_printerr ("Could not retrieve pad '%s'\n", pad_name); g_printerr ("Could not retrieve pad '%s'\n", pad_name);
return; return;
} }
/* Retrieve negotiated caps (or acceptable caps if negotiation is not finished yet) */ /* Retrieve negotiated caps (or acceptable caps if negotiation is not finished yet) */
caps = gst_pad_get_current_caps (pad); caps = gst_pad_get_current_caps (pad);
if (!caps) if (!caps)
caps = gst_pad_query_caps (pad, NULL); caps = gst_pad_query_caps (pad, NULL);
/* Print and free */ /* Print and free */
g_print ("Caps for the %s pad:\n", pad_name); g_print ("Caps for the %s pad:\n", pad_name);
print_caps (caps, " "); print_caps (caps, " ");
gst_caps_unref (caps); gst_caps_unref (caps);
gst_object_unref (pad); gst_object_unref (pad);
} }
int main(int argc, char *argv[]) { int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *sink; GstElement *pipeline, *source, *sink;
GstElementFactory *source_factory, *sink_factory; GstElementFactory *source_factory, *sink_factory;
GstBus *bus; GstBus *bus;
GstMessage *msg; GstMessage *msg;
GstStateChangeReturn ret; GstStateChangeReturn ret;
gboolean terminate = FALSE; gboolean terminate = FALSE;
/* Initialize GStreamer */ /* Initialize GStreamer */
gst_init (&argc, &argv); gst_init (&argc, &argv);
/* Create the element factories */ /* Create the element factories */
source_factory = gst_element_factory_find ("audiotestsrc"); source_factory = gst_element_factory_find ("audiotestsrc");
sink_factory = gst_element_factory_find ("autoaudiosink"); sink_factory = gst_element_factory_find ("autoaudiosink");
if (!source_factory || !sink_factory) { if (!source_factory || !sink_factory) {
g_printerr ("Not all element factories could be created.\n"); g_printerr ("Not all element factories could be created.\n");
return -1; return -1;
} }
/* Print information about the pad templates of these factories */ /* Print information about the pad templates of these factories */
print_pad_templates_information (source_factory); print_pad_templates_information (source_factory);
print_pad_templates_information (sink_factory); print_pad_templates_information (sink_factory);
/* Ask the factories to instantiate actual elements */ /* Ask the factories to instantiate actual elements */
source = gst_element_factory_create (source_factory, "source"); source = gst_element_factory_create (source_factory, "source");
sink = gst_element_factory_create (sink_factory, "sink"); sink = gst_element_factory_create (sink_factory, "sink");
/* Create the empty pipeline */ /* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline"); pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !sink) { if (!pipeline || !source || !sink) {
g_printerr ("Not all elements could be created.\n"); g_printerr ("Not all elements could be created.\n");
return -1; return -1;
} }
/* Build the pipeline */ /* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL); gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
if (gst_element_link (source, sink) != TRUE) { if (gst_element_link (source, sink) != TRUE) {
g_printerr ("Elements could not be linked.\n"); g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline); gst_object_unref (pipeline);
return -1; return -1;
} }
/* Print initial negotiated caps (in NULL state) */ /* Print initial negotiated caps (in NULL state) */
g_print ("In NULL state:\n"); g_print ("In NULL state:\n");
print_pad_capabilities (sink, "sink"); print_pad_capabilities (sink, "sink");
/* Start playing */ /* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING); ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) { if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state (check the bus for error messages).\n"); g_printerr ("Unable to set the pipeline to the playing state (check the bus for error messages).\n");
} }
/* Wait until error, EOS or State Change */ /* Wait until error, EOS or State Change */
bus = gst_element_get_bus (pipeline); bus = gst_element_get_bus (pipeline);
do { do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS | msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS |
GST_MESSAGE_STATE_CHANGED); GST_MESSAGE_STATE_CHANGED);
/* Parse message */ /* Parse message */
if (msg != NULL) { if (msg != NULL) {
GError *err; GError *err;
gchar *debug_info; gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) { switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR: case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info); gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none"); g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err); g_clear_error (&err);
g_free (debug_info); g_free (debug_info);
terminate = TRUE; terminate = TRUE;
break; break;
case GST_MESSAGE_EOS: case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n"); g_print ("End-Of-Stream reached.\n");
terminate = TRUE; terminate = TRUE;
break; break;
case GST_MESSAGE_STATE_CHANGED: case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */ /* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (pipeline)) { if (GST_MESSAGE_SRC (msg) == GST_OBJECT (pipeline)) {
GstState old_state, new_state, pending_state; GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state); gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("\nPipeline state changed from %s to %s:\n", g_print ("\nPipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state)); gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
/* Print the current capabilities of the sink element */ /* Print the current capabilities of the sink element */
print_pad_capabilities (sink, "sink"); print_pad_capabilities (sink, "sink");
} }
break; break;
default: default:
/* We should not reach here because we only asked for ERRORs, EOS and STATE_CHANGED */ /* We should not reach here because we only asked for ERRORs, EOS and STATE_CHANGED */
g_printerr ("Unexpected message received.\n"); g_printerr ("Unexpected message received.\n");
break; break;
} }
gst_message_unref (msg); gst_message_unref (msg);
} }
} while (!terminate); } while (!terminate);
/* Free resources */ /* Free resources */
gst_object_unref (bus); gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL); gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline); gst_object_unref (pipeline);
gst_object_unref (source_factory); gst_object_unref (source_factory);
gst_object_unref (sink_factory); gst_object_unref (sink_factory);
return 0; return 0;
} }


@ -1,5 +1,5 @@
#include <gst/gst.h> #include <gst/gst.h>
int main(int argc, char *argv[]) { int main(int argc, char *argv[]) {
GstElement *pipeline, *audio_source, *tee, *audio_queue, *audio_convert, *audio_resample, *audio_sink; GstElement *pipeline, *audio_source, *tee, *audio_queue, *audio_convert, *audio_resample, *audio_sink;
GstElement *video_queue, *visual, *video_convert, *video_sink; GstElement *video_queue, *visual, *video_convert, *video_sink;
@ -8,10 +8,10 @@ int main(int argc, char *argv[]) {
GstPadTemplate *tee_src_pad_template; GstPadTemplate *tee_src_pad_template;
GstPad *tee_audio_pad, *tee_video_pad; GstPad *tee_audio_pad, *tee_video_pad;
GstPad *queue_audio_pad, *queue_video_pad; GstPad *queue_audio_pad, *queue_video_pad;
/* Initialize GStreamer */ /* Initialize GStreamer */
gst_init (&argc, &argv); gst_init (&argc, &argv);
/* Create the elements */ /* Create the elements */
audio_source = gst_element_factory_make ("audiotestsrc", "audio_source"); audio_source = gst_element_factory_make ("audiotestsrc", "audio_source");
tee = gst_element_factory_make ("tee", "tee"); tee = gst_element_factory_make ("tee", "tee");
@ -23,20 +23,20 @@ int main(int argc, char *argv[]) {
visual = gst_element_factory_make ("wavescope", "visual"); visual = gst_element_factory_make ("wavescope", "visual");
video_convert = gst_element_factory_make ("videoconvert", "video_convert"); video_convert = gst_element_factory_make ("videoconvert", "video_convert");
video_sink = gst_element_factory_make ("autovideosink", "video_sink"); video_sink = gst_element_factory_make ("autovideosink", "video_sink");
/* Create the empty pipeline */ /* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline"); pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !audio_source || !tee || !audio_queue || !audio_convert || !audio_resample || !audio_sink || if (!pipeline || !audio_source || !tee || !audio_queue || !audio_convert || !audio_resample || !audio_sink ||
!video_queue || !visual || !video_convert || !video_sink) { !video_queue || !visual || !video_convert || !video_sink) {
g_printerr ("Not all elements could be created.\n"); g_printerr ("Not all elements could be created.\n");
return -1; return -1;
} }
/* Configure elements */ /* Configure elements */
g_object_set (audio_source, "freq", 215.0f, NULL); g_object_set (audio_source, "freq", 215.0f, NULL);
g_object_set (visual, "shader", 0, "style", 1, NULL); g_object_set (visual, "shader", 0, "style", 1, NULL);
/* Link all elements that can be automatically linked because they have "Always" pads */ /* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue, audio_convert, audio_resample, audio_sink, gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue, audio_convert, audio_resample, audio_sink,
video_queue, visual, video_convert, video_sink, NULL); video_queue, visual, video_convert, video_sink, NULL);
@ -47,7 +47,7 @@ int main(int argc, char *argv[]) {
gst_object_unref (pipeline); gst_object_unref (pipeline);
return -1; return -1;
} }
/* Manually link the Tee, which has "Request" pads */ /* Manually link the Tee, which has "Request" pads */
tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src_%u"); tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src_%u");
tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL); tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);
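/* Each request on the "src_%u" template creates a new source pad on the tee (src_0, src_1, ...);
 * these request pads must be explicitly released and unreffed when no longer needed, as done below. */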
@ -64,26 +64,26 @@ int main(int argc, char *argv[]) {
} }
gst_object_unref (queue_audio_pad); gst_object_unref (queue_audio_pad);
gst_object_unref (queue_video_pad); gst_object_unref (queue_video_pad);
/* Start playing the pipeline */ /* Start playing the pipeline */
gst_element_set_state (pipeline, GST_STATE_PLAYING); gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */ /* Wait until error or EOS */
bus = gst_element_get_bus (pipeline); bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS); msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Release the request pads from the Tee, and unref them */ /* Release the request pads from the Tee, and unref them */
gst_element_release_request_pad (tee, tee_audio_pad); gst_element_release_request_pad (tee, tee_audio_pad);
gst_element_release_request_pad (tee, tee_video_pad); gst_element_release_request_pad (tee, tee_video_pad);
gst_object_unref (tee_audio_pad); gst_object_unref (tee_audio_pad);
gst_object_unref (tee_video_pad); gst_object_unref (tee_video_pad);
/* Free resources */ /* Free resources */
if (msg != NULL) if (msg != NULL)
gst_message_unref (msg); gst_message_unref (msg);
gst_object_unref (bus); gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL); gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline); gst_object_unref (pipeline);
return 0; return 0;
} }


@ -1,24 +1,24 @@
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/audio/audio.h> #include <gst/audio/audio.h>
#include <string.h> #include <string.h>
#define CHUNK_SIZE 1024 /* Amount of bytes we are sending in each buffer */ #define CHUNK_SIZE 1024 /* Amount of bytes we are sending in each buffer */
#define SAMPLE_RATE 44100 /* Samples per second we are sending */ #define SAMPLE_RATE 44100 /* Samples per second we are sending */
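/* With 16-bit mono samples, a 1024-byte chunk holds 512 samples, i.e. about 11.6 ms of audio at 44100 Hz. */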
/* Structure to contain all our information, so we can pass it to callbacks */ /* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData { typedef struct _CustomData {
GstElement *pipeline, *app_source, *tee, *audio_queue, *audio_convert1, *audio_resample, *audio_sink; GstElement *pipeline, *app_source, *tee, *audio_queue, *audio_convert1, *audio_resample, *audio_sink;
GstElement *video_queue, *audio_convert2, *visual, *video_convert, *video_sink; GstElement *video_queue, *audio_convert2, *visual, *video_convert, *video_sink;
GstElement *app_queue, *app_sink; GstElement *app_queue, *app_sink;
guint64 num_samples; /* Number of samples generated so far (for timestamp generation) */ guint64 num_samples; /* Number of samples generated so far (for timestamp generation) */
gfloat a, b, c, d; /* For waveform generation */ gfloat a, b, c, d; /* For waveform generation */
guint sourceid; /* To control the GSource */ guint sourceid; /* To control the GSource */
GMainLoop *main_loop; /* GLib's Main Loop */ GMainLoop *main_loop; /* GLib's Main Loop */
} CustomData; } CustomData;
/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc. /* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
* The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal) * The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
* and is removed when appsrc has enough data (enough-data signal). * and is removed when appsrc has enough data (enough-data signal).
@ -31,14 +31,14 @@ static gboolean push_data (CustomData *data) {
gint16 *raw; gint16 *raw;
gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */ gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
gfloat freq; gfloat freq;
/* Create a new empty buffer */ /* Create a new empty buffer */
buffer = gst_buffer_new_and_alloc (CHUNK_SIZE); buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);
/* Set its timestamp and duration */ /* Set its timestamp and duration */
GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples, GST_SECOND, SAMPLE_RATE); GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples, GST_SECOND, SAMPLE_RATE);
GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (num_samples, GST_SECOND, SAMPLE_RATE); GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (num_samples, GST_SECOND, SAMPLE_RATE);
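/* gst_util_uint64_scale (n, GST_SECOND, SAMPLE_RATE) computes n * 1000000000 / 44100 without overflow:
 * after 44100 samples the timestamp is exactly 1 second, and each 512-sample buffer lasts ~11.6 ms. */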
/* Generate some psychedelic waveforms */ /* Generate some psychedelic waveforms */
gst_buffer_map (buffer, &map, GST_MAP_WRITE); gst_buffer_map (buffer, &map, GST_MAP_WRITE);
raw = (gint16 *)map.data; raw = (gint16 *)map.data;
@ -52,21 +52,21 @@ static gboolean push_data (CustomData *data) {
} }
gst_buffer_unmap (buffer, &map); gst_buffer_unmap (buffer, &map);
data->num_samples += num_samples; data->num_samples += num_samples;
/* Push the buffer into the appsrc */ /* Push the buffer into the appsrc */
g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret); g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);
/* Free the buffer now that we are done with it */ /* Free the buffer now that we are done with it */
gst_buffer_unref (buffer); gst_buffer_unref (buffer);
if (ret != GST_FLOW_OK) { if (ret != GST_FLOW_OK) {
/* We got some error, stop sending data */ /* We got some error, stop sending data */
return FALSE; return FALSE;
} }
return TRUE; return TRUE;
} }
/* This signal callback triggers when appsrc needs data. Here, we add an idle handler /* This signal callback triggers when appsrc needs data. Here, we add an idle handler
* to the mainloop to start pushing data into the appsrc */ * to the mainloop to start pushing data into the appsrc */
static void start_feed (GstElement *source, guint size, CustomData *data) { static void start_feed (GstElement *source, guint size, CustomData *data) {
@ -75,7 +75,7 @@ static void start_feed (GstElement *source, guint size, CustomData *data) {
data->sourceid = g_idle_add ((GSourceFunc) push_data, data); data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
} }
} }
/* This callback triggers when appsrc has enough data and we can stop sending. /* This callback triggers when appsrc has enough data and we can stop sending.
* We remove the idle handler from the mainloop */ * We remove the idle handler from the mainloop */
static void stop_feed (GstElement *source, CustomData *data) { static void stop_feed (GstElement *source, CustomData *data) {
@ -85,11 +85,11 @@ static void stop_feed (GstElement *source, CustomData *data) {
data->sourceid = 0; data->sourceid = 0;
} }
} }
/* The appsink has received a buffer */ /* The appsink has received a buffer */
static void new_sample (GstElement *sink, CustomData *data) { static void new_sample (GstElement *sink, CustomData *data) {
GstSample *sample; GstSample *sample;
/* Retrieve the buffer */ /* Retrieve the buffer */
g_signal_emit_by_name (sink, "pull-sample", &sample); g_signal_emit_by_name (sink, "pull-sample", &sample);
if (sample) { if (sample) {
@ -98,22 +98,22 @@ static void new_sample (GstElement *sink, CustomData *data) {
gst_sample_unref (sample); gst_sample_unref (sample);
} }
} }
/* This function is called when an error message is posted on the bus */ /* This function is called when an error message is posted on the bus */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) { static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err; GError *err;
gchar *debug_info; gchar *debug_info;
/* Print error details on the screen */ /* Print error details on the screen */
gst_message_parse_error (msg, &err, &debug_info); gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message); g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none"); g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err); g_clear_error (&err);
g_free (debug_info); g_free (debug_info);
g_main_loop_quit (data->main_loop); g_main_loop_quit (data->main_loop);
} }
int main(int argc, char *argv[]) { int main(int argc, char *argv[]) {
CustomData data; CustomData data;
GstPadTemplate *tee_src_pad_template; GstPadTemplate *tee_src_pad_template;
@ -122,15 +122,15 @@ int main(int argc, char *argv[]) {
GstAudioInfo info; GstAudioInfo info;
GstCaps *audio_caps; GstCaps *audio_caps;
GstBus *bus; GstBus *bus;
/* Initialize custom data structure */ /* Initialize custom data structure */
memset (&data, 0, sizeof (data)); memset (&data, 0, sizeof (data));
data.b = 1; /* For waveform generation */ data.b = 1; /* For waveform generation */
data.d = 1; data.d = 1;
/* Initialize GStreamer */ /* Initialize GStreamer */
gst_init (&argc, &argv); gst_init (&argc, &argv);
/* Create the elements */ /* Create the elements */
data.app_source = gst_element_factory_make ("appsrc", "audio_source"); data.app_source = gst_element_factory_make ("appsrc", "audio_source");
data.tee = gst_element_factory_make ("tee", "tee"); data.tee = gst_element_factory_make ("tee", "tee");
@ -145,34 +145,34 @@ int main(int argc, char *argv[]) {
data.video_sink = gst_element_factory_make ("autovideosink", "video_sink"); data.video_sink = gst_element_factory_make ("autovideosink", "video_sink");
data.app_queue = gst_element_factory_make ("queue", "app_queue"); data.app_queue = gst_element_factory_make ("queue", "app_queue");
data.app_sink = gst_element_factory_make ("appsink", "app_sink"); data.app_sink = gst_element_factory_make ("appsink", "app_sink");
/* Create the empty pipeline */ /* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline"); data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.app_source || !data.tee || !data.audio_queue || !data.audio_convert1 || if (!data.pipeline || !data.app_source || !data.tee || !data.audio_queue || !data.audio_convert1 ||
!data.audio_resample || !data.audio_sink || !data.video_queue || !data.audio_convert2 || !data.visual || !data.audio_resample || !data.audio_sink || !data.video_queue || !data.audio_convert2 || !data.visual ||
!data.video_convert || !data.video_sink || !data.app_queue || !data.app_sink) { !data.video_convert || !data.video_sink || !data.app_queue || !data.app_sink) {
g_printerr ("Not all elements could be created.\n"); g_printerr ("Not all elements could be created.\n");
return -1; return -1;
} }
/* Configure wavescope */ /* Configure wavescope */
g_object_set (data.visual, "shader", 0, "style", 0, NULL); g_object_set (data.visual, "shader", 0, "style", 0, NULL);
/* Configure appsrc */ /* Configure appsrc */
gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, SAMPLE_RATE, 1, NULL); gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, SAMPLE_RATE, 1, NULL);
audio_caps = gst_audio_info_to_caps (&info); audio_caps = gst_audio_info_to_caps (&info);
g_object_set (data.app_source, "caps", audio_caps, "format", GST_FORMAT_TIME, NULL); g_object_set (data.app_source, "caps", audio_caps, "format", GST_FORMAT_TIME, NULL);
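/* On a little-endian machine audio_caps serializes to roughly
 * "audio/x-raw, format=S16LE, layout=interleaved, rate=44100, channels=1", and format=GST_FORMAT_TIME
 * tells appsrc to operate (and timestamp) in time units rather than bytes. */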
g_signal_connect (data.app_source, "need-data", G_CALLBACK (start_feed), &data); g_signal_connect (data.app_source, "need-data", G_CALLBACK (start_feed), &data);
g_signal_connect (data.app_source, "enough-data", G_CALLBACK (stop_feed), &data); g_signal_connect (data.app_source, "enough-data", G_CALLBACK (stop_feed), &data);
/* Configure appsink */ /* Configure appsink */
g_object_set (data.app_sink, "emit-signals", TRUE, "caps", audio_caps, NULL); g_object_set (data.app_sink, "emit-signals", TRUE, "caps", audio_caps, NULL);
g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data); g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data);
gst_caps_unref (audio_caps); gst_caps_unref (audio_caps);
/* Link all elements that can be automatically linked because they have "Always" pads */ /* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (data.pipeline), data.app_source, data.tee, data.audio_queue, data.audio_convert1, data.audio_resample, gst_bin_add_many (GST_BIN (data.pipeline), data.app_source, data.tee, data.audio_queue, data.audio_convert1, data.audio_resample,
data.audio_sink, data.video_queue, data.audio_convert2, data.visual, data.video_convert, data.video_sink, data.app_queue, data.audio_sink, data.video_queue, data.audio_convert2, data.visual, data.video_convert, data.video_sink, data.app_queue,
data.app_sink, NULL); data.app_sink, NULL);
if (gst_element_link_many (data.app_source, data.tee, NULL) != TRUE || if (gst_element_link_many (data.app_source, data.tee, NULL) != TRUE ||
@ -183,7 +183,7 @@ int main(int argc, char *argv[]) {
gst_object_unref (data.pipeline); gst_object_unref (data.pipeline);
return -1; return -1;
} }
/* Manually link the Tee, which has "Request" pads */ /* Manually link the Tee, which has "Request" pads */
tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (data.tee), "src_%u"); tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (data.tee), "src_%u");
tee_audio_pad = gst_element_request_pad (data.tee, tee_src_pad_template, NULL, NULL); tee_audio_pad = gst_element_request_pad (data.tee, tee_src_pad_template, NULL, NULL);
@ -205,20 +205,20 @@ int main(int argc, char *argv[]) {
gst_object_unref (queue_audio_pad); gst_object_unref (queue_audio_pad);
gst_object_unref (queue_video_pad); gst_object_unref (queue_video_pad);
gst_object_unref (queue_app_pad); gst_object_unref (queue_app_pad);
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */ /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
bus = gst_element_get_bus (data.pipeline); bus = gst_element_get_bus (data.pipeline);
gst_bus_add_signal_watch (bus); gst_bus_add_signal_watch (bus);
g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data); g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
gst_object_unref (bus); gst_object_unref (bus);
/* Start playing the pipeline */ /* Start playing the pipeline */
gst_element_set_state (data.pipeline, GST_STATE_PLAYING); gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
/* Create a GLib Main Loop and set it to run */ /* Create a GLib Main Loop and set it to run */
data.main_loop = g_main_loop_new (NULL, FALSE); data.main_loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (data.main_loop); g_main_loop_run (data.main_loop);
/* Release the request pads from the Tee, and unref them */ /* Release the request pads from the Tee, and unref them */
gst_element_release_request_pad (data.tee, tee_audio_pad); gst_element_release_request_pad (data.tee, tee_audio_pad);
gst_element_release_request_pad (data.tee, tee_video_pad); gst_element_release_request_pad (data.tee, tee_video_pad);
@ -226,7 +226,7 @@ int main(int argc, char *argv[]) {
gst_object_unref (tee_audio_pad); gst_object_unref (tee_audio_pad);
gst_object_unref (tee_video_pad); gst_object_unref (tee_video_pad);
gst_object_unref (tee_app_pad); gst_object_unref (tee_app_pad);
/* Free resources */ /* Free resources */
gst_element_set_state (data.pipeline, GST_STATE_NULL); gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline); gst_object_unref (data.pipeline);

View file

@@ -1,40 +1,40 @@
#include <string.h>
#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
  GstDiscoverer *discoverer;
  GMainLoop *loop;
} CustomData;
/* Print a tag in a human-readable format (name: value) */
static void print_tag_foreach (const GstTagList *tags, const gchar *tag, gpointer user_data) {
  GValue val = { 0, };
  gchar *str;
  gint depth = GPOINTER_TO_INT (user_data);
  gst_tag_list_copy_value (&val, tags, tag);
  if (G_VALUE_HOLDS_STRING (&val))
    str = g_value_dup_string (&val);
  else
    str = gst_value_serialize (&val);
  g_print ("%*s%s: %s\n", 2 * depth, " ", gst_tag_get_nick (tag), str);
  g_free (str);
  g_value_unset (&val);
}
/* Print information regarding a stream */
static void print_stream_info (GstDiscovererStreamInfo *info, gint depth) {
  gchar *desc = NULL;
  GstCaps *caps;
  const GstTagList *tags;
  caps = gst_discoverer_stream_info_get_caps (info);
  if (caps) {
    if (gst_caps_is_fixed (caps))
      desc = gst_pb_utils_get_codec_description (caps);
@@ -42,37 +42,37 @@ static void print_stream_info (GstDiscovererStreamInfo *info, gint depth) {
      desc = gst_caps_to_string (caps);
    gst_caps_unref (caps);
  }
  g_print ("%*s%s: %s\n", 2 * depth, " ", gst_discoverer_stream_info_get_stream_type_nick (info), (desc ? desc : ""));
  if (desc) {
    g_free (desc);
    desc = NULL;
  }
  tags = gst_discoverer_stream_info_get_tags (info);
  if (tags) {
    g_print ("%*sTags:\n", 2 * (depth + 1), " ");
    gst_tag_list_foreach (tags, print_tag_foreach, GINT_TO_POINTER (depth + 2));
  }
}
/* Print information regarding a stream and its substreams, if any */
static void print_topology (GstDiscovererStreamInfo *info, gint depth) {
  GstDiscovererStreamInfo *next;
  if (!info)
    return;
  print_stream_info (info, depth);
  next = gst_discoverer_stream_info_get_next (info);
  if (next) {
    print_topology (next, depth + 1);
    gst_discoverer_stream_info_unref (next);
  } else if (GST_IS_DISCOVERER_CONTAINER_INFO (info)) {
    GList *tmp, *streams;
    streams = gst_discoverer_container_info_get_streams (GST_DISCOVERER_CONTAINER_INFO (info));
    for (tmp = streams; tmp; tmp = tmp->next) {
      GstDiscovererStreamInfo *tmpinf = (GstDiscovererStreamInfo *) tmp->data;
@@ -81,7 +81,7 @@ static void print_topology (GstDiscovererStreamInfo *info, gint depth) {
    gst_discoverer_stream_info_list_free (streams);
  }
}
/* This function is called every time the discoverer has information regarding
 * one of the URIs we provided.*/
static void on_discovered_cb (GstDiscoverer *discoverer, GstDiscovererInfo *info, GError *err, CustomData *data) {
@@ -89,7 +89,7 @@ static void on_discovered_cb (GstDiscoverer *discoverer, GstDiscovererInfo *info
  const gchar *uri;
  const GstTagList *tags;
  GstDiscovererStreamInfo *sinfo;
  uri = gst_discoverer_info_get_uri (info);
  result = gst_discoverer_info_get_result (info);
  switch (result) {
@@ -108,10 +108,10 @@ static void on_discovered_cb (GstDiscoverer *discoverer, GstDiscovererInfo *info
    case GST_DISCOVERER_MISSING_PLUGINS:{
      const GstStructure *s;
      gchar *str;
      s = gst_discoverer_info_get_misc (info);
      str = gst_structure_to_string (s);
      g_print ("Missing plugins: %s\n", str);
      g_free (str);
      break;
@@ -120,65 +120,65 @@ static void on_discovered_cb (GstDiscoverer *discoverer, GstDiscovererInfo *info
      g_print ("Discovered '%s'\n", uri);
      break;
  }
  if (result != GST_DISCOVERER_OK) {
    g_printerr ("This URI cannot be played\n");
    return;
  }
  /* If we got no error, show the retrieved information */
  g_print ("\nDuration: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (gst_discoverer_info_get_duration (info)));
  tags = gst_discoverer_info_get_tags (info);
  if (tags) {
    g_print ("Tags:\n");
    gst_tag_list_foreach (tags, print_tag_foreach, GINT_TO_POINTER (1));
  }
  g_print ("Seekable: %s\n", (gst_discoverer_info_get_seekable (info) ? "yes" : "no"));
  g_print ("\n");
  sinfo = gst_discoverer_info_get_stream_info (info);
  if (!sinfo)
    return;
  g_print ("Stream information:\n");
  print_topology (sinfo, 1);
  gst_discoverer_stream_info_unref (sinfo);
  g_print ("\n");
}
/* This function is called when the discoverer has finished examining
 * all the URIs we provided.*/
static void on_finished_cb (GstDiscoverer *discoverer, CustomData *data) {
  g_print ("Finished discovering\n");
  g_main_loop_quit (data->loop);
}
int main (int argc, char **argv) {
  CustomData data;
  GError *err = NULL;
  gchar *uri = "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm";
  /* if a URI was provided, use it instead of the default one */
  if (argc > 1) {
    uri = argv[1];
  }
  /* Initialize custom data structure */
  memset (&data, 0, sizeof (data));
  /* Initialize GStreamer */
  gst_init (&argc, &argv);
  g_print ("Discovering '%s'\n", uri);
  /* Instantiate the Discoverer */
  data.discoverer = gst_discoverer_new (5 * GST_SECOND, &err);
  if (!data.discoverer) {
@@ -186,31 +186,31 @@ int main (int argc, char **argv) {
    g_clear_error (&err);
    return -1;
  }
  /* Connect to the interesting signals */
  g_signal_connect (data.discoverer, "discovered", G_CALLBACK (on_discovered_cb), &data);
  g_signal_connect (data.discoverer, "finished", G_CALLBACK (on_finished_cb), &data);
  /* Start the discoverer process (nothing to do yet) */
  gst_discoverer_start (data.discoverer);
  /* Add a request to process asynchronously the URI passed through the command line */
  if (!gst_discoverer_discover_uri_async (data.discoverer, uri)) {
    g_print ("Failed to start discovering URI '%s'\n", uri);
    g_object_unref (data.discoverer);
    return -1;
  }
  /* Create a GLib Main Loop and set it to run, so we can wait for the signals */
  data.loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.loop);
  /* Stop the discoverer process */
  gst_discoverer_stop (data.discoverer);
  /* Free resources */
  g_object_unref (data.discoverer);
  g_main_loop_unref (data.loop);
  return 0;
}

[5 binary image files changed — not shown; Before/After size: 3.4 KiB each]

View file

@@ -1,221 +1,221 @@
#include <stdio.h>
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
  GstElement *playbin; /* Our one and only element */
  gint n_video; /* Number of embedded video streams */
  gint n_audio; /* Number of embedded audio streams */
  gint n_text; /* Number of embedded subtitle streams */
  gint current_video; /* Currently playing video stream */
  gint current_audio; /* Currently playing audio stream */
  gint current_text; /* Currently playing subtitle stream */
  GMainLoop *main_loop; /* GLib's Main Loop */
} CustomData;
/* playbin flags */
typedef enum {
  GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
  GST_PLAY_FLAG_AUDIO = (1 << 1), /* We want audio output */
  GST_PLAY_FLAG_TEXT = (1 << 2) /* We want subtitle output */
} GstPlayFlags;
/* Forward definition for the message and keyboard processing functions */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data);
int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstStateChangeReturn ret;
  gint flags;
  GIOChannel *io_stdin;
  /* Initialize GStreamer */
  gst_init (&argc, &argv);
  /* Create the elements */
  data.playbin = gst_element_factory_make ("playbin", "playbin");
  if (!data.playbin) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }
  /* Set the URI to play */
  g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_cropped_multilingual.webm", NULL);
  /* Set flags to show Audio and Video but ignore Subtitles */
  g_object_get (data.playbin, "flags", &flags, NULL);
  flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO;
  flags &= ~GST_PLAY_FLAG_TEXT;
  g_object_set (data.playbin, "flags", flags, NULL);
  /* Set connection speed. This will affect some internal decisions of playbin */
  g_object_set (data.playbin, "connection-speed", 56, NULL);
  /* Add a bus watch, so we get notified when a message arrives */
  bus = gst_element_get_bus (data.playbin);
  gst_bus_add_watch (bus, (GstBusFunc)handle_message, &data);
  /* Add a keyboard watch so we get notified of keystrokes */
#ifdef G_OS_WIN32
  io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
#else
  io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
  g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
  /* Start playing */
  ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.playbin);
    return -1;
  }
  /* Create a GLib Main Loop and set it to run */
  data.main_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.main_loop);
  /* Free resources */
  g_main_loop_unref (data.main_loop);
  g_io_channel_unref (io_stdin);
  gst_object_unref (bus);
  gst_element_set_state (data.playbin, GST_STATE_NULL);
  gst_object_unref (data.playbin);
  return 0;
}
/* Extract some metadata from the streams and print it on the screen */
static void analyze_streams (CustomData *data) {
  gint i;
  GstTagList *tags;
  gchar *str;
  guint rate;
  /* Read some properties */
  g_object_get (data->playbin, "n-video", &data->n_video, NULL);
  g_object_get (data->playbin, "n-audio", &data->n_audio, NULL);
  g_object_get (data->playbin, "n-text", &data->n_text, NULL);
  g_print ("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
    data->n_video, data->n_audio, data->n_text);
  g_print ("\n");
  for (i = 0; i < data->n_video; i++) {
    tags = NULL;
    /* Retrieve the stream's video tags */
    g_signal_emit_by_name (data->playbin, "get-video-tags", i, &tags);
    if (tags) {
      g_print ("video stream %d:\n", i);
      gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str);
      g_print (" codec: %s\n", str ? str : "unknown");
      g_free (str);
      gst_tag_list_unref (tags);
    }
  }
  g_print ("\n");
  for (i = 0; i < data->n_audio; i++) {
    tags = NULL;
    /* Retrieve the stream's audio tags */
    g_signal_emit_by_name (data->playbin, "get-audio-tags", i, &tags);
    if (tags) {
      g_print ("audio stream %d:\n", i);
      if (gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &str)) {
        g_print (" codec: %s\n", str);
        g_free (str);
      }
      if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
        g_print (" language: %s\n", str);
        g_free (str);
      }
      if (gst_tag_list_get_uint (tags, GST_TAG_BITRATE, &rate)) {
        g_print (" bitrate: %d\n", rate);
      }
      gst_tag_list_unref (tags);
    }
  }
  g_print ("\n");
  for (i = 0; i < data->n_text; i++) {
    tags = NULL;
    /* Retrieve the stream's subtitle tags */
    g_signal_emit_by_name (data->playbin, "get-text-tags", i, &tags);
    if (tags) {
      g_print ("subtitle stream %d:\n", i);
      if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
        g_print (" language: %s\n", str);
        g_free (str);
      }
      gst_tag_list_unref (tags);
    }
  }
  g_object_get (data->playbin, "current-video", &data->current_video, NULL);
  g_object_get (data->playbin, "current-audio", &data->current_audio, NULL);
  g_object_get (data->playbin, "current-text", &data->current_text, NULL);
  g_print ("\n");
  g_print ("Currently playing video stream %d, audio stream %d and text stream %d\n",
    data->current_video, data->current_audio, data->current_text);
  g_print ("Type any number and hit ENTER to select a different audio stream\n");
}
/* Process messages from GStreamer */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error (msg, &err, &debug_info);
      g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
      g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
      g_clear_error (&err);
      g_free (debug_info);
      g_main_loop_quit (data->main_loop);
      break;
    case GST_MESSAGE_EOS:
      g_print ("End-Of-Stream reached.\n");
      g_main_loop_quit (data->main_loop);
      break;
    case GST_MESSAGE_STATE_CHANGED: {
      GstState old_state, new_state, pending_state;
      gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
      if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
        if (new_state == GST_STATE_PLAYING) {
          /* Once we are in the playing state, analyze the streams */
          analyze_streams (data);
        }
      }
    } break;
    default:
      break;
  }
  /* We want to keep receiving messages */
  return TRUE;
}
/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
  gchar *str = NULL;
  if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
    int index = g_ascii_strtoull (str, NULL, 0);
    if (index < 0 || index >= data->n_audio) {
      g_printerr ("Index out of bounds\n");
    } else {
      /* If the input was a valid audio stream index, set the current audio stream */
      g_print ("Setting current audio stream to %d\n", index);
      g_object_set (data->playbin, "current-audio", index, NULL);
    }
  }
  g_free (str);
  return TRUE;
}

View file

@@ -1,223 +1,223 @@
#include <stdio.h>
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
  GstElement *playbin; /* Our one and only element */
  gint n_video; /* Number of embedded video streams */
  gint n_audio; /* Number of embedded audio streams */
  gint n_text; /* Number of embedded subtitle streams */
  gint current_video; /* Currently playing video stream */
  gint current_audio; /* Currently playing audio stream */
  gint current_text; /* Currently playing subtitle stream */
  GMainLoop *main_loop; /* GLib's Main Loop */
} CustomData;
/* playbin flags */
typedef enum {
  GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
  GST_PLAY_FLAG_AUDIO = (1 << 1), /* We want audio output */
  GST_PLAY_FLAG_TEXT = (1 << 2) /* We want subtitle output */
} GstPlayFlags;
/* Forward definition for the message and keyboard processing functions */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data);
int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstStateChangeReturn ret;
  gint flags;
  GIOChannel *io_stdin;
  /* Initialize GStreamer */
  gst_init (&argc, &argv);
  /* Create the elements */
  data.playbin = gst_element_factory_make ("playbin", "playbin");
  if (!data.playbin) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }
  /* Set the URI to play */
  g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.ogv", NULL);
  /* Set the subtitle URI to play and some font description */
  g_object_set (data.playbin, "suburi", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer_gr.srt", NULL);
  g_object_set (data.playbin, "subtitle-font-desc", "Sans, 18", NULL);
  /* Set flags to show Audio, Video and Subtitles */
  g_object_get (data.playbin, "flags", &flags, NULL);
  flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_TEXT;
  g_object_set (data.playbin, "flags", flags, NULL);
  /* Add a bus watch, so we get notified when a message arrives */
  bus = gst_element_get_bus (data.playbin);
  gst_bus_add_watch (bus, (GstBusFunc)handle_message, &data);
  /* Add a keyboard watch so we get notified of keystrokes */
#ifdef G_OS_WIN32
  io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
#else
  io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
  g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
  /* Start playing */
  ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.playbin);
    return -1;
  }
  /* Create a GLib Main Loop and set it to run */
  data.main_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.main_loop);
  /* Free resources */
  g_main_loop_unref (data.main_loop);
  g_io_channel_unref (io_stdin);
  gst_object_unref (bus);
  gst_element_set_state (data.playbin, GST_STATE_NULL);
  gst_object_unref (data.playbin);
  return 0;
}
/* Extract some metadata from the streams and print it on the screen */
static void analyze_streams (CustomData *data) {
  gint i;
  GstTagList *tags;
  gchar *str;
  guint rate;
  /* Read some properties */
  g_object_get (data->playbin, "n-video", &data->n_video, NULL);
  g_object_get (data->playbin, "n-audio", &data->n_audio, NULL);
  g_object_get (data->playbin, "n-text", &data->n_text, NULL);
  g_print ("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
    data->n_video, data->n_audio, data->n_text);
  g_print ("\n");
  for (i = 0; i < data->n_video; i++) {
    tags = NULL;
    /* Retrieve the stream's video tags */
    g_signal_emit_by_name (data->playbin, "get-video-tags", i, &tags);
    if (tags) {
      g_print ("video stream %d:\n", i);
      gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str);
      g_print (" codec: %s\n", str ? str : "unknown");
      g_free (str);
      gst_tag_list_free (tags);
    }
  }
  g_print ("\n");
  for (i = 0; i < data->n_audio; i++) {
    tags = NULL;
    /* Retrieve the stream's audio tags */
    g_signal_emit_by_name (data->playbin, "get-audio-tags", i, &tags);
    if (tags) {
      g_print ("audio stream %d:\n", i);
      if (gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &str)) {
        g_print (" codec: %s\n", str);
        g_free (str);
      }
      if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
        g_print (" language: %s\n", str);
        g_free (str);
      }
      if (gst_tag_list_get_uint (tags, GST_TAG_BITRATE, &rate)) {
        g_print (" bitrate: %d\n", rate);
      }
      gst_tag_list_free (tags);
    }
  }
  g_print ("\n");
  for (i = 0; i < data->n_text; i++) {
    tags = NULL;
    /* Retrieve the stream's subtitle tags */
    g_print ("subtitle stream %d:\n", i);
    g_signal_emit_by_name (data->playbin, "get-text-tags", i, &tags);
    if (tags) {
      if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
        g_print (" language: %s\n", str);
        g_free (str);
      }
      gst_tag_list_free (tags);
    } else {
      g_print (" no tags found\n");
    }
  }
  g_object_get (data->playbin, "current-video", &data->current_video, NULL);
  g_object_get (data->playbin, "current-audio", &data->current_audio, NULL);
  g_object_get (data->playbin, "current-text", &data->current_text, NULL);
  g_print ("\n");
  g_print ("Currently playing video stream %d, audio stream %d and subtitle stream %d\n",
    data->current_video, data->current_audio, data->current_text);
  g_print ("Type any number and hit ENTER to select a different subtitle stream\n");
}
/* Process messages from GStreamer */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error (msg, &err, &debug_info);
      g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
      g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
      g_clear_error (&err);
      g_free (debug_info);
      g_main_loop_quit (data->main_loop);
      break;
    case GST_MESSAGE_EOS:
      g_print ("End-Of-Stream reached.\n");
      g_main_loop_quit (data->main_loop);
      break;
    case GST_MESSAGE_STATE_CHANGED: {
      GstState old_state, new_state, pending_state;
      gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
      if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
        if (new_state == GST_STATE_PLAYING) {
          /* Once we are in the playing state, analyze the streams */
          analyze_streams (data);
        }
      }
    } break;
    default:
      break;
  }
  /* We want to keep receiving messages */
  return TRUE;
}
/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
  gchar *str = NULL;
  if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
    int index = g_ascii_strtoull (str, NULL, 0);
    if (index < 0 || index >= data->n_text) {
      g_printerr ("Index out of bounds\n");
    } else {
      /* If the input was a valid subtitle stream index, set the current subtitle stream */
      g_print ("Setting current subtitle stream to %d\n", index);
      g_object_set (data->playbin, "current-text", index, NULL);
    }
  }
  g_free (str);
  return TRUE;
}

View file

@@ -1,23 +1,23 @@
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include <string.h>
#define CHUNK_SIZE 1024 /* Amount of bytes we are sending in each buffer */
#define SAMPLE_RATE 44100 /* Samples per second we are sending */
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *app_source;
  guint64 num_samples; /* Number of samples generated so far (for timestamp generation) */
  gfloat a, b, c, d; /* For waveform generation */
  guint sourceid; /* To control the GSource */
  GMainLoop *main_loop; /* GLib's Main Loop */
} CustomData;
/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
 * The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
 * and is removed when appsrc has enough data (enough-data signal).
@@ -30,14 +30,14 @@ static gboolean push_data (CustomData *data) {
  gint16 *raw;
  gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
  gfloat freq;
  /* Create a new empty buffer */
  buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);
  /* Set its timestamp and duration */
  GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples, GST_SECOND, SAMPLE_RATE);
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (CHUNK_SIZE, GST_SECOND, SAMPLE_RATE);
  /* Generate some psychedelic waveforms */
  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  raw = (gint16 *)map.data;
@@ -51,21 +51,21 @@ static gboolean push_data (CustomData *data) {
  }
  gst_buffer_unmap (buffer, &map);
  data->num_samples += num_samples;
  /* Push the buffer into the appsrc */
  g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);
  /* Free the buffer now that we are done with it */
  gst_buffer_unref (buffer);
  if (ret != GST_FLOW_OK) {
    /* We got some error, stop sending data */
    return FALSE;
  }
  return TRUE;
}
/* This signal callback triggers when appsrc needs data. Here, we add an idle handler
 * to the mainloop to start pushing data into the appsrc */
static void start_feed (GstElement *source, guint size, CustomData *data) {
@@ -74,7 +74,7 @@ static void start_feed (GstElement *source, guint size, CustomData *data) {
    data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
  }
}
/* This callback triggers when appsrc has enough data and we can stop sending.
 * We remove the idle handler from the mainloop */
static void stop_feed (GstElement *source, CustomData *data) {
@@ -84,31 +84,31 @@ static void stop_feed (GstElement *source, CustomData *data) {
    data->sourceid = 0;
  }
}
/* This function is called when an error message is posted on the bus */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;
  /* Print error details on the screen */
  gst_message_parse_error (msg, &err, &debug_info);
  g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
  g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
  g_clear_error (&err);
  g_free (debug_info);
  g_main_loop_quit (data->main_loop);
}
/* This function is called when playbin has created the appsrc element, so we have
 * a chance to configure it. */
static void source_setup (GstElement *pipeline, GstElement *source, CustomData *data) {
  GstAudioInfo info;
  GstCaps *audio_caps;
  g_print ("Source has been created. Configuring.\n");
  data->app_source = source;
  /* Configure appsrc */
  gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, SAMPLE_RATE, 1, NULL);
  audio_caps = gst_audio_info_to_caps (&info);
@@ -117,36 +117,36 @@ static void source_setup (GstElement *pipeline, GstElement *source, CustomData *
  g_signal_connect (source, "enough-data", G_CALLBACK (stop_feed), data);
  gst_caps_unref (audio_caps);
}
int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  /* Initialize custom data structure */
  memset (&data, 0, sizeof (data));
  data.b = 1; /* For waveform generation */
  data.d = 1;
  /* Initialize GStreamer */
  gst_init (&argc, &argv);
  /* Create the playbin element */
  data.pipeline = gst_parse_launch ("playbin uri=appsrc://", NULL);
  g_signal_connect (data.pipeline, "source-setup", G_CALLBACK (source_setup), &data);
  /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
  bus = gst_element_get_bus (data.pipeline);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
  gst_object_unref (bus);
  /* Start playing the pipeline */
  gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  /* Create a GLib Main Loop and set it to run */
  data.main_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.main_loop);
  /* Free resources */
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);

Some files were not shown because too many files have changed in this diff.