Remove 'GStreamer SDK documentation' in all titles

And pass all the markdown files through `git stripspace`
This commit is contained in:
Thibault Saunier 2016-05-26 22:21:04 -04:00
parent 7619e42870
commit 7d5fe14134
64 changed files with 775 additions and 854 deletions

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : 2012.11 Brahmaputra
# 2012.11 Brahmaputra
This page last changed on Nov 28, 2012 by slomo.
@ -74,11 +74,11 @@ the following development environments
- Microsoft Visual Studio 2010 or 2012 (including the free Visual C++
Express
edition)
  <http://www.microsoft.com/visualstudio/eng/products/visual-studio-overview>
- MinGW/MSYS
  [http://mingw.org](http://mingw.org/)
For installation instructions and development environment setup
@ -282,4 +282,3 @@ Bug
tracker: <https://bugs.freedesktop.org/enter_bug.cgi?product=GStreamer%20SDK>
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : 2012.5 Amazon
# 2012.5 Amazon
This page last changed on Jun 15, 2012 by slomo.
@ -64,11 +64,11 @@ the following development environments
- Microsoft Visual Studio 2010 (including the free Visual C++ Express
edition)
  <http://www.microsoft.com/visualstudio/en-us/products/2010-editions>
- MinGW/MSYS
  [http://mingw.org](http://mingw.org/)
For installation instructions and development environment setup
@ -121,8 +121,6 @@ These use-cases are currently not officially supported by the GStreamer
SDK but will usually work and will be officially supported in future
releases of the GStreamer SDK.
The GStreamer SDK Amazon contains the following major components, some
of them being optional or not used on some platforms. 
@ -246,4 +244,3 @@ tracker: <https://bugs.freedesktop.org/enter_bug.cgi?product=GStreamer%20SDK>
 
Document generated by Confluence on Oct 08, 2015 10:28

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : 2012.7 Amazon (Bugfix Release 1)
# 2012.7 Amazon (Bugfix Release 1)
This page last changed on Jul 11, 2012 by slomo.
@ -64,11 +64,11 @@ the following development environments
- Microsoft Visual Studio 2010 (including the free Visual C++ Express
edition)
  <http://www.microsoft.com/visualstudio/en-us/products/2010-editions>
- MinGW/MSYS
  [http://mingw.org](http://mingw.org/)
For installation instructions and development environment setup
@ -138,8 +138,6 @@ These use-cases are currently not officially supported by the GStreamer
SDK but will usually work and will be officially supported in future
releases of the GStreamer SDK.
The GStreamer SDK Amazon contains the following major components, some
of them being optional or not used on some platforms. 
@ -264,4 +262,3 @@ tracker: <https://bugs.freedesktop.org/enter_bug.cgi?product=GStreamer%20SDK>
 
Document generated by Confluence on Oct 08, 2015 10:28

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : 2012.9 Amazon (Bugfix Release 2)
# 2012.9 Amazon (Bugfix Release 2)
This page last changed on Sep 18, 2012 by ylatuya.
@ -64,11 +64,11 @@ the following development environments
- Microsoft Visual Studio 2010 (including the free Visual C++ Express
edition)
  <http://www.microsoft.com/visualstudio/en-us/products/2010-editions>
- MinGW/MSYS
  [http://mingw.org](http://mingw.org/)
For installation instructions and development environment setup
@ -136,8 +136,6 @@ These use-cases are currently not officially supported by the GStreamer
SDK but will usually work and will be officially supported in future
releases of the GStreamer SDK.
The GStreamer SDK Amazon contains the following major components, some
of them being optional or not used on some platforms. 
@ -262,4 +260,3 @@ tracker: <https://bugs.freedesktop.org/enter_bug.cgi?product=GStreamer%20SDK>
 
Document generated by Confluence on Oct 08, 2015 10:28

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : 2013.6 Congo
# 2013.6 Congo
This page last changed on Jun 11, 2013 by ylatuya.
@ -76,11 +76,11 @@ the following development environments
- Microsoft Visual Studio 2010 or 2012 (including the free Visual C++
Express
edition)
  <http://www.microsoft.com/visualstudio/eng/products/visual-studio-overview>
- MinGW/MSYS
  [http://mingw.org](http://mingw.org/)
For installation instructions and development environment setup
@ -303,4 +303,3 @@ Bug
tracker: <https://bugs.freedesktop.org/enter_bug.cgi?product=GStreamer%20SDK>
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Android tutorial 1: Link against GStreamer
# Android tutorial 1: Link against GStreamer
This page last changed on May 02, 2013 by xartigas.
@ -28,42 +28,42 @@ makefile that allows GStreamer integration.
``` theme: Default; brush: java; gutter: true
package com.gst_sdk_tutorials.tutorial_1;
import android.app.Activity;
import android.os.Bundle;
import android.widget.TextView;
import android.widget.Toast;
import com.gstreamer.GStreamer;
public class Tutorial1 extends Activity {
private native String nativeGetGStreamerInfo();
// Called when the activity is first created.
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
try {
GStreamer.init(this);
} catch (Exception e) {
Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
finish();
finish();
return;
}
setContentView(R.layout.main);
TextView tv = (TextView)findViewById(R.id.textview_info);
tv.setText("Welcome to " + nativeGetGStreamerInfo() + " !");
}
static {
System.loadLibrary("gstreamer_android");
System.loadLibrary("tutorial-1");
}
}
```
@ -100,13 +100,13 @@ initializes GStreamer and registers all plugins (The tutorial library is
explained later below).
``` first-line: 19; theme: Default; brush: java; gutter: true
try {
GStreamer.init(this);
} catch (Exception e) {
try {
GStreamer.init(this);
} catch (Exception e) {
Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
finish();
return;
}
finish();
return;
}
```
Next, in the `OnCreate()` method of the
@ -123,8 +123,8 @@ Should initialization fail, the `init()` method would throw an
with the details provided by the GStreamer library.
``` first-line: 29; theme: Default; brush: java; gutter: true
TextView tv = (TextView)findViewById(R.id.textview_info);
tv.setText("Welcome to " + nativeGetGStreamerInfo() + " !");
TextView tv = (TextView)findViewById(R.id.textview_info);
tv.setText("Welcome to " + nativeGetGStreamerInfo() + " !");
```
Then, the native method `nativeGetGStreamerInfo()` is called and a
@ -181,7 +181,7 @@ with Java:
``` first-line: 21; theme: Default; brush: cpp; gutter: true
JNIEnv *env = NULL;
if ((*vm)->GetEnv(vm, (void**) &env, JNI_VERSION_1_4) != JNI_OK) {
__android_log_print (ANDROID_LOG_ERROR, "tutorial-1", "Could not retrieve JNIEnv");
return 0;
@ -285,13 +285,12 @@ As usual, it has been a pleasure having you here, and see you soon\!
![](images/icons/bullet_blue.gif)
[tutorial1-screenshot.png](attachments/2687057/2654411.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[tutorial1-screenshot.png](attachments/2687057/2654416.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[tutorial1-screenshot.png](attachments/2687057/2654326.png)
(image/png)
(image/png)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Android tutorial 2: A running pipeline
# Android tutorial 2: A running pipeline
This page last changed on May 07, 2013 by xartigas.
@ -91,7 +91,7 @@ public class Tutorial2 extends Activity {
GStreamer.init(this);
} catch (Exception e) {
Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
finish();
finish();
return;
}
@ -684,7 +684,7 @@ static jboolean gst_native_class_init (JNIEnv* env, jclass klass) {
custom_data_field_id = (*env)->GetFieldID (env, klass, "native_custom_data", "J");
set_message_method_id = (*env)->GetMethodID (env, klass, "setMessage", "(Ljava/lang/String;)V");
on_gstreamer_initialized_method_id = (*env)->GetMethodID (env, klass, "onGStreamerInitialized", "()V");
if (!custom_data_field_id || !set_message_method_id || !on_gstreamer_initialized_method_id) {
/* We emit this message through the Android log instead of the GStreamer log because the later
* has not been initialized yet.
@ -741,7 +741,7 @@ pthread_create (&gst_app_thread, NULL, &app_function, data);
Finally, a thread is created and it starts running the
`app_function()` method.
### `app_function()`
### `app_function()`
``` first-line: 134; theme: Default; brush: cpp; gutter: true
/* Main method for the native code. This is executed on its own thread. */
@ -751,9 +751,9 @@ static void *app_function (void *userdata) {
CustomData *data = (CustomData *)userdata;
GSource *bus_source;
GError *error = NULL;
GST_DEBUG ("Creating pipeline in CustomData at %p", data);
/* Create our own GLib Main Context and make it the default one */
data->context = g_main_context_new ();
g_main_context_push_thread_default(data->context);
@ -1051,16 +1051,15 @@ As usual, it has been a pleasure having you here, and see you soon\!
![](images/icons/bullet_blue.gif)
[tutorial2-screenshot.png](attachments/2687063/2654325.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[tutorial2-screenshot.png](attachments/2687063/2654412.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[tutorial2-screenshot.png](attachments/2687063/2654417.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[tutorial2-screenshot.png](attachments/2687063/2654324.png)
(image/png)
(image/png)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Android tutorial 3: Video
# Android tutorial 3: Video
This page last changed on Nov 05, 2012 by xartigas.
@ -77,7 +77,7 @@ public class Tutorial3 extends Activity implements SurfaceHolder.Callback {
GStreamer.init(this);
} catch (Exception e) {
Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
finish();
finish();
return;
}
@ -917,16 +917,15 @@ It has been a pleasure having you here, and see you soon\!
![](images/icons/bullet_blue.gif)
[tutorial3-screenshot.png](attachments/2687065/2654414.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[tutorial3-screenshot.png](attachments/2687065/2654415.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[tutorial3-screenshot.png](attachments/2687065/2654418.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[tutorial3-screenshot.png](attachments/2687065/2654413.png)
(image/png)
(image/png)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Android tutorial 4: A basic media player
# Android tutorial 4: A basic media player
This page last changed on May 21, 2013 by xartigas.
@ -103,7 +103,7 @@ public class Tutorial4 extends Activity implements SurfaceHolder.Callback, OnSee
GStreamer.init(this);
} catch (Exception e) {
Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
finish();
finish();
return;
}
@ -402,7 +402,7 @@ private void updateTimeWidget () {
final TextView tv = (TextView) this.findViewById(R.id.textview_time);
final SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);
final int pos = sb.getProgress();
SimpleDateFormat df = new SimpleDateFormat("HH:mm:ss");
df.setTimeZone(TimeZone.getTimeZone("UTC"));
final String message = df.format(new Date (pos)) + " / " + df.format(new Date (duration));
@ -1428,7 +1428,6 @@ As usual, it has been a pleasure having you here, and see you soon\!
![](images/icons/bullet_blue.gif)
[tutorial4-screenshot.png](attachments/2687067/2654419.png)
(image/png)
(image/png)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Android tutorial 5: A Complete media player
# Android tutorial 5: A Complete media player
This page last changed on Nov 28, 2012 by xartigas.
@ -104,11 +104,10 @@ It has been a pleasure having you here, and see you soon\!
![](images/icons/bullet_blue.gif)
[tutorial5-screenshot.png](attachments/2687069/2654436.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[ic\_media\_next.png](attachments/2687069/2654438.png) (image/png)
[ic\_media\_next.png](attachments/2687069/2654438.png) (image/png)
![](images/icons/bullet_blue.gif)
[ic\_media\_next.png](attachments/2687069/2654437.png) (image/png)
[ic\_media\_next.png](attachments/2687069/2654437.png) (image/png)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Android tutorials
# Android tutorials
This page last changed on May 02, 2013 by xartigas.
@ -35,4 +35,3 @@ files
in `$(ANDROID_NDK_ROOT)\platforms\android-9\arch-arm\usr\include\android`.
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Basic Media Player
# Basic Media Player
This page last changed on May 24, 2013 by xartigas.
@ -874,4 +874,3 @@ This tutorial has shown:
It has been a pleasure having you here, and see you soon\!
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# Basic tutorial 1: Hello world!
# Basic tutorial 1: Hello world!
## Goal
@ -23,25 +23,25 @@ in the SDK installation).
```
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *pipeline;
GstBus *bus;
GstMessage *msg;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Build the pipeline */
pipeline = gst_parse_launch ("playbin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
@ -240,4 +240,3 @@ The next tutorial will keep introducing more basic GStreamer elements,
and show you how to build a pipeline manually.
It has been a pleasure having you here, and see you soon!

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Basic tutorial 10: GStreamer tools
# Basic tutorial 10: GStreamer tools
This page last changed on Jun 01, 2012 by xartigas.
@ -364,7 +364,7 @@ Element Properties:
(0x00000004): addnoise - Add noise
deblocking-level : Deblocking level
flags: readable, writable
Unsigned Integer. Range: 0 - 16 Default: 4
Unsigned Integer. Range: 0 - 16 Default: 4
noise-level : Noise level
flags: readable, writable
Unsigned Integer. Range: 0 - 16 Default: 0  
@ -465,4 +465,3 @@ This tutorial has shown:
It has been a pleasure having you here, and see you soon\!
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Basic tutorial 11: Debugging tools
# Basic tutorial 11: Debugging tools
This page last changed on Jun 04, 2012 by xartigas.
@ -226,7 +226,6 @@ It has been a pleasure having you here, and see you soon\!
## Attachments:
![](images/icons/bullet_blue.gif)
[playbin2.png](attachments/327830/2424840.png) (image/png)
[playbin2.png](attachments/327830/2424840.png) (image/png)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Basic tutorial 12: Streaming
# Basic tutorial 12: Streaming
This page last changed on Sep 28, 2012 by xartigas.
@ -65,25 +65,25 @@ Copy this code into a text file named `basic-tutorial-12.c`.
``` theme: Default; brush: cpp; gutter: true
#include <gst/gst.h>
#include <string.h>
typedef struct _CustomData {
gboolean is_live;
GstElement *pipeline;
GMainLoop *loop;
} CustomData;
static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR: {
GError *err;
gchar *debug;
gst_message_parse_error (msg, &err, &debug);
g_print ("Error: %s\n", err->message);
g_error_free (err);
g_free (debug);
gst_element_set_state (data->pipeline, GST_STATE_READY);
g_main_loop_quit (data->loop);
break;
@ -95,10 +95,10 @@ static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {
break;
case GST_MESSAGE_BUFFERING: {
gint percent = 0;
/* If the stream is live, we do not care about buffering. */
if (data->is_live) break;
gst_message_parse_buffering (msg, &percent);
g_print ("Buffering (%3d%%)\r", percent);
/* Wait until buffering is complete before start/resume playing */
@ -118,24 +118,24 @@ static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {
break;
}
}
int main(int argc, char *argv[]) {
GstElement *pipeline;
GstBus *bus;
GstStateChangeReturn ret;
GMainLoop *main_loop;
CustomData data;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Initialize our data structure */
memset (&data, 0, sizeof (data));
/* Build the pipeline */
pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
bus = gst_element_get_bus (pipeline);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
@ -145,16 +145,16 @@ int main(int argc, char *argv[]) {
} else if (ret == GST_STATE_CHANGE_NO_PREROLL) {
data.is_live = TRUE;
}
main_loop = g_main_loop_new (NULL, FALSE);
data.loop = main_loop;
data.pipeline = pipeline;
gst_bus_add_signal_watch (bus);
g_signal_connect (bus, "message", G_CALLBACK (cb_message), &data);
g_main_loop_run (main_loop);
/* Free resources */
g_main_loop_unref (main_loop);
gst_object_unref (bus);
@ -224,10 +224,10 @@ Lets now review the interesting parts of the message parsing callback:
``` first-line: 31; theme: Default; brush: cpp; gutter: true
case GST_MESSAGE_BUFFERING: {
gint percent = 0;
/* If the stream is live, we do not care about buffering. */
if (data->is_live) break;
gst_message_parse_buffering (msg, &percent);
g_print ("Buffering (%3d%%)\r", percent);
/* Wait until buffering is complete before start/resume playing */
@ -282,9 +282,8 @@ It has been a pleasure having you here, and see you soon\!
## Attachments:
![](images/icons/bullet_blue.gif)
[basic-tutorial-12.c](attachments/327806/2424843.c) (text/plain)
[basic-tutorial-12.c](attachments/327806/2424843.c) (text/plain)
![](images/icons/bullet_blue.gif)
[vs2010.zip](attachments/327806/2424844.zip) (application/zip)
[vs2010.zip](attachments/327806/2424844.zip) (application/zip)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Basic tutorial 13: Playback speed
# Basic tutorial 13: Playback speed
This page last changed on Jul 06, 2012 by xartigas.
@ -72,28 +72,28 @@ Copy this code into a text file named `basic-tutorial-13.c`.
``` theme: Default; brush: cpp; gutter: true
#include <string.h>
#include <gst/gst.h>
typedef struct _CustomData {
GstElement *pipeline;
GstElement *video_sink;
GMainLoop *loop;
gboolean playing; /* Playing or Paused */
gdouble rate; /* Current playback rate (can be negative) */
} CustomData;
/* Send seek event to change rate */
static void send_seek_event (CustomData *data) {
gint64 position;
GstFormat format = GST_FORMAT_TIME;
GstEvent *seek_event;
/* Obtain the current position, needed for the seek event */
if (!gst_element_query_position (data->pipeline, &format, &position)) {
g_printerr ("Unable to retrieve current position.\n");
return;
}
/* Create the seek event */
if (data->rate > 0) {
seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
@ -102,26 +102,26 @@ static void send_seek_event (CustomData *data) {
seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, position);
}
if (data->video_sink == NULL) {
/* If we have not done so, obtain the sink through which we will send the seek events */
g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
}
/* Send the event */
gst_element_send_event (data->video_sink, seek_event);
g_print ("Current rate: %g\n", data->rate);
}
/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
gchar *str = NULL;
if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) != G_IO_STATUS_NORMAL) {
return TRUE;
}
switch (g_ascii_tolower (str[0])) {
case 'p':
data->playing = !data->playing;
@ -145,7 +145,7 @@ static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomDa
/* If we have not done so, obtain the sink through which we will send the step events */
g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
}
gst_element_send_event (data->video_sink,
gst_event_new_step (GST_FORMAT_BUFFERS, 1, data->rate, TRUE, FALSE));
g_print ("Stepping one frame\n");
@ -156,23 +156,23 @@ static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomDa
default:
break;
}
g_free (str);
return TRUE;
}
int main(int argc, char *argv[]) {
CustomData data;
GstStateChangeReturn ret;
GIOChannel *io_stdin;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Initialize our data structure */
memset (&data, 0, sizeof (data));
/* Print usage map */
g_print (
"USAGE: Choose one of the following options, then press enter:\n"
@ -181,10 +181,10 @@ int main(int argc, char *argv[]) {
" 'D' to toggle playback direction\n"
" 'N' to move to next frame (in the current direction, better in PAUSE)\n"
" 'Q' to quit\n");
/* Build the pipeline */
data.pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Add a keyboard watch so we get notified of keystrokes */
#ifdef _WIN32
io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
@ -192,7 +192,7 @@ int main(int argc, char *argv[]) {
io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
@ -202,11 +202,11 @@ int main(int argc, char *argv[]) {
}
data.playing = TRUE;
data.rate = 1.0;
/* Create a GLib Main Loop and set it to run */
data.loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (data.loop);
/* Free resources */
g_main_loop_unref (data.loop);
g_io_channel_unref (io_stdin);
@ -254,11 +254,11 @@ Then, in the keyboard handler function:
/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
gchar *str = NULL;
if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) != G_IO_STATUS_NORMAL) {
return TRUE;
}
switch (g_ascii_tolower (str[0])) {
case 'p':
data->playing = !data->playing;
@ -296,7 +296,7 @@ static void send_seek_event (CustomData *data) {
gint64 position;
GstFormat format = GST_FORMAT_TIME;
GstEvent *seek_event;
/* Obtain the current position, needed for the seek event */
if (!gst_element_query_position (data->pipeline, &format, &position)) {
g_printerr ("Unable to retrieve current position.\n");
@ -360,7 +360,7 @@ case 'n':
/* If we have not done so, obtain the sink through which we will send the step events */
g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
}
gst_element_send_event (data->video_sink,
gst_event_new_step (GST_FORMAT_BUFFERS, 1, data->rate, TRUE, FALSE));
g_print ("Stepping one frame\n");
@ -401,9 +401,8 @@ It has been a pleasure having you here, and see you soon\!
## Attachments:
![](images/icons/bullet_blue.gif)
[basic-tutorial-13.c](attachments/327800/2424883.c) (text/plain)
[basic-tutorial-13.c](attachments/327800/2424883.c) (text/plain)
![](images/icons/bullet_blue.gif)
[vs2010.zip](attachments/327800/2424884.zip) (application/zip)
[vs2010.zip](attachments/327800/2424884.zip) (application/zip)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Basic tutorial 14: Handy elements
# Basic tutorial 14: Handy elements
This page last changed on May 13, 2014 by xartigas.
@ -364,4 +364,3 @@ debugging purposes.
It has been a pleasure having you here, and see you soon\!
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Basic tutorial 15: Clutter integration
# Basic tutorial 15: Clutter integration
This page last changed on Jul 11, 2012 by xartigas.
@ -38,31 +38,31 @@ Copy this code into a text file named `basic-tutorial-15.c`..
``` theme: Default; brush: cpp; gutter: true
#include <clutter-gst/clutter-gst.h>
/* Setup the video texture once its size is known */
void size_change (ClutterActor *texture, gint width, gint height, gpointer user_data) {
ClutterActor *stage;
gfloat new_x, new_y, new_width, new_height;
gfloat stage_width, stage_height;
ClutterAnimation *animation = NULL;
stage = clutter_actor_get_stage (texture);
if (stage == NULL)
return;
clutter_actor_get_size (stage, &stage_width, &stage_height);
/* Center video on window and calculate new size preserving aspect ratio */
new_height = (height * stage_width) / width;
if (new_height <= stage_height) {
new_width = stage_width;
new_x = 0;
new_y = (stage_height - new_height) / 2;
} else {
new_width = (width * stage_height) / height;
new_height = stage_height;
new_x = (stage_width - new_width) / 2;
new_y = 0;
}
@ -73,31 +73,31 @@ void size_change (ClutterActor *texture, gint width, gint height, gpointer user_
animation = clutter_actor_animate (texture, CLUTTER_LINEAR, 10000, "rotation-angle-y", 360.0, NULL);
clutter_animation_set_loop (animation, TRUE);
}
int main(int argc, char *argv[]) {
GstElement *pipeline, *sink;
ClutterTimeline *timeline;
ClutterActor *stage, *texture;
/* clutter-gst takes care of initializing Clutter and GStreamer */
if (clutter_gst_init (&argc, &argv) != CLUTTER_INIT_SUCCESS) {
g_error ("Failed to initialize clutter\n");
return -1;
}
stage = clutter_stage_get_default ();
/* Make a timeline */
timeline = clutter_timeline_new (1000);
g_object_set(timeline, "loop", TRUE, NULL);
/* Create new texture and disable slicing so the video is properly mapped onto it */
texture = CLUTTER_ACTOR (g_object_new (CLUTTER_TYPE_TEXTURE, "disable-slicing", TRUE, NULL));
g_signal_connect (texture, "size-change", G_CALLBACK (size_change), NULL);
/* Build the GStreamer pipeline */
pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Instantiate the Clutter sink */
sink = gst_element_factory_make ("autocluttersink", NULL);
if (sink == NULL) {
@ -108,25 +108,25 @@ int main(int argc, char *argv[]) {
g_printerr ("Unable to find a Clutter sink.\n");
return -1;
}
/* Link GStreamer with Clutter by passing the Clutter texture to the Clutter sink*/
g_object_set (sink, "texture", texture, NULL);
/* Add the Clutter sink to the pipeline */
g_object_set (pipeline, "video-sink", sink, NULL);
/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* start the timeline */
clutter_timeline_start (timeline);
/* Add texture to the stage, and show it */
clutter_group_add (CLUTTER_GROUP (stage), texture);
clutter_actor_show_all (stage);
clutter_main();
/* Free resources */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
@ -252,4 +252,3 @@ This tutorial has shown:
It has been a pleasure having you here, and see you soon\!
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Basic tutorial 16: Platform-specific elements
# Basic tutorial 16: Platform-specific elements
This page last changed on May 30, 2013 by xartigas.
@ -209,4 +209,3 @@ instancing them manually.
It has been a pleasure having you here, and see you soon\!
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# Basic tutorial 2: GStreamer concepts
# Basic tutorial 2: GStreamer concepts
## Goal
@ -24,28 +24,28 @@ in the SDK installation).
```
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *sink;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("videotestsrc", "source");
sink = gst_element_factory_make ("autovideosink", "sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
if (gst_element_link (source, sink) != TRUE) {
@ -53,10 +53,10 @@ int main(int argc, char *argv[]) {
gst_object_unref (pipeline);
return -1;
}
/* Modify the source's properties */
g_object_set (source, "pattern", 0, NULL);
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
@ -64,16 +64,16 @@ int main(int argc, char *argv[]) {
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
@ -92,7 +92,7 @@ int main(int argc, char *argv[]) {
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
@ -101,7 +101,6 @@ int main(int argc, char *argv[]) {
}
```
> ![Information](images/icons/emoticons/information.png)
> Need help?
>
@ -253,12 +252,12 @@ pipelines](Basic+tutorial+3+Dynamic+pipelines.markdown).
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
@ -294,7 +293,7 @@ In this case, once we know the message contains an error (by using the
`GST_MESSAGE_TYPE()` macro), we can use
`gst_message_parse_error()` which returns a GLib `GError` error
structure and a string useful for debugging. Examine the code to see how
these are used and freed afterward.
these are used and freed afterward.
### The GStreamer bus
@ -349,4 +348,4 @@ concepts. The second one comes next.
Remember that attached to this page you should find the complete source
code of the tutorial and any accessory files needed to build it.
It has been a pleasure having you here, and see you soon!
It has been a pleasure having you here, and see you soon!

View file

@ -1,4 +1,4 @@
# Basic tutorial 3: Dynamic pipelines
# Basic tutorial 3: Dynamic pipelines
## Goal
@ -87,7 +87,7 @@ in the SDK installation).
```
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
@ -95,33 +95,33 @@ typedef struct _CustomData {
GstElement *convert;
GstElement *sink;
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.source = gst_element_factory_make ("uridecodebin", "source");
data.convert = gst_element_factory_make ("audioconvert", "convert");
data.sink = gst_element_factory_make ("autoaudiosink", "sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.source || !data.convert || !data.sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
* point. We will do it later. */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.sink, NULL);
@ -130,13 +130,13 @@ int main(int argc, char *argv[]) {
gst_object_unref (data.pipeline);
return -1;
}
/* Set the URI to play */
g_object_set (data.source, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
@ -144,18 +144,18 @@ int main(int argc, char *argv[]) {
gst_object_unref (data.pipeline);
return -1;
}
/* Listen to the bus */
bus = gst_element_get_bus (data.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
@ -186,14 +186,14 @@ int main(int argc, char *argv[]) {
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
return 0;
}
/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
@ -201,15 +201,15 @@ static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *dat
GstCaps *new_pad_caps = NULL;
GstStructure *new_pad_struct = NULL;
const gchar *new_pad_type = NULL;
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));
/* If our converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad)) {
g_print (" We are already linked. Ignoring.\n");
goto exit;
}
/* Check the new pad's type */
new_pad_caps = gst_pad_query_caps (new_pad, NULL);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
@ -218,7 +218,7 @@ static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *dat
g_print (" It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
goto exit;
}
/* Attempt the link */
ret = gst_pad_link (new_pad, sink_pad);
if (GST_PAD_LINK_FAILED (ret)) {
@ -226,18 +226,17 @@ static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *dat
} else {
g_print (" Link succeeded (type '%s').\n", new_pad_type);
}
exit:
/* Unreference the new pad's caps, if we got them */
if (new_pad_caps != NULL)
gst_caps_unref (new_pad_caps);
/* Unreference the sink pad */
gst_object_unref (sink_pad);
}
```
> ![Information](images/icons/emoticons/information.png)
> Need help?
>
@ -533,5 +532,5 @@ to the [Playback tutorials](Playback+tutorials.markdown), and gain more
insight about the `playbin2` element.
Remember that attached to this page you should find the complete source
code of the tutorial and any accessory files needed to build it.
It has been a pleasure having you here, and see you soon!
code of the tutorial and any accessory files needed to build it.
It has been a pleasure having you here, and see you soon!

View file

@ -1,4 +1,4 @@
# Basic tutorial 4: Time management
# Basic tutorial 4: Time management
## Goal
@ -37,7 +37,7 @@ in the SDK installation).
``` theme: Default; brush: cpp; gutter: true
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
GstElement *playbin; /* Our one and only element */
@ -47,36 +47,36 @@ typedef struct _CustomData {
gboolean seek_done; /* Have we performed the seek already? */
gint64 duration; /* How long does this media last, in nanoseconds */
} CustomData;
/* Forward definition of the message processing function */
static void handle_message (CustomData *data, GstMessage *msg);
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
data.playing = FALSE;
data.terminate = FALSE;
data.seek_enabled = FALSE;
data.seek_done = FALSE;
data.duration = GST_CLOCK_TIME_NONE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.playbin = gst_element_factory_make ("playbin", "playbin");
if (!data.playbin) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set the URI to play */
g_object_set (data.playbin, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Start playing */
ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
@ -84,13 +84,13 @@ int main(int argc, char *argv[]) {
gst_object_unref (data.playbin);
return -1;
}
/* Listen to the bus */
bus = gst_element_get_bus (data.playbin);
do {
msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION);
/* Parse message */
if (msg != NULL) {
handle_message (&data, msg);
@ -98,23 +98,23 @@ int main(int argc, char *argv[]) {
/* We got no message, this means the timeout expired */
if (data.playing) {
gint64 current = -1;
/* Query the current position of the stream */
if (!gst_element_query_position (data.playbin, GST_TIME_FORMAT, &current)) {
g_printerr ("Could not query current position.\n");
}
/* If we didn't know it yet, query the stream duration */
if (!GST_CLOCK_TIME_IS_VALID (data.duration)) {
if (!gst_element_query_duration (data.playbin, GST_TIME_FORMAT, &data.duration)) {
g_printerr ("Could not query current duration.\n");
}
}
/* Print current position and total duration */
g_print ("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
GST_TIME_ARGS (current), GST_TIME_ARGS (data.duration));
/* If seeking is enabled, we have not done it yet, and the time is right, seek */
if (data.seek_enabled && !data.seek_done && current > 10 * GST_SECOND) {
g_print ("\nReached 10s, performing seek...\n");
@ -125,18 +125,18 @@ int main(int argc, char *argv[]) {
}
}
} while (!data.terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (data.playbin, GST_STATE_NULL);
gst_object_unref (data.playbin);
return 0;
}
static void handle_message (CustomData *data, GstMessage *msg) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
@ -160,10 +160,10 @@ static void handle_message (CustomData *data, GstMessage *msg) {
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
/* Remember whether we are in the PLAYING state or not */
data->playing = (new_state == GST_STATE_PLAYING);
if (data->playing) {
/* We just moved to PLAYING. Check if seeking is possible */
GstQuery *query;
@ -219,7 +219,7 @@ typedef struct _CustomData {
gboolean seek_done; /* Have we performed the seek already? */
gint64 duration; /* How long does this media last, in nanoseconds */
} CustomData;
/* Forward definition of the message processing function */
static void handle_message (CustomData *data, GstMessage *msg);
```
@ -378,7 +378,7 @@ case GST_MESSAGE_STATE_CHANGED: {
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->pipeline)) {
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
/* Remember whether we are in the PLAYING state or not */
data->playing = (new_state == GST_STATE_PLAYING);
```
@ -414,7 +414,6 @@ if (data->playing) {
}
```
`gst_query_new_seeking()` creates a new query object of the "seeking"
type, with `GST_FORMAT_TIME` format. This indicates that we are
interested in seeking by specifying the new time to which we want to
@ -455,4 +454,3 @@ Remember that attached to this page you should find the complete source
code of the tutorial and any accessory files needed to build it.
It has been a pleasure having you here, and see you soon!

View file

@ -1,4 +1,4 @@
# Basic tutorial 5: GUI toolkit integration
# Basic tutorial 5: GUI toolkit integration
## Goal
@ -43,7 +43,6 @@ rendering.
>
> A GObject *interface* (which GStreamer uses) is a set of functions that an element can implement. If it does, then it is said to support that particular interface. For example, video sinks usually create their own windows to display video, but, if they are also capable of rendering to an external window, they can choose to implement the `GstVideoOverlay` interface and provide functions to specify this external window. From the application developer point of view, if a certain interface is supported, you can use it and forget about which kind of element is implementing it. Moreover, if you are using `playbin`, it will automatically expose some of the interfaces supported by its internal elements: You can use your interface functions directly on `playbin` without knowing who is implementing them!
Another issue is that GUI toolkits usually only allow manipulation of
the graphical “widgets” through the main (or application) thread,
whereas GStreamer usually spawns multiple threads to take care of
@ -72,11 +71,11 @@ in the SDK installation).
```
#include <string.h>
#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <gdk/gdk.h>
#if defined (GDK_WINDOWING_X11)
#include <gdk/gdkx.h>
@ -85,29 +84,29 @@ in the SDK installation).
#elif defined (GDK_WINDOWING_QUARTZ)
#include <gdk/gdkquartz.h>
#endif
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
GstElement *playbin; /* Our one and only pipeline */
GtkWidget *slider; /* Slider widget to keep track of current position */
GtkWidget *streams_list; /* Text widget to display info about the streams */
gulong slider_update_signal_id; /* Signal ID for the slider update signal */
GstState state; /* Current state of the pipeline */
gint64 duration; /* Duration of the clip, in nanoseconds */
} CustomData;
/* This function is called when the GUI toolkit creates the physical window that will hold the video.
* At this point we can retrieve its handler (which has a different meaning depending on the windowing system)
* and pass it to GStreamer through the GstVideoOverlay interface. */
static void realize_cb (GtkWidget *widget, CustomData *data) {
GdkWindow *window = gtk_widget_get_window (widget);
guintptr window_handle;
if (!gdk_window_ensure_native (window))
g_error ("Couldn't create native window needed for GstVideoOverlay!");
/* Retrieve window handler from GDK */
#if defined (GDK_WINDOWING_WIN32)
window_handle = (guintptr)GDK_WINDOW_HWND (window);
@ -119,35 +118,35 @@ static void realize_cb (GtkWidget *widget, CustomData *data) {
/* Pass it to playbin, which implements GstVideoOverlay and will forward it to the video sink */
gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->playbin), window_handle);
}
/* This function is called when the PLAY button is clicked */
static void play_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin, GST_STATE_PLAYING);
}
/* This function is called when the PAUSE button is clicked */
static void pause_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin, GST_STATE_PAUSED);
}
/* This function is called when the STOP button is clicked */
static void stop_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin, GST_STATE_READY);
}
/* This function is called when the main window is closed */
static void delete_event_cb (GtkWidget *widget, GdkEvent *event, CustomData *data) {
stop_cb (NULL, data);
gtk_main_quit ();
}
/* This function is called everytime the video window needs to be redrawn (due to damage/exposure,
* rescaling, etc). GStreamer takes care of this in the PAUSED and PLAYING states, otherwise,
* we simply draw a black rectangle to avoid garbage showing up. */
static gboolean draw_cb (GtkWidget *widget, cairo_t *cr, CustomData *data) {
if (data->state < GST_STATE_PAUSED) {
GtkAllocation allocation;
/* Cairo is a 2D graphics library which we use here to clean the video window.
* It is used by GStreamer for other reasons, so it will always be available to us. */
gtk_widget_get_allocation (widget, &allocation);
@ -156,10 +155,10 @@ static gboolean draw_cb (GtkWidget *widget, cairo_t *cr, CustomData *data) {
cairo_fill (cr);
cairo_destroy (cr);
}
return FALSE;
}
/* This function is called when the slider changes its position. We perform a seek to the
* new position here. */
static void slider_cb (GtkRange *range, CustomData *data) {
@ -167,7 +166,7 @@ static void slider_cb (GtkRange *range, CustomData *data) {
gst_element_seek_simple (data->playbin, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
(gint64)(value * GST_SECOND));
}
/* This creates all the GTK+ widgets that compose our application, and registers the callbacks */
static void create_ui (CustomData *data) {
GtkWidget *main_window; /* The uppermost window, containing all other windows */
@ -176,58 +175,58 @@ static void create_ui (CustomData *data) {
GtkWidget *main_hbox; /* HBox to hold the video_window and the stream info text widget */
GtkWidget *controls; /* HBox to hold the buttons and the slider */
GtkWidget *play_button, *pause_button, *stop_button; /* Buttons */
main_window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
g_signal_connect (G_OBJECT (main_window), "delete-event", G_CALLBACK (delete_event_cb), data);
video_window = gtk_drawing_area_new ();
gtk_widget_set_double_buffered (video_window, FALSE);
g_signal_connect (video_window, "realize", G_CALLBACK (realize_cb), data);
g_signal_connect (video_window, "draw", G_CALLBACK (draw_cb), data);
play_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PLAY);
g_signal_connect (G_OBJECT (play_button), "clicked", G_CALLBACK (play_cb), data);
pause_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PAUSE);
g_signal_connect (G_OBJECT (pause_button), "clicked", G_CALLBACK (pause_cb), data);
stop_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_STOP);
g_signal_connect (G_OBJECT (stop_button), "clicked", G_CALLBACK (stop_cb), data);
data->slider = gtk_hscale_new_with_range (0, 100, 1);
gtk_scale_set_draw_value (GTK_SCALE (data->slider), 0);
data->slider_update_signal_id = g_signal_connect (G_OBJECT (data->slider), "value-changed", G_CALLBACK (slider_cb), data);
data->streams_list = gtk_text_view_new ();
gtk_text_view_set_editable (GTK_TEXT_VIEW (data->streams_list), FALSE);
controls = gtk_box_new (GTK_ORIENTATION_HORIZONTAL,, 0);
gtk_box_pack_start (GTK_BOX (controls), play_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), pause_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), stop_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), data->slider, TRUE, TRUE, 2);
main_hbox = gtk_box_new (GTK_ORIENTATION_HORIZONTAL,, 0);
gtk_box_pack_start (GTK_BOX (main_hbox), video_window, TRUE, TRUE, 0);
gtk_box_pack_start (GTK_BOX (main_hbox), data->streams_list, FALSE, FALSE, 2);
main_box = gtk_box_new (GTK_ORIENTATION_VERTICAL,, 0);
gtk_box_pack_start (GTK_BOX (main_box), main_hbox, TRUE, TRUE, 0);
gtk_box_pack_start (GTK_BOX (main_box), controls, FALSE, FALSE, 0);
gtk_container_add (GTK_CONTAINER (main_window), main_box);
gtk_window_set_default_size (GTK_WINDOW (main_window), 640, 480);
gtk_widget_show_all (main_window);
}
/* This function is called periodically to refresh the GUI */
static gboolean refresh_ui (CustomData *data) {
gint64 current = -1;
/* We do not want to update anything unless we are in the PAUSED or PLAYING states */
if (data->state < GST_STATE_PAUSED)
return TRUE;
/* If we didn't know it yet, query the stream duration */
if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
if (!gst_element_query_duration (data->playbin, GST_FORMAT_TIME, &data->duration)) {
@ -237,7 +236,7 @@ static gboolean refresh_ui (CustomData *data) {
gtk_range_set_range (GTK_RANGE (data->slider), 0, (gdouble)data->duration / GST_SECOND);
}
}
if (gst_element_query_position (data->playbin, GST_FORMAT_TIME, &current)) {
/* Block the "value-changed" signal, so the slider_cb function is not called
* (which would trigger a seek the user has not requested) */
@ -249,7 +248,7 @@ static gboolean refresh_ui (CustomData *data) {
}
return TRUE;
}
/* This function is called when new metadata is discovered in the stream */
static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
/* We are possibly in a GStreamer working thread, so we notify the main
@ -258,30 +257,30 @@ static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
gst_message_new_application (GST_OBJECT (playbin),
gst_structure_new ("tags-changed", NULL)));
}
/* This function is called when an error message is posted on the bus */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err;
gchar *debug_info;
/* Print error details on the screen */
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
/* Set the pipeline to READY (which stops playback) */
gst_element_set_state (data->playbin, GST_STATE_READY);
}
/* This function is called when an End-Of-Stream message is posted on the bus.
* We just set the pipeline to READY (which stops playback) */
static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
g_print ("End-Of-Stream reached.\n");
gst_element_set_state (data->playbin, GST_STATE_READY);
}
/* This function is called when the pipeline changes states. We use it to
* keep track of the current state. */
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
@ -296,7 +295,7 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
}
}
}
/* Extract metadata from all the streams and write it to the text widget in the GUI */
static void analyze_streams (CustomData *data) {
gint i;
@ -305,16 +304,16 @@ static void analyze_streams (CustomData *data) {
guint rate;
gint n_video, n_audio, n_text;
GtkTextBuffer *text;
/* Clean current contents of the widget */
text = gtk_text_view_get_buffer (GTK_TEXT_VIEW (data->streams_list));
gtk_text_buffer_set_text (text, "", -1);
/* Read some properties */
g_object_get (data->playbin, "n-video", &n_video, NULL);
g_object_get (data->playbin, "n-audio", &n_audio, NULL);
g_object_get (data->playbin, "n-text", &n_text, NULL);
for (i = 0; i < n_video; i++) {
tags = NULL;
/* Retrieve the stream's video tags */
@ -331,7 +330,7 @@ static void analyze_streams (CustomData *data) {
gst_tag_list_free (tags);
}
}
for (i = 0; i < n_audio; i++) {
tags = NULL;
/* Retrieve the stream's audio tags */
@ -360,7 +359,7 @@ static void analyze_streams (CustomData *data) {
gst_tag_list_free (tags);
}
}
for (i = 0; i < n_text; i++) {
tags = NULL;
/* Retrieve the stream's subtitle tags */
@ -379,7 +378,7 @@ static void analyze_streams (CustomData *data) {
}
}
}
/* This function is called when an "application" message is posted on the bus.
* Here we retrieve the message posted by the tags_cb callback */
static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
@ -389,41 +388,41 @@ static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
analyze_streams (data);
}
}
int main(int argc, char *argv[]) {
CustomData data;
GstStateChangeReturn ret;
GstBus *bus;
/* Initialize GTK */
gtk_init (&argc, &argv);
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Initialize our data structure */
memset (&data, 0, sizeof (data));
data.duration = GST_CLOCK_TIME_NONE;
/* Create the elements */
data.playbin = gst_element_factory_make ("playbin", "playbin");
if (!data.playbin) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set the URI to play */
g_object_set (data.playbin, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Connect to interesting signals in playbin */
g_signal_connect (G_OBJECT (data.playbin), "video-tags-changed", (GCallback) tags_cb, &data);
g_signal_connect (G_OBJECT (data.playbin), "audio-tags-changed", (GCallback) tags_cb, &data);
g_signal_connect (G_OBJECT (data.playbin), "text-tags-changed", (GCallback) tags_cb, &data);
/* Create the GUI */
create_ui (&data);
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
bus = gst_element_get_bus (data.playbin);
gst_bus_add_signal_watch (bus);
@ -432,7 +431,7 @@ int main(int argc, char *argv[]) {
g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, &data);
g_signal_connect (G_OBJECT (bus), "message::application", (GCallback)application_cb, &data);
gst_object_unref (bus);
/* Start playing */
ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
@ -440,13 +439,13 @@ int main(int argc, char *argv[]) {
gst_object_unref (data.playbin);
return -1;
}
/* Register a function that GLib will call every second */
g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);
/* Start the GTK main loop. We will not regain control until gtk_main_quit is called. */
gtk_main ();
/* Free resources */
gst_element_set_state (data.playbin, GST_STATE_NULL);
gst_object_unref (data.playbin);
@ -504,25 +503,25 @@ int main(int argc, char *argv[]) {
CustomData data;
GstStateChangeReturn ret;
GstBus *bus;
/* Initialize GTK */
gtk_init (&argc, &argv);
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Initialize our data structure */
memset (&data, 0, sizeof (data));
data.duration = GST_CLOCK_TIME_NONE;
/* Create the elements */
data.playbin = gst_element_factory_make ("playbin", "playbin");
if (!data.playbin) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set the URI to play */
g_object_set (data.playbin, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
```
@ -608,10 +607,10 @@ documentation of the signal.
static void realize_cb (GtkWidget *widget, CustomData *data) {
GdkWindow *window = gtk_widget_get_window (widget);
guintptr window_handle;
if (!gdk_window_ensure_native (window))
g_error ("Couldn't create native window needed for GstVideoOverlay!");
/* Retrieve window handler from GDK */
#if defined (GDK_WINDOWING_WIN32)
window_handle = (guintptr)GDK_WINDOW_HWND (window);
@ -642,12 +641,12 @@ this process a lot!
static void play_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin, GST_STATE_PLAYING);
}
/* This function is called when the PAUSE button is clicked */
static void pause_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin, GST_STATE_PAUSED);
}
/* This function is called when the STOP button is clicked */
static void stop_cb (GtkButton *button, CustomData *data) {
gst_element_set_state (data->playbin, GST_STATE_READY);
@ -684,7 +683,7 @@ static gboolean expose_cb (GtkWidget *widget, GdkEventExpose *event, CustomData
GtkAllocation allocation;
GdkWindow *window = gtk_widget_get_window (widget);
cairo_t *cr;
/* Cairo is a 2D graphics library which we use here to clean the video window.
* It is used by GStreamer for other reasons, so it will always be available to us. */
gtk_widget_get_allocation (widget, &allocation);
@ -694,7 +693,7 @@ static gboolean expose_cb (GtkWidget *widget, GdkEventExpose *event, CustomData
cairo_fill (cr);
cairo_destroy (cr);
}
return FALSE;
}
```
@ -735,7 +734,7 @@ allow any seek to complete before a new one is queued.
/* This function is called periodically to refresh the GUI */
static gboolean refresh_ui (CustomData *data) {
gint64 current = -1;
/* We do not want to update anything unless we are in the PAUSED or PLAYING states */
if (data->state < GST_STATE_PAUSED)
return TRUE;
@ -886,5 +885,3 @@ The following basic tutorials keep focusing on other individual
GStreamer topics
It has been a pleasure having you here, and see you soon!

View file

@ -1,4 +1,4 @@
# Basic tutorial 6: Media formats and Pad Capabilities
# Basic tutorial 6: Media formats and Pad Capabilities
## Goal
@ -89,7 +89,7 @@ SRC template: 'src'
width: [ 1, 2147483647 ]
height: [ 1, 2147483647 ]
framerate: [ 0/1, 2147483647/1 ]
format: { I420, NV12, NV21, YV12, YUY2, Y42B, Y444, YUV9, YVU9, Y41B, Y800, Y8, GREY, Y16 , UYVY, YVYU, IYU1, v308, AYUV, A420 }
format: { I420, NV12, NV21, YV12, YUY2, Y42B, Y444, YUV9, YVU9, Y41B, Y800, Y8, GREY, Y16 , UYVY, YVYU, IYU1, v308, AYUV, A420 }
```
`video/x-raw` indicates that this source pad outputs raw video. It
@ -124,21 +124,21 @@ in the SDK installation).
```
#include <gst/gst.h>
/* Functions below print the Capabilities in a human-friendly format */
static gboolean print_field (GQuark field, const GValue * value, gpointer pfx) {
gchar *str = gst_value_serialize (value);
g_print ("%s %15s: %s\n", (gchar *) pfx, g_quark_to_string (field), str);
g_free (str);
return TRUE;
}
static void print_caps (const GstCaps * caps, const gchar * pfx) {
guint i;
g_return_if_fail (caps != NULL);
if (gst_caps_is_any (caps)) {
g_print ("%sANY\n", pfx);
return;
@ -147,38 +147,38 @@ static void print_caps (const GstCaps * caps, const gchar * pfx) {
g_print ("%sEMPTY\n", pfx);
return;
}
for (i = 0; i < gst_caps_get_size (caps); i++) {
GstStructure *structure = gst_caps_get_structure (caps, i);
g_print ("%s%s\n", pfx, gst_structure_get_name (structure));
gst_structure_foreach (structure, print_field, (gpointer) pfx);
}
}
/* Prints information about a Pad Template, including its Capabilities */
static void print_pad_templates_information (GstElementFactory * factory) {
const GList *pads;
GstStaticPadTemplate *padtemplate;
g_print ("Pad Templates for %s:\n", gst_element_factory_get_longname (factory));
if (!gst_element_factory_get_num_pad_templates (factory)) {
g_print (" none\n");
return;
}
pads = gst_element_factory_get_static_pad_templates (factory);
while (pads) {
padtemplate = pads->data
pads = g_list_next (pads);
if (padtemplate->direction == GST_PAD_SRC)
g_print (" SRC template: '%s'\n", padtemplate->name_template);
else if (padtemplate->direction == GST_PAD_SINK)
g_print (" SINK template: '%s'\n", padtemplate->name_template);
else
g_print (" UNKNOWN!!! template: '%s'\n", padtemplate->name_template);
if (padtemplate->presence == GST_PAD_ALWAYS)
g_print (" Availability: Always\n");
else if (padtemplate->presence == GST_PAD_SOMETIMES)
@ -187,7 +187,7 @@ static void print_pad_templates_information (GstElementFactory * factory) {
g_print (" Availability: On request\n");
} else
g_print (" Availability: UNKNOWN!!!\n");
if (padtemplate->static_caps.string) {
GstCaps *caps;
g_print (" Capabilities:\n");
@ -196,35 +196,35 @@ static void print_pad_templates_information (GstElementFactory * factory) {
gst_caps_unref (caps);
}
g_print ("\n");
}
}
/* Shows the CURRENT capabilities of the requested pad in the given element */
static void print_pad_capabilities (GstElement *element, gchar *pad_name) {
GstPad *pad = NULL;
GstCaps *caps = NULL;
/* Retrieve pad */
pad = gst_element_get_static_pad (element, pad_name);
if (!pad) {
g_printerr ("Could not retrieve pad '%s'\n", pad_name);
return;
}
/* Retrieve negotiated caps (or acceptable caps if negotiation is not finished yet) */
caps = gst_pad_get_current_caps (pad);
if (!caps)
caps = gst_pad_query_caps (pad, NULL);
/* Print and free */
g_print ("Caps for the %s pad:\n", pad_name);
print_caps (caps, " ");
gst_caps_unref (caps);
gst_object_unref (pad);
}
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *sink;
GstElementFactory *source_factory, *sink_factory;
@ -232,10 +232,10 @@ int main(int argc, char *argv[]) {
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the element factories */
source_factory = gst_element_factory_find ("audiotestsrc");
sink_factory = gst_element_factory_find ("autoaudiosink");
@ -243,23 +243,23 @@ int main(int argc, char *argv[]) {
g_printerr ("Not all element factories could be created.\n");
return -1;
}
/* Print information about the pad templates of these factories */
print_pad_templates_information (source_factory);
print_pad_templates_information (sink_factory);
/* Ask the factories to instantiate actual elements */
source = gst_element_factory_create (source_factory, "source");
sink = gst_element_factory_create (sink_factory, "sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
if (gst_element_link (source, sink) != TRUE) {
@ -267,28 +267,28 @@ int main(int argc, char *argv[]) {
gst_object_unref (pipeline);
return -1;
}
/* Print initial negotiated caps (in NULL state) */
g_print ("In NULL state:\n");
print_pad_capabilities (sink, "sink");
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state (check the bus for error messages).\n");
}
/* Wait until error, EOS or State Change */
bus = gst_element_get_bus (pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS |
GST_MESSAGE_STATE_CHANGED);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
@ -321,7 +321,7 @@ int main(int argc, char *argv[]) {
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
@ -332,7 +332,6 @@ int main(int argc, char *argv[]) {
}
```
> ![Information](images/icons/emoticons/information.png)
> Need help?
>
@ -359,19 +358,19 @@ Caps.
static void print_pad_capabilities (GstElement *element, gchar *pad_name) {
GstPad *pad = NULL;
GstCaps *caps = NULL;
/* Retrieve pad */
pad = gst_element_get_static_pad (element, pad_name);
if (!pad) {
g_printerr ("Could not retrieve pad '%s'\n", pad_name);
return;
}
/* Retrieve negotiated caps (or acceptable caps if negotiation is not finished yet) */
caps = gst_pad_get_current_caps (pad);
if (!caps)
caps = gst_pad_query_caps (pad, NULL);
/* Print and free */
g_print ("Caps for the %s pad:\n", pad_name);
print_caps (caps, " ");
@ -403,11 +402,11 @@ if (!source_factory || !sink_factory) {
g_printerr ("Not all element factories could be created.\n");
return -1;
}
/* Print information about the pad templates of these factories */
print_pad_templates_information (source_factory);
print_pad_templates_information (sink_factory);
/* Ask the factories to instantiate actual elements */
source = gst_element_factory_create (source_factory, "source");
sink = gst_element_factory_create (sink_factory, "sink");
@ -473,6 +472,5 @@ Next tutorial shows how data can be manually injected into and extracted
from the GStreamer pipeline.
Remember that attached to this page you should find the complete source
code of the tutorial and any accessory files needed to build it.
code of the tutorial and any accessory files needed to build it.
It has been a pleasure having you here, and see you soon!

View file

@ -1,4 +1,4 @@
# Basic tutorial 7: Multithreading and Pad Availability
# Basic tutorial 7: Multithreading and Pad Availability
## Goal
@ -86,7 +86,7 @@ in the SDK installation).
``` theme: Default; brush: cpp; gutter: true
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *audio_source, *tee, *audio_queue, *audio_convert, *audio_resample, *audio_sink;
GstElement *video_queue, *visual, *video_convert, *video_sink;
@ -95,10 +95,10 @@ int main(int argc, char *argv[]) {
GstPadTemplate *tee_src_pad_template;
GstPad *tee_audio_pad, *tee_video_pad;
GstPad *queue_audio_pad, *queue_video_pad;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
audio_source = gst_element_factory_make ("audiotestsrc", "audio_source");
tee = gst_element_factory_make ("tee", "tee");
@ -110,20 +110,20 @@ int main(int argc, char *argv[]) {
visual = gst_element_factory_make ("wavescope", "visual");
video_convert = gst_element_factory_make ("videoconvert", "csp");
video_sink = gst_element_factory_make ("autovideosink", "video_sink");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !audio_source || !tee || !audio_queue || !audio_convert || !audio_resample || !audio_sink ||
!video_queue || !visual || !video_convert || !video_sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Configure elements */
g_object_set (audio_source, "freq", 215.0f, NULL);
g_object_set (visual, "shader", 0, "style", 1, NULL);
/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue, audio_convert, audio_resample, audio_sink,
video_queue, visual, video_convert, video_sink, NULL);
@ -134,7 +134,7 @@ int main(int argc, char *argv[]) {
gst_object_unref (pipeline);
return -1;
}
/* Manually link the Tee, which has "Request" pads */
tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src_%d");
tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);
@ -151,26 +151,26 @@ int main(int argc, char *argv[]) {
}
gst_object_unref (queue_audio_pad);
gst_object_unref (queue_video_pad);
/* Start playing the pipeline */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Release the request pads from the Tee, and unref them */
gst_element_release_request_pad (tee, tee_audio_pad);
gst_element_release_request_pad (tee, tee_video_pad);
gst_object_unref (tee_audio_pad);
gst_object_unref (tee_video_pad);
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
@ -185,7 +185,7 @@ int main(int argc, char *argv[]) {
>
>If you need help to run this code, refer to the **Running the tutorials** section for your platform: [Linux](Installing+on+Linux.markdown#InstallingonLinux-Run), [Mac OS X](Installing+on+Mac+OS+X.markdown#InstallingonMacOSX-Run) or [Windows](Installing+on+Windows.markdown#InstallingonWindows-Run).
>
> This tutorial plays an audible tone through the audio card and opens a window with a waveform representation of the tone. The waveform should be a sinusoid, but due to the refreshing of the window might not appear so.
> This tutorial plays an audible tone through the audio card and opens a window with a waveform representation of the tone. The waveform should be a sinusoid, but due to the refreshing of the window might not appear so.
>
> Required libraries: `gstreamer-1.0`
@ -330,5 +330,3 @@ The next tutorial builds on top of this one to show how data can be
manually injected into and extracted from a running pipeline.
It has been a pleasure having you here, and see you soon!

View file

@ -1,4 +1,4 @@
# Basic tutorial 8: Short-cutting the pipeline
# Basic tutorial 8: Short-cutting the pipeline
## Goal
@ -98,24 +98,24 @@ in the SDK installation).
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include <string.h>
#define CHUNK_SIZE 1024 /* Amount of bytes we are sending in each buffer */
#define SAMPLE_RATE 44100 /* Samples per second we are sending */
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline, *app_source, *tee, *audio_queue, *audio_convert1, *audio_resample, *audio_sink;
GstElement *video_queue, *audio_convert2, *visual, *video_convert, *video_sink;
GstElement *app_queue, *app_sink;
guint64 num_samples; /* Number of samples generated so far (for timestamp generation) */
gfloat a, b, c, d; /* For waveform generation */
guint sourceid; /* To control the GSource */
GMainLoop *main_loop; /* GLib's Main Loop */
} CustomData;
/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
 * The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
* and is removed when appsrc has enough data (enough-data signal).
@ -128,14 +128,14 @@ static gboolean push_data (CustomData *data) {
gint16 *raw;
gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
gfloat freq;
/* Create a new empty buffer */
buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);
/* Set its timestamp and duration */
GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples, GST_SECOND, SAMPLE_RATE);
GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (CHUNK_SIZE, GST_SECOND, SAMPLE_RATE);
  /* Generate some psychedelic waveforms */
gst_buffer_map (buffer, &map, GST_MAP_WRITE);
raw = (gint16 *)map.data;
@ -149,21 +149,21 @@ static gboolean push_data (CustomData *data) {
}
gst_buffer_unmap (buffer, &map);
data->num_samples += num_samples;
/* Push the buffer into the appsrc */
g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);
/* Free the buffer now that we are done with it */
gst_buffer_unref (buffer);
if (ret != GST_FLOW_OK) {
/* We got some error, stop sending data */
return FALSE;
}
return TRUE;
}
/* This signal callback triggers when appsrc needs data. Here, we add an idle handler
* to the mainloop to start pushing data into the appsrc */
static void start_feed (GstElement *source, guint size, CustomData *data) {
@ -172,7 +172,7 @@ static void start_feed (GstElement *source, guint size, CustomData *data) {
data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
}
}
/* This callback triggers when appsrc has enough data and we can stop sending.
* We remove the idle handler from the mainloop */
static void stop_feed (GstElement *source, CustomData *data) {
@ -182,11 +182,11 @@ static void stop_feed (GstElement *source, CustomData *data) {
data->sourceid = 0;
}
}
/* The appsink has received a buffer */
static void new_sample (GstElement *sink, CustomData *data) {
GstSample *sample;
/* Retrieve the buffer */
g_signal_emit_by_name (sink, "pull-sample", &sample);
if (sample) {
@ -195,22 +195,22 @@ static void new_sample (GstElement *sink, CustomData *data) {
gst_buffer_unref (sample);
}
}
/* This function is called when an error message is posted on the bus */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err;
gchar *debug_info;
/* Print error details on the screen */
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
g_main_loop_quit (data->main_loop);
}
int main(int argc, char *argv[]) {
CustomData data;
GstPadTemplate *tee_src_pad_template;
@ -219,15 +219,15 @@ int main(int argc, char *argv[]) {
GstAudioInfo info;
GstCaps *audio_caps;
GstBus *bus;
  /* Initialize custom data structure */
memset (&data, 0, sizeof (data));
data.b = 1; /* For waveform generation */
data.d = 1;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.app_source = gst_element_factory_make ("appsrc", "audio_source");
data.tee = gst_element_factory_make ("tee", "tee");
@ -242,35 +242,35 @@ int main(int argc, char *argv[]) {
data.video_sink = gst_element_factory_make ("autovideosink", "video_sink");
data.app_queue = gst_element_factory_make ("queue", "app_queue");
data.app_sink = gst_element_factory_make ("appsink", "app_sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("test-pipeline");
if (!data.pipeline || !data.app_source || !data.tee || !data.audio_queue || !data.audio_convert1 ||
!data.audio_resample || !data.audio_sink || !data.video_queue || !data.audio_convert2 || !data.visual ||
!data.video_convert || !data.video_sink || !data.app_queue || !data.app_sink) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Configure wavescope */
g_object_set (data.visual, "shader", 0, "style", 0, NULL);
/* Configure appsrc */
gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, SAMPLE_RATE, 1, NULL);
audio_caps = gst_audio_info_to_caps (&info);
g_object_set (data.app_source, "caps", audio_caps, "format", GST_FORMAT_TIME, NULL);
g_signal_connect (data.app_source, "need-data", G_CALLBACK (start_feed), &data);
g_signal_connect (data.app_source, "enough-data", G_CALLBACK (stop_feed), &data);
/* Configure appsink */
g_object_set (data.app_sink, "emit-signals", TRUE, "caps", audio_caps, NULL);
g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data);
gst_caps_unref (audio_caps);
g_free (audio_caps_text);
/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (data.pipeline), data.app_source, data.tee, data.audio_queue, data.audio_convert1, data.audio_resample,
gst_bin_add_many (GST_BIN (data.pipeline), data.app_source, data.tee, data.audio_queue, data.audio_convert1, data.audio_resample,
data.audio_sink, data.video_queue, data.audio_convert2, data.visual, data.video_convert, data.video_sink, data.app_queue,
data.app_sink, NULL);
if (gst_element_link_many (data.app_source, data.tee, NULL) != TRUE ||
@ -281,7 +281,7 @@ int main(int argc, char *argv[]) {
gst_object_unref (data.pipeline);
return -1;
}
/* Manually link the Tee, which has "Request" pads */
tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (data.tee), "src_%d");
tee_audio_pad = gst_element_request_pad (data.tee, tee_src_pad_template, NULL, NULL);
@ -303,20 +303,20 @@ int main(int argc, char *argv[]) {
gst_object_unref (queue_audio_pad);
gst_object_unref (queue_video_pad);
gst_object_unref (queue_app_pad);
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
bus = gst_element_get_bus (data.pipeline);
gst_bus_add_signal_watch (bus);
g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
gst_object_unref (bus);
/* Start playing the pipeline */
gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
/* Create a GLib Main Loop and set it to run */
data.main_loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (data.main_loop);
/* Release the request pads from the Tee, and unref them */
gst_element_release_request_pad (data.tee, tee_audio_pad);
gst_element_release_request_pad (data.tee, tee_video_pad);
@ -324,7 +324,7 @@ int main(int argc, char *argv[]) {
gst_object_unref (tee_audio_pad);
gst_object_unref (tee_video_pad);
gst_object_unref (tee_app_pad);
/* Free resources */
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
@ -462,14 +462,14 @@ static gboolean push_data (CustomData *data) {
gint16 *raw;
gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
gfloat freq;
/* Create a new empty buffer */
buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);
/* Set its timestamp and duration */
GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples, GST_SECOND, SAMPLE_RATE);
GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (CHUNK_SIZE, GST_SECOND, SAMPLE_RATE);
  /* Generate some psychedelic waveforms */
raw = (gint16 *)GST_BUFFER_DATA (buffer);
```
@ -503,7 +503,7 @@ psychedelic wave).
``` first-line: 53; theme: Default; brush: cpp; gutter: true
/* Push the buffer into the appsrc */
g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);
/* Free the buffer now that we are done with it */
gst_buffer_unref (buffer);
```
@ -518,7 +518,7 @@ usage](Playback+tutorial+1+Playbin2+usage.markdown)), and then
/* The appsink has received a buffer */
static void new_buffer (GstElement *sink, CustomData *data) {
GstBuffer *buffer;
/* Retrieve the buffer */
g_signal_emit_by_name (sink, "pull-buffer", &buffer);
if (buffer) {
@ -555,4 +555,4 @@ different way. [Playback tutorial 3: Short-cutting the
pipeline](Playback+tutorial+3+Short-cutting+the+pipeline.markdown) shows
how to do it.
It has been a pleasure having you here, and see you soon\!
It has been a pleasure having you here, and see you soon\!

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Basic tutorial 9: Media information gathering
# Basic tutorial 9: Media information gathering
This page last changed on May 30, 2012 by xartigas.
@ -85,40 +85,40 @@ in the SDK installation).
#include <string.h>
#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
GstDiscoverer *discoverer;
GMainLoop *loop;
} CustomData;
/* Print a tag in a human-readable format (name: value) */
static void print_tag_foreach (const GstTagList *tags, const gchar *tag, gpointer user_data) {
GValue val = { 0, };
gchar *str;
gint depth = GPOINTER_TO_INT (user_data);
gst_tag_list_copy_value (&val, tags, tag);
if (G_VALUE_HOLDS_STRING (&val))
str = g_value_dup_string (&val);
else
str = gst_value_serialize (&val);
g_print ("%*s%s: %s\n", 2 * depth, " ", gst_tag_get_nick (tag), str);
g_free (str);
g_value_unset (&val);
}
/* Print information regarding a stream */
static void print_stream_info (GstDiscovererStreamInfo *info, gint depth) {
gchar *desc = NULL;
GstCaps *caps;
const GstTagList *tags;
caps = gst_discoverer_stream_info_get_caps (info);
if (caps) {
if (gst_caps_is_fixed (caps))
desc = gst_pb_utils_get_codec_description (caps);
@ -126,37 +126,37 @@ static void print_stream_info (GstDiscovererStreamInfo *info, gint depth) {
desc = gst_caps_to_string (caps);
gst_caps_unref (caps);
}
g_print ("%*s%s: %s\n", 2 * depth, " ", gst_discoverer_stream_info_get_stream_type_nick (info), (desc ? desc : ""));
if (desc) {
g_free (desc);
desc = NULL;
}
tags = gst_discoverer_stream_info_get_tags (info);
if (tags) {
g_print ("%*sTags:\n", 2 * (depth + 1), " ");
gst_tag_list_foreach (tags, print_tag_foreach, GINT_TO_POINTER (depth + 2));
}
}
/* Print information regarding a stream and its substreams, if any */
static void print_topology (GstDiscovererStreamInfo *info, gint depth) {
GstDiscovererStreamInfo *next;
if (!info)
return;
print_stream_info (info, depth);
next = gst_discoverer_stream_info_get_next (info);
if (next) {
print_topology (next, depth + 1);
gst_discoverer_stream_info_unref (next);
} else if (GST_IS_DISCOVERER_CONTAINER_INFO (info)) {
GList *tmp, *streams;
streams = gst_discoverer_container_info_get_streams (GST_DISCOVERER_CONTAINER_INFO (info));
for (tmp = streams; tmp; tmp = tmp->next) {
GstDiscovererStreamInfo *tmpinf = (GstDiscovererStreamInfo *) tmp->data;
@ -165,7 +165,7 @@ static void print_topology (GstDiscovererStreamInfo *info, gint depth) {
gst_discoverer_stream_info_list_free (streams);
}
}
/* This function is called every time the discoverer has information regarding
* one of the URIs we provided.*/
static void on_discovered_cb (GstDiscoverer *discoverer, GstDiscovererInfo *info, GError *err, CustomData *data) {
@ -173,7 +173,7 @@ static void on_discovered_cb (GstDiscoverer *discoverer, GstDiscovererInfo *info
const gchar *uri;
const GstTagList *tags;
GstDiscovererStreamInfo *sinfo;
uri = gst_discoverer_info_get_uri (info);
result = gst_discoverer_info_get_result (info);
switch (result) {
@ -192,10 +192,10 @@ static void on_discovered_cb (GstDiscoverer *discoverer, GstDiscovererInfo *info
case GST_DISCOVERER_MISSING_PLUGINS:{
const GstStructure *s;
gchar *str;
s = gst_discoverer_info_get_misc (info);
str = gst_structure_to_string (s);
g_print ("Missing plugins: %s\n", str);
g_free (str);
break;
@ -204,65 +204,65 @@ static void on_discovered_cb (GstDiscoverer *discoverer, GstDiscovererInfo *info
g_print ("Discovered '%s'\n", uri);
break;
}
if (result != GST_DISCOVERER_OK) {
g_printerr ("This URI cannot be played\n");
return;
}
/* If we got no error, show the retrieved information */
g_print ("\nDuration: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (gst_discoverer_info_get_duration (info)));
tags = gst_discoverer_info_get_tags (info);
if (tags) {
g_print ("Tags:\n");
gst_tag_list_foreach (tags, print_tag_foreach, GINT_TO_POINTER (1));
}
g_print ("Seekable: %s\n", (gst_discoverer_info_get_seekable (info) ? "yes" : "no"));
g_print ("\n");
sinfo = gst_discoverer_info_get_stream_info (info);
if (!sinfo)
return;
g_print ("Stream information:\n");
print_topology (sinfo, 1);
gst_discoverer_stream_info_unref (sinfo);
g_print ("\n");
}
/* This function is called when the discoverer has finished examining
* all the URIs we provided.*/
static void on_finished_cb (GstDiscoverer *discoverer, CustomData *data) {
g_print ("Finished discovering\n");
g_main_loop_quit (data->loop);
}
int main (int argc, char **argv) {
CustomData data;
GError *err = NULL;
gchar *uri = "http://docs.gstreamer.com/media/sintel_trailer-480p.webm";
/* if a URI was provided, use it instead of the default one */
if (argc > 1) {
uri = argv[1];
}
  /* Initialize custom data structure */
memset (&data, 0, sizeof (data));
/* Initialize GStreamer */
gst_init (&argc, &argv);
g_print ("Discovering '%s'\n", uri);
/* Instantiate the Discoverer */
data.discoverer = gst_discoverer_new (5 * GST_SECOND, &err);
if (!data.discoverer) {
@ -270,32 +270,32 @@ int main (int argc, char **argv) {
g_clear_error (&err);
return -1;
}
/* Connect to the interesting signals */
g_signal_connect (data.discoverer, "discovered", G_CALLBACK (on_discovered_cb), &data);
g_signal_connect (data.discoverer, "finished", G_CALLBACK (on_finished_cb), &data);
/* Start the discoverer process (nothing to do yet) */
gst_discoverer_start (data.discoverer);
/* Add a request to process asynchronously the URI passed through the command line */
if (!gst_discoverer_discover_uri_async (data.discoverer, uri)) {
g_print ("Failed to start discovering URI '%s'\n", uri);
g_object_unref (data.discoverer);
return -1;
}
/* Create a GLib Main Loop and set it to run, so we can wait for the signals */
data.loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (data.loop);
/* Stop the discoverer process */
gst_discoverer_stop (data.discoverer);
/* Free resources */
g_object_unref (data.discoverer);
g_main_loop_unref (data.loop);
return 0;
}
```
@ -404,7 +404,7 @@ static void on_discovered_cb (GstDiscoverer *discoverer, GstDiscovererInfo *info
const gchar *uri;
const GstTagList *tags;
GstDiscovererStreamInfo *sinfo;
uri = gst_discoverer_info_get_uri (info);
result = gst_discoverer_info_get_result (info);
```
@ -434,10 +434,10 @@ switch (result) {
case GST_DISCOVERER_MISSING_PLUGINS:{
const GstStructure *s;
gchar *str;
s = gst_discoverer_info_get_misc (info);
str = gst_structure_to_string (s);
g_print ("Missing plugins: %s\n", str);
g_free (str);
break;
@ -447,7 +447,6 @@ switch (result) {
break;
}
if (result != GST_DISCOVERER_OK) {
g_printerr ("This URI cannot be played\n");
return;
@ -487,11 +486,11 @@ much self-explicative.
sinfo = gst_discoverer_info_get_stream_info (info);
if (!sinfo)
return;
g_print ("Stream information:\n");
print_topology (sinfo, 1);
gst_discoverer_stream_info_unref (sinfo);
```
@ -504,19 +503,19 @@ with `gst_discoverer_stream_info_unref()`.
/* Print information regarding a stream and its substreams, if any */
static void print_topology (GstDiscovererStreamInfo *info, gint depth) {
GstDiscovererStreamInfo *next;
if (!info)
return;
print_stream_info (info, depth);
next = gst_discoverer_stream_info_get_next (info);
if (next) {
print_topology (next, depth + 1);
gst_discoverer_stream_info_unref (next);
} else if (GST_IS_DISCOVERER_CONTAINER_INFO (info)) {
GList *tmp, *streams;
streams = gst_discoverer_container_info_get_streams (GST_DISCOVERER_CONTAINER_INFO (info));
for (tmp = streams; tmp; tmp = tmp->next) {
GstDiscovererStreamInfo *tmpinf = (GstDiscovererStreamInfo *) tmp->data;
@ -551,4 +550,3 @@ This tutorial has shown:
It has been a pleasure having you here, and see you soon\!
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,7 +1,6 @@
# Basic tutorials
# Basic tutorials
## Welcome to the GStreamer SDK Basic tutorials
These tutorials describe general topics required to understand the rest
of tutorials in the GStreamer SDK.

View file

@ -1,4 +1,4 @@
# Building from source using Cerbero
# Building from source using Cerbero
> ![Warning](images/icons/emoticons/warning.png)
> This section is intended for advanced users.</p></td>
@ -39,13 +39,11 @@ platforms, but it still needs a minimum base to bootstrap:
>
> (Note that inside the shell, / is mapped to c:\Mingw\msys\1.0\ )
> ![Information](images/icons/emoticons/information.png)
> **OS X users**
>
>To use cerbero on OS X you need to install the "Command Line Tools" from XCode. They are available from the "Preferences" dialog under "Downloads".
> ![Information](images/icons/emoticons/information.png)
> **iOS developers**
>

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Contact
# Contact
This page last changed on Dec 03, 2012 by xartigas.
@ -35,4 +35,3 @@ We want to hear from you!</p></td>
</table>
Document generated by Confluence on Oct 08, 2015 10:28

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Deploying your application
# Deploying your application
This page last changed on Jun 12, 2013 by xartigas.
@ -128,4 +128,3 @@ options.
 
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Frequently Asked Questions
# Frequently Asked Questions
This page last changed on Jun 12, 2013 by xartigas.
@ -110,22 +110,22 @@ In summary:
Some cool media apps using GStreamer:
- [Banshee](http://banshee.fm/)
- [Banshee](http://banshee.fm/)
- [Songbird](http://getsongbird.com/)
- [Snappy](http://live.gnome.org/snappy)  
- [Empathy](https://live.gnome.org/Empathy)
- [Totem](http://projects.gnome.org/totem/)
- [Transmageddon](http://www.linuxrising.org/)
- [Flumotion](http://www.flumotion.net/)
- [Landell](http://landell.holoscopio.com/)
- [Longo match](http://longomatch.org/)
- [Rygel](https://live.gnome.org/Rygel)
- [Empathy](https://live.gnome.org/Empathy)
- [Totem](http://projects.gnome.org/totem/)
- [Transmageddon](http://www.linuxrising.org/)
- [Flumotion](http://www.flumotion.net/)
- [Landell](http://landell.holoscopio.com/)
- [Longo match](http://longomatch.org/)
- [Rygel](https://live.gnome.org/Rygel)
- [Sound
juicer](http://www.burtonini.com/blog/computers/sound-juicer)
- [Buzztard](http://wiki.buzztard.org/index.php/Overview)
juicer](http://www.burtonini.com/blog/computers/sound-juicer)
- [Buzztard](http://wiki.buzztard.org/index.php/Overview)
- [Moovida](http://www.moovida.com/) (Based on Banshee)
- [Fluendo DVD
Player](http://www.fluendo.com/shop/product/fluendo-dvd-player/)
Player](http://www.fluendo.com/shop/product/fluendo-dvd-player/)
- and many [more](http://gstreamer.freedesktop.org/apps/)
# What is the target audience?
@ -154,4 +154,3 @@ The GStreamer SDK supports the iOS platform since [version 2013.6
(Congo)](2013.6%2BCongo.html).
Document generated by Confluence on Oct 08, 2015 10:28

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : GStreamer reference
# GStreamer reference
This page last changed on Jun 25, 2012 by xartigas.
@ -54,4 +54,3 @@ generated from the source code of GStreamer.
</table>
Document generated by Confluence on Oct 08, 2015 10:28

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Installing on Linux
# Installing on Linux
This page last changed on Jun 12, 2013 by slomo.
@ -282,4 +282,3 @@ the `gensdkshell` command of the Cerbero build system, if you built
the SDK yourself as explained above.
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Legal information
# Legal information
This page last changed on Jun 11, 2012 by xartigas.
@ -219,4 +219,3 @@ that the license of the conditions of the resulting program must allow
decompilation to debug modifications to the library.
Document generated by Confluence on Oct 08, 2015 10:28

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Mac OS X deployment
# Mac OS X deployment
This page last changed on Nov 28, 2012 by xartigas.
@ -275,7 +275,7 @@ dependent dynamic libraries
using [otool](https://developer.apple.com/library/mac/#documentation/darwin/reference/manpages/man1/otool.1.html):
``` theme: Default; brush: bash; gutter: false
$ otool -L /Library/Frameworks/GStreamer.framework/Commands/gst-launch-0.10
$ otool -L /Library/Frameworks/GStreamer.framework/Commands/gst-launch-0.10
/Library/Frameworks/GStreamer.framework/Commands/gst-launch-0.10:
/System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation (compatibility version 150.0.0, current version 550.43.0)
/Library/Frameworks/GStreamer.framework/Versions/0.10/x86/lib/libgstreamer-0.10.0.dylib (compatibility version 31.0.0, current version 31.0.0)
@ -294,7 +294,7 @@ install name of a library can be retrieved with
[otool](https://developer.apple.com/library/mac/#documentation/darwin/reference/manpages/man1/otool.1.html) too:
``` theme: Default; brush: bash; gutter: false
$ otool -D /Library/Frameworks/GStreamer.framework/Libraries/libgstreamer-0.10.dylib
$ otool -D /Library/Frameworks/GStreamer.framework/Libraries/libgstreamer-0.10.dylib
/Library/Frameworks/GStreamer.framework/Libraries/libgstreamer-0.10.dylib:
/Library/Frameworks/GStreamer.framework/Versions/0.10/x86/lib/libgstreamer-0.10.0.dylib
```
@ -376,9 +376,8 @@ You can use the following functions:
## Attachments:
![](images/icons/bullet_blue.gif)
[PackageMaker1.png](attachments/2097292/2424841.png) (image/png)
[PackageMaker1.png](attachments/2097292/2424841.png) (image/png)
![](images/icons/bullet_blue.gif)
[PackageMaker2.png](attachments/2097292/2424842.png) (image/png)
[PackageMaker2.png](attachments/2097292/2424842.png) (image/png)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Multiplatform deployment using Cerbero
# Multiplatform deployment using Cerbero
This page last changed on Nov 21, 2012 by slomo.
@ -223,11 +223,9 @@ class Recipe(recipe.Recipe):
use_system_libs = True
remotes = {'upstream': 'git://git.gnome.org/snappy'}
files_bins = ['snappy']
files_data = ['share/snappy']
def prepare(self):
if self.config.target_platform == Platform.LINUX:
self.configure_options += ' --enable-dbus' 
@ -433,7 +431,6 @@ class Package(package.Package):
uuid = '6cd161c2-4535-411f-8287-e8f6a892f853'
deps = ['gstreamer-core']
files = ['flac:libs',
'jasper:libs', 'libkate:libs',
'libogg:libs', 'schroedinger:libs', 'speex:libs',
@ -486,4 +483,3 @@ the dependencies and your software). The resulting files will be in the
current working directory.
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Playback tutorial 1: Playbin2 usage
# Playback tutorial 1: Playbin2 usage
This page last changed on Jun 26, 2012 by xartigas.
@ -66,67 +66,67 @@ it in the SDK installation).
``` theme: Default; brush: cpp; gutter: true
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
GstElement *playbin2; /* Our one and only element */
gint n_video; /* Number of embedded video streams */
gint n_audio; /* Number of embedded audio streams */
gint n_text; /* Number of embedded subtitle streams */
gint current_video; /* Currently playing video stream */
gint current_audio; /* Currently playing audio stream */
gint current_text; /* Currently playing subtitle stream */
GMainLoop *main_loop; /* GLib's Main Loop */
} CustomData;
/* playbin2 flags */
typedef enum {
GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
GST_PLAY_FLAG_AUDIO = (1 << 1), /* We want audio output */
GST_PLAY_FLAG_TEXT = (1 << 2) /* We want subtitle output */
} GstPlayFlags;
/* Forward definition for the message and keyboard processing functions */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data);
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstStateChangeReturn ret;
gint flags;
GIOChannel *io_stdin;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.playbin2 = gst_element_factory_make ("playbin2", "playbin2");
if (!data.playbin2) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set the URI to play */
g_object_set (data.playbin2, "uri", "http://docs.gstreamer.com/media/sintel_cropped_multilingual.webm", NULL);
/* Set flags to show Audio and Video but ignore Subtitles */
g_object_get (data.playbin2, "flags", &flags, NULL);
flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO;
flags &= ~GST_PLAY_FLAG_TEXT;
g_object_set (data.playbin2, "flags", flags, NULL);
/* Set connection speed. This will affect some internal decisions of playbin2 */
g_object_set (data.playbin2, "connection-speed", 56, NULL);
/* Add a bus watch, so we get notified when a message arrives */
bus = gst_element_get_bus (data.playbin2);
gst_bus_add_watch (bus, (GstBusFunc)handle_message, &data);
/* Add a keyboard watch so we get notified of keystrokes */
#ifdef _WIN32
io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
@ -134,7 +134,7 @@ int main(int argc, char *argv[]) {
io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
/* Start playing */
ret = gst_element_set_state (data.playbin2, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
@ -142,11 +142,11 @@ int main(int argc, char *argv[]) {
gst_object_unref (data.playbin2);
return -1;
}
/* Create a GLib Main Loop and set it to run */
data.main_loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (data.main_loop);
/* Free resources */
g_main_loop_unref (data.main_loop);
g_io_channel_unref (io_stdin);
@ -155,22 +155,22 @@ int main(int argc, char *argv[]) {
gst_object_unref (data.playbin2);
return 0;
}
/* Extract some metadata from the streams and print it on the screen */
static void analyze_streams (CustomData *data) {
gint i;
GstTagList *tags;
gchar *str;
guint rate;
/* Read some properties */
g_object_get (data->playbin2, "n-video", &data->n_video, NULL);
g_object_get (data->playbin2, "n-audio", &data->n_audio, NULL);
g_object_get (data->playbin2, "n-text", &data->n_text, NULL);
g_print ("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
data->n_video, data->n_audio, data->n_text);
g_print ("\n");
for (i = 0; i < data->n_video; i++) {
tags = NULL;
@ -184,7 +184,7 @@ static void analyze_streams (CustomData *data) {
gst_tag_list_free (tags);
}
}
g_print ("\n");
for (i = 0; i < data->n_audio; i++) {
tags = NULL;
@ -206,7 +206,7 @@ static void analyze_streams (CustomData *data) {
gst_tag_list_free (tags);
}
}
g_print ("\n");
for (i = 0; i < data->n_text; i++) {
tags = NULL;
@ -221,22 +221,22 @@ static void analyze_streams (CustomData *data) {
gst_tag_list_free (tags);
}
}
g_object_get (data->playbin2, "current-video", &data->current_video, NULL);
g_object_get (data->playbin2, "current-audio", &data->current_audio, NULL);
g_object_get (data->playbin2, "current-text", &data->current_text, NULL);
g_print ("\n");
g_print ("Currently playing video stream %d, audio stream %d and text stream %d\n",
data->current_video, data->current_audio, data->current_text);
g_print ("Type any number and hit ENTER to select a different audio stream\n");
}
/* Process messages from GStreamer */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
@ -261,15 +261,15 @@ static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data)
}
} break;
}
/* We want to keep receiving messages */
return TRUE;
}
/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
gchar *str = NULL;
if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
int index = atoi (str);
if (index < 0 || index >= data->n_audio) {
@ -318,15 +318,15 @@ static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomDa
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
GstElement *playbin2; /* Our one and only element */
gint n_video; /* Number of embedded video streams */
gint n_audio; /* Number of embedded audio streams */
gint n_text; /* Number of embedded subtitle streams */
gint current_video; /* Currently playing video stream */
gint current_audio; /* Currently playing audio stream */
gint current_text; /* Currently playing subtitle stream */
GMainLoop *main_loop; /* GLib's Main Loop */
} CustomData;
```
@ -483,7 +483,7 @@ static void analyze_streams (CustomData *data) {
GstTagList *tags;
gchar *str;
guint rate;
/* Read some properties */
g_object_get (data->playbin2, "n-video", &data->n_video, NULL);
g_object_get (data->playbin2, "n-audio", &data->n_audio, NULL);
@ -554,7 +554,7 @@ checking the metadata to identify one particular stream becomes crucial.
/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
gchar *str = NULL;
if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
int index = atoi (str);
if (index < 0 || index >= data->n_audio) {
@ -613,4 +613,3 @@ code of the tutorial and any accessory files needed to build it.
It has been a pleasure having you here, and see you soon\!
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Playback tutorial 2: Subtitle management
# Playback tutorial 2: Subtitle management
This page last changed on May 16, 2012 by xartigas.
@ -43,67 +43,67 @@ it in the SDK installation).
``` theme: Default; brush: cpp; gutter: true
#include <gst/gst.h>
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
GstElement *playbin2; /* Our one and only element */
gint n_video; /* Number of embedded video streams */
gint n_audio; /* Number of embedded audio streams */
gint n_text; /* Number of embedded subtitle streams */
gint current_video; /* Currently playing video stream */
gint current_audio; /* Currently playing audio stream */
gint current_text; /* Currently playing subtitle stream */
GMainLoop *main_loop; /* GLib's Main Loop */
} CustomData;
/* playbin2 flags */
typedef enum {
GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
GST_PLAY_FLAG_AUDIO = (1 << 1), /* We want audio output */
GST_PLAY_FLAG_TEXT = (1 << 2) /* We want subtitle output */
} GstPlayFlags;
/* Forward definition for the message and keyboard processing functions */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data);
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
GstStateChangeReturn ret;
gint flags;
GIOChannel *io_stdin;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
data.playbin2 = gst_element_factory_make ("playbin2", "playbin2");
if (!data.playbin2) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set the URI to play */
g_object_set (data.playbin2, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.ogv", NULL);
/* Set the subtitle URI to play and some font description */
g_object_set (data.playbin2, "suburi", "http://docs.gstreamer.com/media/sintel_trailer_gr.srt", NULL);
g_object_set (data.playbin2, "subtitle-font-desc", "Sans, 18", NULL);
/* Set flags to show Audio, Video and Subtitles */
g_object_get (data.playbin2, "flags", &flags, NULL);
flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_TEXT;
g_object_set (data.playbin2, "flags", flags, NULL);
/* Add a bus watch, so we get notified when a message arrives */
bus = gst_element_get_bus (data.playbin2);
gst_bus_add_watch (bus, (GstBusFunc)handle_message, &data);
/* Add a keyboard watch so we get notified of keystrokes */
#ifdef _WIN32
io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
@ -111,7 +111,7 @@ int main(int argc, char *argv[]) {
io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
/* Start playing */
ret = gst_element_set_state (data.playbin2, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
@ -119,11 +119,11 @@ int main(int argc, char *argv[]) {
gst_object_unref (data.playbin2);
return -1;
}
/* Create a GLib Main Loop and set it to run */
data.main_loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (data.main_loop);
/* Free resources */
g_main_loop_unref (data.main_loop);
g_io_channel_unref (io_stdin);
@ -132,22 +132,22 @@ int main(int argc, char *argv[]) {
gst_object_unref (data.playbin2);
return 0;
}
/* Extract some metadata from the streams and print it on the screen */
static void analyze_streams (CustomData *data) {
gint i;
GstTagList *tags;
gchar *str;
guint rate;
/* Read some properties */
g_object_get (data->playbin2, "n-video", &data->n_video, NULL);
g_object_get (data->playbin2, "n-audio", &data->n_audio, NULL);
g_object_get (data->playbin2, "n-text", &data->n_text, NULL);
g_print ("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
data->n_video, data->n_audio, data->n_text);
g_print ("\n");
for (i = 0; i < data->n_video; i++) {
tags = NULL;
@ -161,7 +161,7 @@ static void analyze_streams (CustomData *data) {
gst_tag_list_free (tags);
}
}
g_print ("\n");
for (i = 0; i < data->n_audio; i++) {
tags = NULL;
@ -183,7 +183,7 @@ static void analyze_streams (CustomData *data) {
gst_tag_list_free (tags);
}
}
g_print ("\n");
for (i = 0; i < data->n_text; i++) {
tags = NULL;
@ -200,22 +200,22 @@ static void analyze_streams (CustomData *data) {
g_print (" no tags found\n");
}
}
g_object_get (data->playbin2, "current-video", &data->current_video, NULL);
g_object_get (data->playbin2, "current-audio", &data->current_audio, NULL);
g_object_get (data->playbin2, "current-text", &data->current_text, NULL);
g_print ("\n");
g_print ("Currently playing video stream %d, audio stream %d and subtitle stream %d\n",
data->current_video, data->current_audio, data->current_text);
g_print ("Type any number and hit ENTER to select a different subtitle stream\n");
}
/* Process messages from GStreamer */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
@ -240,15 +240,15 @@ static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data)
}
} break;
}
/* We want to keep receiving messages */
return TRUE;
}
/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
gchar *str = NULL;
if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
int index = atoi (str);
if (index < 0 || index >= data->n_text) {
@ -384,7 +384,7 @@ they are embedded in the container or in a different file:
The next playback tutorial shows how to change the playback speed.
Remember that attached to this page you should find the complete source
code of the tutorial and any accessory files needed to build it.
code of the tutorial and any accessory files needed to build it.
It has been a pleasure having you here, and see you soon\!
<table>
@ -400,4 +400,3 @@ It has been a pleasure having you here, and see you soon\!
</table>
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Playback tutorial 3: Short-cutting the pipeline
# Playback tutorial 3: Short-cutting the pipeline
This page last changed on Jun 26, 2012 by xartigas.
@ -35,24 +35,24 @@ Copy this code into a text file named `playback-tutorial-3.c`.
``` theme: Default; brush: cpp; gutter: true
#include <gst/gst.h>
#include <string.h>
#define CHUNK_SIZE 1024 /* Amount of bytes we are sending in each buffer */
#define SAMPLE_RATE 44100 /* Samples per second we are sending */
#define AUDIO_CAPS "audio/x-raw-int,channels=1,rate=%d,signed=(boolean)true,width=16,depth=16,endianness=BYTE_ORDER"
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
GstElement *app_source;
guint64 num_samples; /* Number of samples generated so far (for timestamp generation) */
gfloat a, b, c, d; /* For waveform generation */
guint sourceid; /* To control the GSource */
GMainLoop *main_loop; /* GLib's Main Loop */
} CustomData;
/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
* The ide handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
* and is removed when appsrc has enough data (enough-data signal).
@ -64,14 +64,14 @@ static gboolean push_data (CustomData *data) {
gint16 *raw;
gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
gfloat freq;
/* Create a new empty buffer */
buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);
/* Set its timestamp and duration */
GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples, GST_SECOND, SAMPLE_RATE);
GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (CHUNK_SIZE, GST_SECOND, SAMPLE_RATE);
/* Generate some psychodelic waveforms */
raw = (gint16 *)GST_BUFFER_DATA (buffer);
data->c += data->d;
@ -83,21 +83,21 @@ static gboolean push_data (CustomData *data) {
raw[i] = (gint16)(500 * data->a);
}
data->num_samples += num_samples;
/* Push the buffer into the appsrc */
g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);
/* Free the buffer now that we are done with it */
gst_buffer_unref (buffer);
if (ret != GST_FLOW_OK) {
/* We got some error, stop sending data */
return FALSE;
}
return TRUE;
}
/* This signal callback triggers when appsrc needs data. Here, we add an idle handler
* to the mainloop to start pushing data into the appsrc */
static void start_feed (GstElement *source, guint size, CustomData *data) {
@ -106,7 +106,7 @@ static void start_feed (GstElement *source, guint size, CustomData *data) {
data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
}
}
/* This callback triggers when appsrc has enough data and we can stop sending.
* We remove the idle handler from the mainloop */
static void stop_feed (GstElement *source, CustomData *data) {
@ -116,31 +116,31 @@ static void stop_feed (GstElement *source, CustomData *data) {
data->sourceid = 0;
}
}
/* This function is called when an error message is posted on the bus */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err;
gchar *debug_info;
/* Print error details on the screen */
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
g_main_loop_quit (data->main_loop);
}
/* This function is called when playbin2 has created the appsrc element, so we have
* a chance to configure it. */
static void source_setup (GstElement *pipeline, GstElement *source, CustomData *data) {
gchar *audio_caps_text;
GstCaps *audio_caps;
g_print ("Source has been created. Configuring.\n");
data->app_source = source;
/* Configure appsrc */
audio_caps_text = g_strdup_printf (AUDIO_CAPS, SAMPLE_RATE);
audio_caps = gst_caps_from_string (audio_caps_text);
@ -150,36 +150,36 @@ static void source_setup (GstElement *pipeline, GstElement *source, CustomData *
gst_caps_unref (audio_caps);
g_free (audio_caps_text);
}
int main(int argc, char *argv[]) {
CustomData data;
GstBus *bus;
/* Initialize cumstom data structure */
memset (&data, 0, sizeof (data));
data.b = 1; /* For waveform generation */
data.d = 1;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the playbin2 element */
data.pipeline = gst_parse_launch ("playbin2 uri=appsrc://", NULL);
g_signal_connect (data.pipeline, "source-setup", G_CALLBACK (source_setup), &data);
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
bus = gst_element_get_bus (data.pipeline);
gst_bus_add_signal_watch (bus);
g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
gst_object_unref (bus);
/* Start playing the pipeline */
gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
/* Create a GLib Main Loop and set it to run */
data.main_loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (data.main_loop);
/* Free resources */
gst_element_set_state (data.pipeline, GST_STATE_NULL);
gst_object_unref (data.pipeline);
@ -214,10 +214,10 @@ caps:
static void source_setup (GstElement *pipeline, GstElement *source, CustomData *data) {
gchar *audio_caps_text;
GstCaps *audio_caps;
g_print ("Source has been created. Configuring.\n");
data->app_source = source;
/* Configure appsrc */
audio_caps_text = g_strdup_printf (AUDIO_CAPS, SAMPLE_RATE);
audio_caps = gst_caps_from_string (audio_caps_text);
@ -262,11 +262,10 @@ It has been a pleasure having you here, and see you soon\!
## Attachments:
![](images/icons/bullet_blue.gif)
[playback-tutorial-3.c](attachments/1442200/2424850.c) (text/plain)
[playback-tutorial-3.c](attachments/1442200/2424850.c) (text/plain)
![](images/icons/bullet_blue.gif)
[vs2010.zip](attachments/1442200/2424849.zip) (application/zip)
[vs2010.zip](attachments/1442200/2424849.zip) (application/zip)
![](images/icons/bullet_blue.gif)
[playback-tutorial-3.c](attachments/1442200/2424848.c) (text/plain)
[playback-tutorial-3.c](attachments/1442200/2424848.c) (text/plain)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Playback tutorial 4: Progressive streaming
# Playback tutorial 4: Progressive streaming
This page last changed on Sep 13, 2012 by xartigas.
@ -57,21 +57,21 @@ Copy this code into a text file named `playback-tutorial-4.c`.
``` theme: Default; brush: cpp; gutter: true
#include <gst/gst.h>
#include <string.h>
#define GRAPH_LENGTH 80
/* playbin2 flags */
typedef enum {
GST_PLAY_FLAG_DOWNLOAD = (1 << 7) /* Enable progressive download (on selected formats) */
} GstPlayFlags;
typedef struct _CustomData {
gboolean is_live;
GstElement *pipeline;
GMainLoop *loop;
gint buffering_level;
} CustomData;
static void got_location (GstObject *gstobject, GstObject *prop_object, GParamSpec *prop, gpointer data) {
gchar *location;
g_object_get (G_OBJECT (prop_object), "temp-location", &location, NULL);
@ -79,19 +79,19 @@ static void got_location (GstObject *gstobject, GstObject *prop_object, GParamSp
/* Uncomment this line to keep the temporary file after the program exits */
/* g_object_set (G_OBJECT (prop_object), "temp-remove", FALSE, NULL); */
}
static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR: {
GError *err;
gchar *debug;
gst_message_parse_error (msg, &err, &debug);
g_print ("Error: %s\n", err->message);
g_error_free (err);
g_free (debug);
gst_element_set_state (data->pipeline, GST_STATE_READY);
g_main_loop_quit (data->loop);
break;
@ -104,9 +104,9 @@ static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {
case GST_MESSAGE_BUFFERING:
/* If the stream is live, we do not care about buffering. */
if (data->is_live) break;
gst_message_parse_buffering (msg, &data->buffering_level);
/* Wait until buffering is complete before start/resume playing */
if (data->buffering_level < 100)
gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
@ -123,11 +123,11 @@ static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {
break;
}
}
static gboolean refresh_ui (CustomData *data) {
GstQuery *query;
gboolean result;
query = gst_query_new_buffering (GST_FORMAT_PERCENT);
result = gst_element_query (data->pipeline, query);
if (result) {
@ -135,10 +135,10 @@ static gboolean refresh_ui (CustomData *data) {
gchar graph[GRAPH_LENGTH + 1];
GstFormat format = GST_FORMAT_TIME;
gint64 position = 0, duration = 0;
memset (graph, ' ', GRAPH_LENGTH);
graph[GRAPH_LENGTH] = '\0';
n_ranges = gst_query_get_n_buffering_ranges (query);
for (range = 0; range < n_ranges; range++) {
gint64 start, stop;
@ -163,11 +163,11 @@ static gboolean refresh_ui (CustomData *data) {
}
g_print ("\r");
}
return TRUE;
}
int main(int argc, char *argv[]) {
GstElement *pipeline;
GstBus *bus;
@ -175,26 +175,26 @@ int main(int argc, char *argv[]) {
GMainLoop *main_loop;
CustomData data;
guint flags;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Initialize our data structure */
memset (&data, 0, sizeof (data));
data.buffering_level = 100;
/* Build the pipeline */
pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
bus = gst_element_get_bus (pipeline);
/* Set the download flag */
g_object_get (pipeline, "flags", &flags, NULL);
flags |= GST_PLAY_FLAG_DOWNLOAD;
g_object_set (pipeline, "flags", flags, NULL);
/* Uncomment this line to limit the amount of downloaded data */
/* g_object_set (pipeline, "ring-buffer-max-size", (guint64)4000000, NULL); */
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
@ -204,20 +204,20 @@ int main(int argc, char *argv[]) {
} else if (ret == GST_STATE_CHANGE_NO_PREROLL) {
data.is_live = TRUE;
}
main_loop = g_main_loop_new (NULL, FALSE);
data.loop = main_loop;
data.pipeline = pipeline;
gst_bus_add_signal_watch (bus);
g_signal_connect (bus, "message", G_CALLBACK (cb_message), &data);
g_signal_connect (pipeline, "deep-notify::temp-location", G_CALLBACK (got_location), NULL);
/* Register a function that GLib will call every second */
g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);
g_main_loop_run (main_loop);
/* Free resources */
g_main_loop_unref (main_loop);
gst_object_unref (bus);
@ -442,9 +442,8 @@ It has been a pleasure having you here, and see you soon\!
## Attachments:
![](images/icons/bullet_blue.gif)
[playback-tutorial-4.c](attachments/327808/2424846.c) (text/plain)
[playback-tutorial-4.c](attachments/327808/2424846.c) (text/plain)
![](images/icons/bullet_blue.gif)
[vs2010.zip](attachments/327808/2424847.zip) (application/zip)
[vs2010.zip](attachments/327808/2424847.zip) (application/zip)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Playback tutorial 5: Color Balance
# Playback tutorial 5: Color Balance
This page last changed on Jun 25, 2012 by xartigas.
@ -48,24 +48,24 @@ Copy this code into a text file named `playback-tutorial-5.c`.
#include <string.h>
#include <gst/gst.h>
#include <gst/interfaces/colorbalance.h>
typedef struct _CustomData {
GstElement *pipeline;
GMainLoop *loop;
} CustomData;
/* Process a color balance command */
static void update_color_channel (const gchar *channel_name, gboolean increase, GstColorBalance *cb) {
gdouble step;
gint value;
GstColorBalanceChannel *channel = NULL;
const GList *channels, *l;
/* Retrieve the list of channels and locate the requested one */
channels = gst_color_balance_list_channels (cb);
for (l = channels; l != NULL; l = l->next) {
GstColorBalanceChannel *tmp = (GstColorBalanceChannel *)l->data;
if (g_strrstr (tmp->label, channel_name)) {
channel = tmp;
break;
@ -73,7 +73,7 @@ static void update_color_channel (const gchar *channel_name, gboolean increase,
}
if (!channel)
return;
/* Change the channel's value */
step = 0.1 * (channel->max_value - channel->min_value);
value = gst_color_balance_get_value (cb, channel);
@ -88,11 +88,11 @@ static void update_color_channel (const gchar *channel_name, gboolean increase,
}
gst_color_balance_set_value (cb, channel, value);
}
/* Output the current values of all Color Balance channels */
static void print_current_values (GstElement *pipeline) {
const GList *channels, *l;
/* Output Color Balance values */
channels = gst_color_balance_list_channels (GST_COLOR_BALANCE (pipeline));
for (l = channels; l != NULL; l = l->next) {
@ -103,15 +103,15 @@ static void print_current_values (GstElement *pipeline) {
}
g_print ("\n");
}
/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
gchar *str = NULL;
if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) != G_IO_STATUS_NORMAL) {
return TRUE;
}
switch (g_ascii_tolower (str[0])) {
case 'c':
update_color_channel ("CONTRAST", g_ascii_isupper (str[0]), GST_COLOR_BALANCE (data->pipeline));
@ -131,25 +131,25 @@ static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomDa
default:
break;
}
g_free (str);
print_current_values (data->pipeline);
return TRUE;
}
int main(int argc, char *argv[]) {
CustomData data;
GstStateChangeReturn ret;
GIOChannel *io_stdin;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Initialize our data structure */
memset (&data, 0, sizeof (data));
/* Print usage map */
g_print (
"USAGE: Choose one of the following options, then press enter:\n"
@ -158,10 +158,10 @@ int main(int argc, char *argv[]) {
" 'H' to increase hue, 'h' to decrease hue\n"
" 'S' to increase saturation, 's' to decrease saturation\n"
" 'Q' to quit\n");
/* Build the pipeline */
data.pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Add a keyboard watch so we get notified of keystrokes */
#ifdef _WIN32
io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
@ -169,7 +169,7 @@ int main(int argc, char *argv[]) {
io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
/* Start playing */
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
@ -178,11 +178,11 @@ int main(int argc, char *argv[]) {
return -1;
}
print_current_values (data.pipeline);
/* Create a GLib Main Loop and set it to run */
data.loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (data.loop);
/* Free resources */
g_main_loop_unref (data.loop);
g_io_channel_unref (io_stdin);
@ -229,7 +229,7 @@ keystrokes can be monitored.
/* Output the current values of all Color Balance channels */
static void print_current_values (GstElement *pipeline) {
const GList *channels, *l;
/* Output Color Balance values */
channels = gst_color_balance_list_channels (GST_COLOR_BALANCE (pipeline));
for (l = channels; l != NULL; l = l->next) {
@ -262,12 +262,12 @@ static void update_color_channel (const gchar *channel_name, gboolean increase,
gint value;
GstColorBalanceChannel *channel = NULL;
const GList *channels, *l;
/* Retrieve the list of channels and locate the requested one */
channels = gst_color_balance_list_channels (cb);
for (l = channels; l != NULL; l = l->next) {
GstColorBalanceChannel *tmp = (GstColorBalanceChannel *)l->data;
if (g_strrstr (tmp->label, channel_name)) {
channel = tmp;
break;
@ -322,9 +322,8 @@ It has been a pleasure having you here, and see you soon\!
## Attachments:
![](images/icons/bullet_blue.gif)
[playback-tutorial-5.c](attachments/327804/2424874.c) (text/plain)
[playback-tutorial-5.c](attachments/327804/2424874.c) (text/plain)
![](images/icons/bullet_blue.gif)
[vs2010.zip](attachments/327804/2424875.zip) (application/zip)
[vs2010.zip](attachments/327804/2424875.zip) (application/zip)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Playback tutorial 6: Audio visualization
# Playback tutorial 6: Audio visualization
This page last changed on Jun 26, 2012 by xartigas.
@ -43,25 +43,25 @@ Copy this code into a text file named `playback-tutorial-6.c`.
``` theme: Default; brush: cpp; gutter: true
#include <gst/gst.h>
/* playbin2 flags */
typedef enum {
GST_PLAY_FLAG_VIS = (1 << 3) /* Enable rendering of visualizations when there is no video stream. */
} GstPlayFlags;
/* Return TRUE if this is a Visualization element */
static gboolean filter_vis_features (GstPluginFeature *feature, gpointer data) {
GstElementFactory *factory;
if (!GST_IS_ELEMENT_FACTORY (feature))
return FALSE;
factory = GST_ELEMENT_FACTORY (feature);
if (!g_strrstr (gst_element_factory_get_klass (factory), "Visualization"))
return FALSE;
return TRUE;
}
int main(int argc, char *argv[]) {
GstElement *pipeline, *vis_plugin;
GstBus *bus;
@ -69,59 +69,59 @@ int main(int argc, char *argv[]) {
GList *list, *walk;
GstElementFactory *selected_factory = NULL;
guint flags;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Get a list of all visualization plugins */
list = gst_registry_feature_filter (gst_registry_get_default (), filter_vis_features, FALSE, NULL);
/* Print their names */
g_print("Available visualization plugins:\n");
for (walk = list; walk != NULL; walk = g_list_next (walk)) {
const gchar *name;
GstElementFactory *factory;
factory = GST_ELEMENT_FACTORY (walk->data);
name = gst_element_factory_get_longname (factory);
g_print(" %s\n", name);
if (selected_factory == NULL || g_str_has_prefix (name, "GOOM")) {
selected_factory = factory;
}
}
/* Don't use the factory if it's still empty */
/* e.g. no visualization plugins found */
if (!selected_factory) {
g_print ("No visualization plugins found!\n");
return -1;
}
/* We have now selected a factory for the visualization element */
g_print ("Selected '%s'\n", gst_element_factory_get_longname (selected_factory));
vis_plugin = gst_element_factory_create (selected_factory, NULL);
if (!vis_plugin)
return -1;
/* Build the pipeline */
pipeline = gst_parse_launch ("playbin2 uri=http://radio.hbr1.com:19800/ambient.ogg", NULL);
/* Set the visualization flag */
g_object_get (pipeline, "flags", &flags, NULL);
flags |= GST_PLAY_FLAG_VIS;
g_object_set (pipeline, "flags", flags, NULL);
/* set vis plugin for playbin2 */
g_object_set (pipeline, "vis-plugin", vis_plugin, NULL);
/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
@ -189,13 +189,13 @@ only the Visualization plugins:
/* Return TRUE if this is a Visualization element */
static gboolean filter_vis_features (GstPluginFeature *feature, gpointer data) {
GstElementFactory *factory;
if (!GST_IS_ELEMENT_FACTORY (feature))
return FALSE;
factory = GST_ELEMENT_FACTORY (feature);
if (!g_strrstr (gst_element_factory_get_klass (factory), "Visualization"))
return FALSE;
return TRUE;
}
```
@ -221,11 +221,11 @@ g_print("Available visualization plugins:\n");
for (walk = list; walk != NULL; walk = g_list_next (walk)) {
const gchar *name;
GstElementFactory *factory;
factory = GST_ELEMENT_FACTORY (walk->data);
name = gst_element_factory_get_longname (factory);
g_print(" %s\n", name);
if (selected_factory == NULL || g_str_has_prefix (name, "GOOM")) {
selected_factory = factory;
}
@ -268,9 +268,8 @@ It has been a pleasure having you here, and see you soon\!
## Attachments:
![](images/icons/bullet_blue.gif)
[vs2010.zip](attachments/327802/2424878.zip) (application/zip)
[vs2010.zip](attachments/327802/2424878.zip) (application/zip)
![](images/icons/bullet_blue.gif)
[playback-tutorial-6.c](attachments/327802/2424879.c) (text/plain)
[playback-tutorial-6.c](attachments/327802/2424879.c) (text/plain)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Playback tutorial 7: Custom playbin2 sinks
# Playback tutorial 7: Custom playbin2 sinks
This page last changed on Dec 03, 2012 by xartigas.
@ -57,19 +57,19 @@ Copy this code into a text file named `playback-tutorial-7.c`.
``` theme: Default; brush: cpp; gutter: true
#include <gst/gst.h>
int main(int argc, char *argv[]) {
GstElement *pipeline, *bin, *equalizer, *convert, *sink;
GstPad *pad, *ghost_pad;
GstBus *bus;
GstMessage *msg;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Build the pipeline */
pipeline = gst_parse_launch ("playbin2 uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
/* Create the elements inside the sink bin */
equalizer = gst_element_factory_make ("equalizer-3bands", "equalizer");
convert = gst_element_factory_make ("audioconvert", "convert");
@ -78,7 +78,7 @@ int main(int argc, char *argv[]) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Create the sink bin, add the elements and link them */
bin = gst_bin_new ("audio_sink_bin");
gst_bin_add_many (GST_BIN (bin), equalizer, convert, sink, NULL);
@ -88,21 +88,21 @@ int main(int argc, char *argv[]) {
gst_pad_set_active (ghost_pad, TRUE);
gst_element_add_pad (bin, ghost_pad);
gst_object_unref (pad);
/* Configure the equalizer */
g_object_set (G_OBJECT (equalizer), "band1", (gdouble)-24.0, NULL);
g_object_set (G_OBJECT (equalizer), "band2", (gdouble)-24.0, NULL);
/* Set playbin2's audio sink to be our sink bin */
g_object_set (GST_OBJECT (pipeline), "audio-sink", bin, NULL);
/* Start playing */
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Free resources */
if (msg != NULL)
gst_message_unref (msg);
@ -236,11 +236,10 @@ It has been a pleasure having you here, and see you soon\!
## Attachments:
![](images/icons/bullet_blue.gif)
[bin-element-ghost.png](attachments/1441842/2424880.png) (image/png)
[bin-element-ghost.png](attachments/1441842/2424880.png) (image/png)
![](images/icons/bullet_blue.gif)
[playback-tutorial-7.c](attachments/1441842/2424881.c) (text/plain)
[playback-tutorial-7.c](attachments/1441842/2424881.c) (text/plain)
![](images/icons/bullet_blue.gif)
[vs2010.zip](attachments/1441842/2424882.zip) (application/zip)
[vs2010.zip](attachments/1441842/2424882.zip) (application/zip)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Playback tutorial 8: Hardware-accelerated video decoding
# Playback tutorial 8: Hardware-accelerated video decoding
This page last changed on Jul 24, 2012 by xartigas.
@ -177,20 +177,20 @@ as shown in this code:
static void enable_factory (const gchar *name, gboolean enable) {
GstRegistry *registry = NULL;
GstElementFactory *factory = NULL;
registry = gst_registry_get_default ();
if (!registry) return;
factory = gst_element_factory_find (name);
if (!factory) return;
if (enable) {
gst_plugin_feature_set_rank (GST_PLUGIN_FEATURE (factory), GST_RANK_PRIMARY + 1);
}
else {
gst_plugin_feature_set_rank (GST_PLUGIN_FEATURE (factory), GST_RANK_NONE);
}
gst_registry_add_feature (registry, GST_PLUGIN_FEATURE (factory));
return;
}
@ -348,4 +348,3 @@ accelerated video decoding. Particularly,
It has been a pleasure having you here, and see you soon\!
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Playback tutorial 9: Digital audio pass-through
# Playback tutorial 9: Digital audio pass-through
This page last changed on Jul 24, 2012 by xartigas.
@ -106,4 +106,3 @@ In particular, it has shown that:
It has been a pleasure having you here, and see you soon\!
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Playback tutorials
# Playback tutorials
This page last changed on Mar 28, 2012 by xartigas.
@ -8,4 +8,3 @@ These tutorials explain everything you need to know to produce a media
playback application using GStreamer.
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Qt tutorials
# Qt tutorials
This page last changed on May 02, 2013 by tdfischer.
@ -16,4 +16,3 @@ previous one and adds progressively more functionality, until a working
media player application is obtained in \#FIXME\#
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : QtGStreamer vs C GStreamer
# QtGStreamer vs C GStreamer
This page last changed on May 24, 2013 by xartigas.
@ -110,4 +110,3 @@ GstElement* gstElement = GST_ELEMENT(qgstElement);
```
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Releases
# Releases
This page last changed on Jun 12, 2013 by xartigas.
@ -23,4 +23,3 @@ bottom):
### [2012.5 Amazon](2012.5%2BAmazon.html)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Table of Concepts
# Table of Concepts
This page last changed on Jun 06, 2012 by xartigas.
@ -55,4 +55,3 @@ concepts is discussed.
Availability](Basic%2Btutorial%2B7%253A%2BMultithreading%2Band%2BPad%2BAvailability.html)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# Tutorials
# Tutorials
## Welcome to the GStreamer SDK Tutorials!
@ -75,4 +75,4 @@ publicly available and the copyright remains with their respective
authors. In some cases they have been re-encoded for demonstration
purposes.
- [Sintel, the Durian Open Movie Project](http://www.sintel.org/)
- [Sintel, the Durian Open Movie Project](http://www.sintel.org/)

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Upcoming tutorials
# Upcoming tutorials
This page last changed on May 24, 2013 by xartigas.
@ -18,4 +18,3 @@ Playback tutorials:
- DVD playback
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Using appsink/appsrc in Qt
# Using appsink/appsrc in Qt
This page last changed on May 24, 2013 by xartigas.
@ -243,4 +243,3 @@ data into and out of a GStreamer pipeline.
It has been a pleasure having you here, and see you soon\!
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : Windows deployment
# Windows deployment
This page last changed on Nov 28, 2012 by xartigas.
@ -258,4 +258,3 @@ Get the ZIP file with all Merge Modules for your architecture:
</table>
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : gst-inspect
# gst-inspect
This page last changed on May 30, 2012 by xartigas.
@ -210,4 +210,3 @@ Element Properties:
```
Document generated by Confluence on Oct 08, 2015 10:28

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : gst-launch
# gst-launch
This page last changed on May 30, 2012 by xartigas.
@ -97,7 +97,7 @@ Creates an element of type ELEMENTTYPE and sets the PROPERTIES.
PROPERTY=VALUE ...
Sets the property to the specified value. You can use **gst-inspect**(1)
to find out about properties and allowed values of different elements.
to find out about properties and allowed values of different elements.
Enumeration properties can be set by name, nick or value.
**Bins**
@ -126,7 +126,7 @@ used. This works across bins. If a padname is given, the link is done
with these pads. If no pad names are given all possibilities are tried
and a matching pad is used. If multiple padnames are given, both sides
must have the same number of pads specified and multiple links are done
in the given order.
in the given order.
So the simplest link is a simple exclamation mark, that links the
element to the left of it to the element right of it.
@ -140,25 +140,25 @@ chain caps, you can add more caps in the same format afterwards.
**Properties**
NAME=*\[(TYPE)\]*VALUE
NAME=*\[(TYPE)\]*VALUE
in lists and ranges: *\[(TYPE)\]*VALUE
Sets the requested property in capabilities. The name is an alphanumeric
value and the type can have the following case-insensitive values:
\- **i** or **int** for integer values or ranges
\- **f** or **float** for float values or ranges
\- **4** or **fourcc** for FOURCC values
\- **b**, **bool** or **boolean** for boolean values
\- **s**, **str** or **string** for strings
\- **fraction** for fractions (framerate, pixel-aspect-ratio)
\- **l** or **list** for lists
value and the type can have the following case-insensitive values:
\- **i** or **int** for integer values or ranges
\- **f** or **float** for float values or ranges
\- **4** or **fourcc** for FOURCC values
\- **b**, **bool** or **boolean** for boolean values
\- **s**, **str** or **string** for strings
\- **fraction** for fractions (framerate, pixel-aspect-ratio)
\- **l** or **list** for lists
If no type was given, the following order is tried: integer, float,
boolean, string.
boolean, string.
Integer values must be parsable by **strtol()**, floats by **strtod()**.
FOURCC values may either be integers or strings. Boolean values are
(case insensitive) *yes*, *no*, *true* or *false* and may like strings
be escaped with " or '.
Ranges are in this format: \[ VALUE, VALUE \]
be escaped with " or '.
Ranges are in this format: \[ VALUE, VALUE \]
Lists use this format: ( VALUE *\[, VALUE ...\]* )
## Pipeline Control
@ -166,7 +166,7 @@ Lists use this format: ( VALUE *\[, VALUE ...\]* )
A pipeline can be controlled by signals. SIGUSR2 will stop the pipeline
(GST\_STATE\_NULL); SIGUSR1 will put it back to play
(GST\_STATE\_PLAYING). By default, the pipeline will start in the
playing state.
playing state.
There are currently no signals defined to go into the ready or pause
(GST\_STATE\_READY and GST\_STATE\_PAUSED) state explicitely.
@ -186,53 +186,53 @@ ffmpegcolorspace (for video) in front of the sink to make things work.
**Audio playback**
**gst-launch filesrc location=music.mp3 \! mad \! audioconvert \!
audioresample \! osssink**
audioresample \! osssink**
Play the mp3 music file "music.mp3" using a libmad-based plug-in and
output to an OSS device
**gst-launch filesrc location=music.ogg \! oggdemux \! vorbisdec \!
audioconvert \! audioresample \! osssink**
audioconvert \! audioresample \! osssink**
Play an Ogg Vorbis format file
**gst-launch gnomevfssrc location=music.mp3 \! mad \! osssink
**gst-launch gnomevfssrc location=music.mp3 \! mad \! osssink
gst-launch gnomevfssrc location=<http://domain.com/music.mp3> \! mad \!
audioconvert \! audioresample \! osssink**
audioconvert \! audioresample \! osssink**
Play an mp3 file or an http stream using GNOME-VFS
**gst-launch gnomevfssrc location=<smb://computer/music.mp3> \! mad \!
audioconvert \! audioresample \! osssink**
audioconvert \! audioresample \! osssink**
Use GNOME-VFS to play an mp3 file located on an SMB server
**Format conversion**
**gst-launch filesrc location=music.mp3 \! mad \! audioconvert \!
vorbisenc \! oggmux \! filesink location=music.ogg**
vorbisenc \! oggmux \! filesink location=music.ogg**
Convert an mp3 music file to an Ogg Vorbis file
**gst-launch filesrc location=music.mp3 \! mad \! audioconvert \!
flacenc \! filesink location=test.flac**
flacenc \! filesink location=test.flac**
Convert to the FLAC format
**Other**
**gst-launch filesrc location=music.wav \! wavparse \! audioconvert \!
audioresample \! osssink**
audioresample \! osssink**
Plays a .WAV file that contains raw audio data (PCM).
**gst-launch filesrc location=music.wav \! wavparse \! audioconvert \!
vorbisenc \! oggmux \! filesink location=music.ogg
vorbisenc \! oggmux \! filesink location=music.ogg
gst-launch filesrc location=music.wav \! wavparse \! audioconvert \!
lame \! filesink location=music.mp3**
lame \! filesink location=music.mp3**
Convert a .WAV file containing raw audio data into an Ogg Vorbis or mp3
file
**gst-launch cdparanoiasrc mode=continuous \! audioconvert \! lame \!
id3v2mux \! filesink location=cd.mp3**
id3v2mux \! filesink location=cd.mp3**
rips all tracks from compact disc and convert them into a single mp3
file
**gst-launch cdparanoiasrc track=5 \! audioconvert \! lame \! id3v2mux
\! filesink location=track5.mp3**
\! filesink location=track5.mp3**
rips track 5 from the CD and converts it into a single mp3 file
Using **gst-inspect**(1), it is possible to discover settings like the
@ -243,29 +243,29 @@ you, e.g.: **gst-launch [cdda://5]() \! lame vbr=new vbr-quality=6 \!
filesink location=track5.mp3**
**gst-launch osssrc \! audioconvert \! vorbisenc \! oggmux \! filesink
location=input.ogg**
location=input.ogg**
records sound from your audio input and encodes it into an ogg file
**Video**
**gst-launch filesrc location=JB\_FF9\_TheGravityOfLove.mpg \! dvddemux
\! mpeg2dec \! xvimagesink**
\! mpeg2dec \! xvimagesink**
Display only the video portion of an MPEG-1 video file, outputting to an
X display window
**gst-launch filesrc location=/flflfj.vob \! dvddemux \! mpeg2dec \!
sdlvideosink**
sdlvideosink**
Display the video portion of a .vob file (used on DVDs), outputting to
an SDL window
**gst-launch filesrc location=movie.mpg \! dvddemux name=demuxer
demuxer. \! queue \! mpeg2dec \! sdlvideosink demuxer. \! queue \! mad
\! audioconvert \! audioresample \! osssink**
\! audioconvert \! audioresample \! osssink**
Play both video and audio portions of an MPEG movie
**gst-launch filesrc location=movie.mpg \! mpegdemux name=demuxer
demuxer. \! queue \! mpeg2dec \! ffmpegcolorspace \! sdlvideosink
demuxer. \! queue \! mad \! audioconvert \! audioresample \! osssink**
demuxer. \! queue \! mad \! audioconvert \! audioresample \! osssink**
Play an AVI movie with an external text subtitle stream
This example also shows how to refer to specific pads by name if an
@ -288,25 +288,25 @@ Stream video using RTP and network elements.
**gst-launch v4l2src \!
video/x-raw-yuv,width=128,height=96,format='(fourcc)'UYVY \!
ffmpegcolorspace \! ffenc\_h263 \! video/x-h263 \! rtph263ppay pt=96 \!
udpsink host=192.168.1.1 port=5000 sync=false**
udpsink host=192.168.1.1 port=5000 sync=false**
Use this command on the receiver
**gst-launch udpsrc port=5000 \! application/x-rtp,
clock-rate=90000,payload=96 \! rtph263pdepay queue-delay=0 \!
ffdec\_h263 \! xvimagesink**
ffdec\_h263 \! xvimagesink**
This command would be run on the transmitter
**Diagnostic**
**gst-launch -v fakesrc num-buffers=16 \! fakesink**
**gst-launch -v fakesrc num-buffers=16 \! fakesink**
Generate a null stream and ignore it (and print out details).
**gst-launch audiotestsrc \! audioconvert \! audioresample \!
osssink**
osssink**
Generate a pure sine tone to test the audio output
**gst-launch videotestsrc \! xvimagesink
gst-launch videotestsrc \! ximagesink**
**gst-launch videotestsrc \! xvimagesink
gst-launch videotestsrc \! ximagesink**
Generate a familiar test pattern to test the video output
**Automatic linking**
@ -315,12 +315,12 @@ You can use the decodebin element to automatically select the right
elements to get a working pipeline.
**gst-launch filesrc location=musicfile \! decodebin \! audioconvert \!
audioresample \! osssink**
audioresample \! osssink**
Play any supported audio format
**gst-launch filesrc location=videofile \! decodebin name=decoder
decoder. \! queue \! audioconvert \! audioresample \! osssink decoder.
\! ffmpegcolorspace \! xvimagesink**
\! ffmpegcolorspace \! xvimagesink**
Play any supported video format with video and audio output. Threads are
used automatically. To make this even easier, you can use the playbin
element:
@ -333,12 +333,12 @@ These examples show you how to use filtered caps.
**gst-launch videotestsrc \!
'video/x-raw-yuv,format=(fourcc)YUY2;video/x-raw-yuv,format=(fourcc)YV12'
\! xvimagesink**
\! xvimagesink**
Show a test image and use the YUY2 or YV12 video format for this.
**gst-launch osssrc \!
'audio/x-raw-int,rate=\[32000,64000\],width=\[16,32\],depth={16,24,32},signed=(boolean)true'
\! wavenc \! filesink location=recording.wav**
\! wavenc \! filesink location=recording.wav**
record audio and write it to a .wav file. Force usage of signed 16 to 32
bit samples and a sample rate between 32kHz and 64KHz.
@ -403,4 +403,3 @@ let it core dump). Then get a stack trace in the usual way
<!-- end list -->
Document generated by Confluence on Oct 08, 2015 10:28

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : iOS tutorial 1: Link against GStreamer
# iOS tutorial 1: Link against GStreamer
This page last changed on May 06, 2013 by xartigas.
@ -104,7 +104,7 @@ GStreamer version to display at the label. That's it\!
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
gst_backend = [[GStreamerBackend alloc] init];
label.text = [NSString stringWithFormat:@"Welcome to %@!", [gst_backend getGStreamerVersion]];
}
@ -135,13 +135,12 @@ It has been a pleasure having you here, and see you soon\!
![](images/icons/bullet_blue.gif)
[ios-tutorial1-screenshot.png](attachments/3014792/3113602.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[ios-tutorial1-screenshot.png](attachments/3014792/3113603.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[ios-tutorial1-screenshot.png](attachments/3014792/3113601.png)
(image/png)
(image/png)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : iOS tutorial 2: A running pipeline
# iOS tutorial 2: A running pipeline
This page last changed on May 13, 2013 by xartigas.
@ -80,7 +80,7 @@ behalf:
- (void)viewDidLoad
{
[super viewDidLoad];
play_button.enabled = FALSE;
pause_button.enabled = FALSE;
@ -143,7 +143,7 @@ This instance is created in the `viewDidLoad` function through a custom
- (void)viewDidLoad
{
[super viewDidLoad];
play_button.enabled = FALSE;
pause_button.enabled = FALSE;
@ -309,7 +309,7 @@ static void error_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *self)
GError *err;
gchar *debug_info;
gchar *message_string;
gst_message_parse_error (msg, &err, &debug_info);
message_string = g_strdup_printf ("Error received from element %s: %s", GST_OBJECT_NAME (msg->src), err->message);
g_clear_error (&err);
@ -358,7 +358,7 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *se
/* Create our own GLib Main Context and make it the default one */
context = g_main_context_new ();
g_main_context_push_thread_default(context);
/* Build pipeline */
pipeline = gst_parse_launch("audiotestsrc ! audioconvert ! audioresample ! autoaudiosink", &error);
if (error) {
@ -368,7 +368,7 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *se
g_free (message);
return;
}
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
bus = gst_element_get_bus (pipeline);
bus_source = gst_bus_create_watch (bus);
@ -378,7 +378,7 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *se
g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, (__bridge void *)self);
g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, (__bridge void *)self);
gst_object_unref (bus);
/* Create a GLib Main Loop and set it to run */
GST_DEBUG ("Entering main loop...");
main_loop = g_main_loop_new (context, FALSE);
@ -387,13 +387,13 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *se
GST_DEBUG ("Exited main loop");
g_main_loop_unref (main_loop);
main_loop = NULL;
/* Free resources */
g_main_context_pop_thread_default(context);
g_main_context_unref (context);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return;
}
@ -495,7 +495,7 @@ static void error_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *self)
GError *err;
gchar *debug_info;
gchar *message_string;
gst_message_parse_error (msg, &err, &debug_info);
message_string = g_strdup_printf ("Error received from element %s: %s", GST_OBJECT_NAME (msg->src), err->message);
g_clear_error (&err);
@ -659,10 +659,9 @@ It has been a pleasure having you here, and see you soon\!
![](images/icons/bullet_blue.gif)
[ios-tutorial2-screenshot.png](attachments/3571718/3538954.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[ios-tutorial2-screenshot.png](attachments/3571718/3538953.png)
(image/png)
(image/png)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : iOS tutorial 3: Video
# iOS tutorial 3: Video
This page last changed on May 13, 2013 by xartigas.
@ -95,10 +95,10 @@ behalf:
- (void)viewDidLoad
{
[super viewDidLoad];
play_button.enabled = FALSE;
pause_button.enabled = FALSE;
/* Make these constant for now, later tutorials will change them */
media_width = 320;
media_height = 240;
@ -183,10 +183,10 @@ media is constant and initialized in `viewDidLoad`:
- (void)viewDidLoad
{
[super viewDidLoad];
play_button.enabled = FALSE;
pause_button.enabled = FALSE;
/* Make these constant for now, later tutorials will change them */
media_width = 320;
media_height = 240;
@ -342,7 +342,7 @@ static void error_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *self)
GError *err;
gchar *debug_info;
gchar *message_string;
gst_message_parse_error (msg, &err, &debug_info);
message_string = g_strdup_printf ("Error received from element %s: %s", GST_OBJECT_NAME (msg->src), err->message);
g_clear_error (&err);
@ -391,7 +391,7 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *se
/* Create our own GLib Main Context and make it the default one */
context = g_main_context_new ();
g_main_context_push_thread_default(context);
/* Build pipeline */
pipeline = gst_parse_launch("videotestsrc ! warptv ! ffmpegcolorspace ! autovideosink", &error);
if (error) {
@ -404,7 +404,7 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *se
/* Set the pipeline to READY, so it can already accept a window handle */
gst_element_set_state(pipeline, GST_STATE_READY);
video_sink = gst_bin_get_by_interface(GST_BIN(pipeline), GST_TYPE_X_OVERLAY);
if (!video_sink) {
GST_ERROR ("Could not retrieve video sink");
@ -421,7 +421,7 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *se
g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, (__bridge void *)self);
g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, (__bridge void *)self);
gst_object_unref (bus);
/* Create a GLib Main Loop and set it to run */
GST_DEBUG ("Entering main loop...");
main_loop = g_main_loop_new (context, FALSE);
@ -430,13 +430,13 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *se
GST_DEBUG ("Exited main loop");
g_main_loop_unref (main_loop);
main_loop = NULL;
/* Free resources */
g_main_context_pop_thread_default(context);
g_main_context_unref (context);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return;
}
@ -548,7 +548,6 @@ To this avail, we create the `EaglUIView` class, derived from
@implementation EaglUIView
+ (Class) layerClass
{
return [CAEAGLLayer class];
@ -583,7 +582,6 @@ It has been a pleasure having you here, and see you soon\!
![](images/icons/bullet_blue.gif)
[ios-tutorial3-screenshot.png](attachments/3571736/3538955.png)
(image/png)
(image/png)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : iOS tutorial 4: A basic media player
# iOS tutorial 4: A basic media player
This page last changed on May 21, 2013 by xartigas.
@ -162,10 +162,10 @@ this view is collapsed by default. Click here to expand…
- (void)viewDidLoad
{
[super viewDidLoad];
play_button.enabled = FALSE;
pause_button.enabled = FALSE;
/* As soon as the GStreamer backend knows the real values, these ones will be replaced */
media_width = 320;
media_height = 240;
@ -702,7 +702,7 @@ static void error_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *self)
GError *err;
gchar *debug_info;
gchar *message_string;
gst_message_parse_error (msg, &err, &debug_info);
message_string = g_strdup_printf ("Error received from element %s: %s", GST_OBJECT_NAME (msg->src), err->message);
g_clear_error (&err);
@ -839,7 +839,7 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *se
/* Create our own GLib Main Context and make it the default one */
context = g_main_context_new ();
g_main_context_push_thread_default(context);
/* Build pipeline */
pipeline = gst_parse_launch("playbin2", &error);
if (error) {
@ -852,7 +852,7 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *se
/* Set the pipeline to READY, so it can already accept a window handle */
gst_element_set_state(pipeline, GST_STATE_READY);
video_sink = gst_bin_get_by_interface(GST_BIN(pipeline), GST_TYPE_X_OVERLAY);
if (!video_sink) {
GST_ERROR ("Could not retrieve video sink");
@ -888,14 +888,14 @@ static void state_changed_cb (GstBus *bus, GstMessage *msg, GStreamerBackend *se
GST_DEBUG ("Exited main loop");
g_main_loop_unref (main_loop);
main_loop = NULL;
/* Free resources */
g_main_context_pop_thread_default(context);
g_main_context_unref (context);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
pipeline = NULL;
ui_delegate = NULL;
ui_video_view = NULL;
@ -1236,7 +1236,6 @@ here into an acceptable iOS media player.
![](images/icons/bullet_blue.gif)
[ios-tutorial4-screenshot.png](attachments/3571758/3539044.png)
(image/png)
(image/png)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : iOS tutorial 5: A Complete media player
# iOS tutorial 5: A Complete media player
This page last changed on May 22, 2013 by
xartigas.
@ -66,13 +66,12 @@ It has been a pleasure having you here, and see you soon\!
![](images/icons/bullet_blue.gif)
[ios-tutorial5-screenshot0.png](attachments/3571769/3539071.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[ios-tutorial5-screenshot1.png](attachments/3571769/3539046.png)
(image/png)
(image/png)
![](images/icons/bullet_blue.gif)
[ios-tutorial5-screenshot0.png](attachments/3571769/3539045.png)
(image/png)
(image/png)
Document generated by Confluence on Oct 08, 2015 10:27

View file

@ -1,4 +1,4 @@
# GStreamer SDK documentation : iOS tutorials
# iOS tutorials
This page last changed on May 07, 2013 by xartigas.
@ -36,4 +36,3 @@ All iOS tutorials are split into the following classes:
`GStreamerBackend`.
Document generated by Confluence on Oct 08, 2015 10:27