Fix code block language tagging

commit 43a2465744 (parent c26379435d)
32 changed files with 269 additions and 269 deletions
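Every hunk in this commit applies the same one-line fix: the non-standard `lang=` prefix is dropped from the fenced code block info string (and fences that had no tag at all gain one), so standard Markdown highlighters recognize the language. A representative pair, taken verbatim from the first hunk below:

-``` lang=java
+``` java

C and Ruby fences change the same way (`lang=c` → `c`, `lang=ruby` → `ruby`), and previously untagged fences in the basic tutorials gain an explicit `c` tag.
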
@@ -54,7 +54,7 @@ messages sent from the C code (for errors and state changes).

**src/org/freedesktop/gstreamer/tutorials/tutorial\_2/Tutorial2.java**

-``` lang=java
+``` java
package org.freedesktop.gstreamer.tutorials.tutorial_2;

import android.app.Activity;
@@ -179,7 +179,7 @@ public class Tutorial2 extends Activity {
As usual, the first bit that gets executed is the static initializer of
the class:

-``` lang=java
+``` java
static {
System.loadLibrary("gstreamer_android");
System.loadLibrary("tutorial-2");
@@ -196,7 +196,7 @@ In the `onCreate()` method GStreamer is initialized as in the previous
tutorial with `GStreamer.init(this)`, and then the layout is inflated
and listeners are setup for the two UI buttons:

-``` lang=java
+``` java
ImageButton play = (ImageButton) this.findViewById(R.id.button_play);
play.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
@@ -222,7 +222,7 @@ and safer than tracking the actual pipeline state, because orientation
changes can happen before the pipeline has moved to the desired state,
for example.

-``` lang=java
+``` java
if (savedInstanceState != null) {
is_playing_desired = savedInstanceState.getBoolean("playing");
Log.i ("GStreamer", "Activity created. Saved state is playing:" + is_playing_desired);
@@ -237,7 +237,7 @@ We will first build the GStreamer pipeline (below) and only when the
native code reports itself as initialized we will use
`is_playing_desired`.

-``` lang=java
+``` java
nativeInit();
```

@@ -250,7 +250,7 @@ This finishes the `onCreate()` method and the Java initialization. The
UI buttons are disabled, so nothing will happen until native code is
ready and `onGStreamerInitialized()` is called:

-``` lang=java
+``` java
private void onGStreamerInitialized () {
Log.i ("GStreamer", "Gst initialized. Restoring state, playing:" + is_playing_desired);
```
@@ -259,7 +259,7 @@ This is called by the native code when its main loop is finally running.
We first retrieve the desired playing state from `is_playing_desired`,
and then set that state:

-``` lang=java
+``` java
// Restore previous playing state
if (is_playing_desired) {
nativePlay();
@@ -270,7 +270,7 @@ if (is_playing_desired) {

Here comes the first caveat, when re-enabling the UI buttons:

-``` lang=java
+``` java
// Re-enable buttons, now that GStreamer is initialized
final Activity activity = this;
runOnUiThread(new Runnable() {
@@ -298,7 +298,7 @@ The same problem exists when the native code wants to output a string in
our TextView using the `setMessage()` method: it has to be done from the
UI thread. The solution is the same:

-``` lang=java
+``` java
private void setMessage(final String message) {
final TextView tv = (TextView) this.findViewById(R.id.textview_message);
runOnUiThread (new Runnable() {
@@ -311,7 +311,7 @@ private void setMessage(final String message) {

Finally, a few remaining bits:

-``` lang=java
+``` java
protected void onSaveInstanceState (Bundle outState) {
Log.d ("GStreamer", "Saving state, playing:" + is_playing_desired);
outState.putBoolean("playing", is_playing_desired);
@@ -322,7 +322,7 @@ This method stores the currently desired playing state when Android is
about to shut us down, so next time it restarts (after an orientation
change, for example), it can restore the same state.

-``` lang=java
+``` java
protected void onDestroy() {
nativeFinalize();
super.onDestroy();
@@ -339,7 +339,7 @@ This concludes the UI part of the tutorial.

**jni/tutorial-2.c**

-``` lang=c
+``` c
#include <string.h>
#include <jni.h>
#include <android/log.h>
@@ -622,7 +622,7 @@ the basic tutorials, and it is used to hold all our information in one
place, so we can easily pass it around to
callbacks:

-``` lang=c
+``` c
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
jobject app; /* Application instance, used to call its methods. A global reference is kept. */
@@ -649,7 +649,7 @@ the `long` type used in Java is always 64 bits wide, but the pointer
used in C can be either 32 or 64 bits wide. The macros take care of the
conversion without warnings.

-``` lang=c
+``` c
/* Library initializer */
jint JNI_OnLoad(JavaVM *vm, void *reserved) {
JNIEnv *env = NULL;
@@ -676,7 +676,7 @@ uses [pthread\_key\_create()](http://pubs.opengroup.org/onlinepubs/9699919799/f
to be able to store per-thread information, which is crucial to properly
manage the JNI Environment, as shown later.

-``` lang=c
+``` c
/* Static class initializer: retrieve method and field IDs */
static jboolean gst_native_class_init (JNIEnv* env, jclass klass) {
custom_data_field_id = (*env)->GetFieldID (env, klass, "native_custom_data", "J");
@@ -714,7 +714,7 @@ from Java:

This method is called at the end of Java's `onCreate()`.

-``` lang=c
+``` c
static void gst_native_init (JNIEnv* env, jobject thiz) {
CustomData *data = g_new0 (CustomData, 1);
SET_CUSTOM_DATA (env, thiz, custom_data_field_id, data);
@@ -723,7 +723,7 @@ static void gst_native_init (JNIEnv* env, jobject thiz) {
It first allocates memory for the `CustomData` structure and passes the
pointer to the Java class with `SET_CUSTOM_DATA`, so it is remembered.

-``` lang=c
+``` c
data->app = (*env)->NewGlobalRef (env, thiz);
```

@@ -732,7 +732,7 @@ in `CustomData` (a [Global
Reference](http://developer.android.com/guide/practices/jni.html#local_and_global_references)
is used) so its methods can be called later.

-``` lang=c
+``` c
pthread_create (&gst_app_thread, NULL, &app_function, data);
```

@@ -741,7 +741,7 @@ Finally, a thread is created and it starts running the

### `app_function()`

-``` lang=c
+``` c
/* Main method for the native code. This is executed on its own thread. */
static void *app_function (void *userdata) {
JavaVMAttachArgs args;
@@ -764,7 +764,7 @@ is created with `g_main_context_new()` and then it is made the default
one for the thread with
`g_main_context_push_thread_default()`.

-``` lang=c
+``` c
data->pipeline = gst_parse_launch("audiotestsrc ! audioconvert ! audioresample ! autoaudiosink", &error);
if (error) {
gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message);
@@ -779,7 +779,7 @@ It then creates a pipeline the easy way, with `gst-parse-launch()`. In
this case, it is simply an `audiotestsrc` (which produces a continuous
tone) and an `autoaudiosink`, with accompanying adapter elements.

-``` lang=c
+``` c
bus = gst_element_get_bus (data->pipeline);
bus_source = gst_bus_create_watch (bus);
g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
@@ -796,7 +796,7 @@ creation of the watch is done step by step instead of using
`gst_bus_add_signal_watch()` to exemplify how to use a custom GLib
context.

-``` lang=c
+``` c
GST_DEBUG ("Entering main loop... (CustomData:%p)", data);
data->main_loop = g_main_loop_new (data->context, FALSE);
check_initialization_complete (data);
@@ -820,7 +820,7 @@ Once the main loop has quit, all resources are freed in lines 178 to

### `check_initialization_complete()`

-``` lang=c
+``` c
static void check_initialization_complete (CustomData *data) {
JNIEnv *env = get_jni_env ();
if (!data->initialized && data->main_loop) {
@@ -864,7 +864,7 @@ see how it works, step by step:

### `get_jni_env()`

-``` lang=c
+``` c
static JNIEnv *get_jni_env (void) {
JNIEnv *env;
if ((env = pthread_getspecific (current_jni_env)) == NULL) {
@@ -901,7 +901,7 @@ Let's now review the rest of the native methods accessible from Java:

### `gst_native_finalize()` (`nativeFinalize()` from Java)

-``` lang=c
+``` c
static void gst_native_finalize (JNIEnv* env, jobject thiz) {
CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
if (!data) return;
@@ -950,7 +950,7 @@ error or state changed message and display a message in the UI using the

### `set_ui_message()`

-``` lang=c
+``` c
static void set_ui_message (const gchar *message, CustomData *data) {
JNIEnv *env = get_jni_env ();
GST_DEBUG ("Setting message to: %s", message);
@@ -995,7 +995,7 @@ method and free the UTF16 message with

**jni/Android.mk**

-``` lang=ruby
+``` ruby
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

@@ -24,7 +24,7 @@ makefile that allows GStreamer integration.

**src/org/freedesktop/gstreamer/tutorials/tutorial\_1/Tutorial1.java**

-``` lang=java
+``` java
package org.freedesktop.gstreamer.tutorials.tutorial_1;

import android.app.Activity;
@@ -68,7 +68,7 @@ public class Tutorial1 extends Activity {
Calls from Java to C happen through native methods, like the one
declared here:

-``` lang=java
+``` java
private native String nativeGetGStreamerInfo();
```

@@ -80,7 +80,7 @@ shown later.
The first bit of code that gets actually executed is the static
initializer of the class:

-``` lang=java
+``` java
static {
System.loadLibrary("gstreamer_android");
System.loadLibrary("tutorial-1");
@@ -97,7 +97,7 @@ expose. The GStreamer library only exposes a `init()` method, which
initializes GStreamer and registers all plugins (The tutorial library is
explained later below).

-``` lang=java
+``` java
try {
GStreamer.init(this);
} catch (Exception e) {
@@ -120,7 +120,7 @@ Should initialization fail, the `init()` method would throw an
[Exception](http://developer.android.com/reference/java/lang/Exception.html)
with the details provided by the GStreamer library.

-``` lang=java
+``` java
TextView tv = (TextView)findViewById(R.id.textview_info);
tv.setText("Welcome to " + nativeGetGStreamerInfo() + " !");
```
@@ -137,7 +137,7 @@ code:

**jni/tutorial-1.c**

-``` lang=c
+``` c
#include <string.h>
#include <jni.h>
#include <android/log.h>
@@ -177,7 +177,7 @@ Machine (VM) loads a library.
Here, we retrieve the JNI environment needed to make calls that interact
with Java:

-``` lang=c
+``` c
JNIEnv *env = NULL;

if ((*vm)->GetEnv(vm, (void**) &env, JNI_VERSION_1_4) != JNI_OK) {
@@ -190,7 +190,7 @@ And then locate the class containing the UI part of this tutorial using
`
FindClass()`:

-``` lang=c
+``` c
jclass klass = (*env)->FindClass (env, "org/freedesktop/gstreamer/tutorials/tutorial_1/Tutorial1");
```

@@ -199,7 +199,7 @@ is, we provide the code for the methods we advertised in Java using the
**`native`**
keyword:

-``` lang=c
+``` c
(*env)->RegisterNatives (env, klass, native_methods, G_N_ELEMENTS(native_methods));
```

@@ -209,7 +209,7 @@ name, its [type
signature](http://docs.oracle.com/javase/1.5.0/docs/guide/jni/spec/types.html#wp276)
and a pointer to the C function implementing it:

-``` lang=c
+``` c
static JNINativeMethod native_methods[] = {
{ "nativeGetGStreamerInfo", "()Ljava/lang/String;", (void *) gst_native_get_gstreamer_info}
};
@@ -218,7 +218,7 @@ static JNINativeMethod native_methods[] = {
The only native method used in this tutorial
is `nativeGetGStreamerInfo()`:

-``` lang=c
+``` c
jstring gst_native_get_gstreamer_info (JNIEnv* env, jobject thiz) {
char *version_utf8 = gst_version_string();
jstring *version_jstring = (*env)->NewStringUTF(env, version_utf8);
@@ -239,7 +239,7 @@ must free the `char *` returned by `gst_version_string()`.

**jni/Android.mk**

-``` lang=ruby
+``` ruby
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

@@ -47,7 +47,7 @@ this view is collapsed by default. Click here to expand…

**src/com/gst\_sdk\_tutorials/tutorial\_4/Tutorial4.java**

-``` lang=java
+``` java
package com.gst_sdk_tutorials.tutorial_4;

import java.text.SimpleDateFormat;
@@ -311,7 +311,7 @@ offer the same functionalities. We keep track of this in the
`is_local_media` variable, and update it every time we change the media
URI:

-``` lang=java
+``` java
private void setMediaUri() {
nativeSetUri (mediaUri);
is_local_media = mediaUri.startsWith("file://");
@@ -327,7 +327,7 @@ Every time the size of the media changes (which could happen mid-stream,
for some kind of streams), or when it is first detected, C code calls
our `onMediaSizeChanged()` callback:

-``` lang=java
+``` java
private void onMediaSizeChanged (int width, int height) {
Log.i ("GStreamer", "Media size changed to " + width + "x" + height);
final GStreamerSurfaceView gsv = (GStreamerSurfaceView) this.findViewById(R.id.surface_video);
@@ -369,7 +369,7 @@ To realize the first function, C code will periodically call our
in the Seek Bar. Again we do so from the UI thread, using
`RunOnUiThread()`.

-``` lang=java
+``` java
private void setCurrentPosition(final int position, final int duration) {
final SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);

@@ -395,7 +395,7 @@ widget which we will use to display the current position and duration in
`HH:mm:ss / HH:mm:ss` textual format. The `updateTimeWidget()` method
takes care of it, and must be called every time the Seek Bar is updated:

-``` lang=java
+``` java
private void updateTimeWidget () {
final TextView tv = (TextView) this.findViewById(R.id.textview_time);
final SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);
@@ -417,7 +417,7 @@ the user to seek by dragging the thumb), we implement the
interface in the
Activity:

-``` lang=java
+``` java
public class Tutorial4 extends Activity implements SurfaceHolder.Callback, OnSeekBarChangeListener {
```

@@ -425,7 +425,7 @@ And we register the Activity as the listener for the [Seek
Bar](http://developer.android.com/reference/android/widget/SeekBar.html)’s
events in the `onCreate()` method:

-``` lang=java
+``` java
SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);
sb.setOnSeekBarChangeListener(this);
```
@@ -434,7 +434,7 @@ We will now be notified of three events: When the user starts dragging
the thumb, every time the thumb moves and when the thumb is released by
the user:

-``` lang=java
+``` java
public void onStartTrackingTouch(SeekBar sb) {
nativePause();
}
@@ -446,7 +446,7 @@ pause the pipeline. If the user is searching for a particular scene, we
do not want it to keep
moving.

-``` lang=java
+``` java
public void onProgressChanged(SeekBar sb, int progress, boolean fromUser) {
if (fromUser == false) return;
desired_position = progress;
@@ -466,7 +466,7 @@ this is, we jump to the indicated position as soon as the thumb moves.
Otherwise, the seek will be performed when the thumb is released, and
the only thing we do here is update the textual time widget.

-``` lang=java
+``` java
public void onStopTrackingTouch(SeekBar sb) {
// If this is a remote file, scrub seeking is probably not going to work smoothly enough.
// Therefore, perform only the seek when the slider is released.
@@ -490,7 +490,7 @@ this view is collapsed by default. Click here to expand…

**jni/tutorial-4.c**

-``` lang=c
+``` c
#include <string.h>
#include <jni.h>
#include <android/log.h>
@@ -1066,7 +1066,7 @@ jint JNI_OnLoad(JavaVM *vm, void *reserved) {
Java code will call `gst_native_set_uri()` whenever it wants to change
the playing URI (in this tutorial the URI never changes, but it could):

-``` lang=c
+``` c
void gst_native_set_uri (JNIEnv* env, jobject thiz, jstring uri) {
CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
if (!data || !data->pipeline) return;
@@ -1114,7 +1114,7 @@ change during playback. For simplicity, this tutorial assumes that they
do not. Therefore, in the READY to PAUSED state change, once the Caps of
the decoded media are known, we inspect them in `check_media_size()`:

-``` lang=c
+``` c
static void check_media_size (CustomData *data) {
JNIEnv *env = get_jni_env ();
GstElement *video_sink;
@@ -1165,7 +1165,7 @@ To keep the UI updated, a GLib timer is installed in the
`app_function()` that fires 4 times per second (or every 250ms), right
before entering the main loop:

-``` lang=c
+``` c
timeout_source = g_timeout_source_new (250);
g_source_set_callback (timeout_source, (GSourceFunc)refresh_ui, data, NULL);
g_source_attach (timeout_source, data->context);
@@ -1174,7 +1174,7 @@ g_source_unref (timeout_source);

Then, in the refresh\_ui method:

-``` lang=c
+``` c
static gboolean refresh_ui (CustomData *data) {
GstFormat fmt = GST_FORMAT_TIME;
gint64 current = -1;
@@ -1228,7 +1228,7 @@ see how to overcome these problems.
In
`gst_native_set_position()`:

-``` lang=c
+``` c
void gst_native_set_position (JNIEnv* env, jobject thiz, int milliseconds) {
CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
if (!data) return;
@@ -1247,7 +1247,7 @@ away; otherwise, store the desired position in the
`desired_position` variable. Then, in the
`state_changed_cb()` callback:

-``` lang=c
+``` c
if (old_state == GST_STATE_READY && new_state == GST_STATE_PAUSED) {
/* By now the sink already knows the media size */
check_media_size(data);
@@ -1284,7 +1284,7 @@ once this period elapses.
To achieve this, all seek requests are routed through the
`execute_seek()` method:

-``` lang=c
+``` c
static void execute_seek (gint64 desired_position, CustomData *data) {
gint64 diff;

@@ -1353,7 +1353,7 @@ using buffering. The same procedure is used here, by listening to the
buffering
messages:

-``` lang=c
+``` c
g_signal_connect (G_OBJECT (bus), "message::buffering", (GCallback)buffering_cb, data);
```

@@ -1361,7 +1361,7 @@ And pausing the pipeline until buffering is complete (unless this is a
live
source):

-``` lang=c
+``` c
static void buffering_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
gint percent;

@@ -36,7 +36,7 @@ until a main loop is running and a drawing surface has been received.

**src/com/gst\_sdk\_tutorials/tutorial\_3/Tutorial3.java**

-``` lang=java
+``` java
package com.gst_sdk_tutorials.tutorial_3;

import android.app.Activity;
@@ -187,7 +187,7 @@ surface to the layout and changing the GStreamer pipeline to produce
video instead of audio. Only the parts of the code that are new will be
discussed.

-``` lang=java
+``` java
private native void nativeSurfaceInit(Object surface);
private native void nativeSurfaceFinalize();
```
@@ -197,7 +197,7 @@ Two new entry points to the C code are defined,
when the video surface becomes available and when it is about to be
destroyed, respectively.

-``` lang=java
+``` java
SurfaceView sv = (SurfaceView) this.findViewById(R.id.surface_video);
SurfaceHolder sh = sv.getHolder();
sh.addCallback(this);
@@ -212,7 +212,7 @@ interface. This is why we declared this Activity as implementing the
[SurfaceHolder.Callback](http://developer.android.com/reference/android/view/SurfaceHolder.Callback.html)
interface in line 16.

-``` lang=java
+``` java
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.d("GStreamer", "Surface changed to format " + format + " width "
@@ -243,7 +243,7 @@ Let’s review the C code to see what these functions do.

**jni/tutorial-3.c**

-``` lang=c
+``` c
#include <string.h>
#include <jni.h>
#include <android/log.h>
@@ -587,7 +587,7 @@ First, our `CustomData` structure is augmented to keep a pointer to the
video sink element and the native window
handle:

-``` lang=c
+``` c
GstElement *video_sink; /* The video sink element which receives XOverlay commands */
ANativeWindow *native_window; /* The Android native window where video will be rendered */
```
@@ -596,7 +596,7 @@ The `check_initialization_complete()` method is also augmented so that
it requires a native window before considering GStreamer to be
initialized:

-``` lang=c
+``` c
static void check_initialization_complete (CustomData *data) {
JNIEnv *env = get_jni_env ();
if (!data->initialized && data->native_window && data->main_loop) {
@@ -625,14 +625,14 @@ effects in the `GSTREAMER_PLUGINS_EFFECTS` package), and an
`autovideosink` which will instantiate the adequate video sink for the
platform:

-``` lang=c
+``` c
data->pipeline = gst_parse_launch("videotestsrc ! warptv ! ffmpegcolorspace ! autovideosink ", &error);
```

Here things start to get more
interesting:

-``` lang=c
+``` c
/* Set the pipeline to READY, so it can already accept a window handle, if we have one */
gst_element_set_state(data->pipeline, GST_STATE_READY);

@@ -660,7 +660,7 @@ Now we will implement the two native functions called by the Java code
when the drawing surface becomes available or is about to be
destroyed:

-``` lang=c
+``` c
static void gst_native_surface_init (JNIEnv *env, jobject thiz, jobject surface) {
CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
if (!data) return;
@@ -717,7 +717,7 @@ We finally store the new window handle and call
`check_initialization_complete()` to inform the Java code that
everything is set up, if that is the case.

-``` lang=c
+``` c
static void gst_native_surface_finalize (JNIEnv *env, jobject thiz) {
CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
if (!data) return;
@@ -770,7 +770,7 @@ surface.

**src/com/gst\_sdk\_tutorials/tutorial\_3/GStreamerSurfaceView.java**

-``` lang=java
+``` java
package com.gst_sdk_tutorials.tutorial_3;

import android.content.Context;
@@ -862,7 +862,7 @@ public class GStreamerSurfaceView extends SurfaceView {

**/jni/Android.mk**

-``` lang=ruby
+``` ruby
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

@@ -50,7 +50,7 @@ target_link_libraries(player ${QTGSTREAMER_UI_LIBRARIES} ${QT_QTOPENGL_LIBRARIES

**main.cpp**

-``` lang=c
+``` c
#include "mediaapp.h"
#include <QtWidgets/QApplication>
#include <QGst/Init>
@@ -71,7 +71,7 @@ int main(int argc, char *argv[])

**mediaapp.h**

-``` lang=c
+``` c
#ifndef MEDIAAPP_H
#define MEDIAAPP_H
#include <QtCore/QTimer>
@@ -124,7 +124,7 @@ private:

**mediaapp.cpp**

-``` lang=c
+``` c
#include "mediaapp.h"
#include "player.h"
#if (QT_VERSION >= QT_VERSION_CHECK(5, 0, 0))
@@ -324,7 +324,7 @@ void MediaApp::createUI(QBoxLayout *appLayout)

**player.h**

-``` lang=c
+``` c
#ifndef PLAYER_H
#define PLAYER_H
#include <QtCore/QTimer>
@@ -372,7 +372,7 @@ private:

**player.cpp**

-``` lang=c
+``` c
#include "player.h"
#include <QtCore/QDir>
#include <QtCore/QUrl>
@@ -553,7 +553,7 @@ We begin by looking at `main()`:

**main.cpp**

-``` lang=c
+``` c
int main(int argc, char *argv[])
{
QApplication app(argc, argv);
@@ -582,7 +582,7 @@ the UI:

**MediaApp::MediaApp()**

-``` lang=c
+``` c
//create the player
m_player = new Player(this);
connect(m_player, SIGNAL(positionChanged()), this, SLOT(onPositionChanged()));
@@ -594,7 +594,7 @@ line, if any:

**MediaApp::openFile()**

-``` lang=c
+``` c
void MediaApp::openFile(const QString & fileName)
{
m_baseDir = QFileInfo(fileName).path();
@@ -608,7 +608,7 @@ This in turn instructs the `Player` to construct our GStreamer pipeline:

**Player::setUri()**

-``` lang=c
+``` c
void Player::setUri(const QString & uri)
{
QString realUri = uri;
@@ -648,7 +648,7 @@ rendering. For clarity, here is a portion of the implementation:

**prepare-xwindow-id handling**

-``` lang=c
+``` c
QGlib::connect(pipeline->bus(), "sync-message",
this, &PipelineWatch::onBusSyncMessage);
...
@@ -664,7 +664,7 @@ void PipelineWatch::onBusSyncMessage(const MessagePtr & msg)
Once the pipeline is created, we connect to the bus' message signal (via
`QGlib::connect()`) to dispatch state change signals:

-``` lang=c
+``` c
void Player::onBusMessage(const QGst::MessagePtr & message)
{
switch (message->type()) {
@@ -706,7 +706,7 @@ void Player::handlePipelineStateChange(const QGst::StateChangedMessagePtr & scm)

Finally, we tell `playbin` what to play by setting the `uri` property:

-``` lang=c
+``` c
m_pipeline->setProperty("uri", realUri);
```

@@ -717,7 +717,7 @@ After `Player::setUri()` is called, `MediaApp::openFile()` calls

**Player::play()**

-``` lang=c
+``` c
void Player::play()
{
if (m_pipeline) {
@@ -730,7 +730,7 @@ The other state control methods are equally simple:

**Player state functions**

-``` lang=c
+``` c
void Player::pause()
{
if (m_pipeline) {
@@ -754,7 +754,7 @@ is emitted on the GStreamer bus which gets picked up by the `Player`:

**Player::onBusMessage()**

-``` lang=c
+``` c
void Player::onBusMessage(const QGst::MessagePtr & message)
{
switch (message->type()) {
@@ -781,7 +781,7 @@ handled:

**MediaApp::onStateChanged()**

-``` lang=c
+``` c
void MediaApp::onStateChanged()
{
QGst::State newState = m_player->state();
@@ -810,7 +810,7 @@ UI to handle:

**MediaApp::onPositionChanged()**

-``` lang=c
+``` c
void MediaApp::onPositionChanged()
{
QTime length(0,0);
@@ -842,7 +842,7 @@ to `gst_element_query_position()`:

**Player::position()**

-``` lang=c
+``` c
QTime Player::position() const
{
if (m_pipeline) {

@@ -34,7 +34,7 @@ Copy this code into a text file named `basic-tutorial-15.c`..

**basic-tutorial-15.c**

-``` lang=c
+``` c
#include <clutter-gst/clutter-gst.h>

/* Setup the video texture once its size is known */
@@ -163,7 +163,7 @@ how to integrate GStreamer with it. This is accomplished through the
clutter-gst library, so its header must be included (and the program
must link against it):

-``` lang=c
+``` c
#include <clutter-gst/clutter-gst.h>
```

@@ -171,7 +171,7 @@ The first thing this library does is initialize both GStreamer and
Clutter, so you must call ` clutter-gst-init()` instead of initializing
these libraries yourself.

-``` lang=c
+``` c
/* clutter-gst takes care of initializing Clutter and GStreamer */
if (clutter_gst_init (&argc, &argv) != CLUTTER_INIT_SUCCESS) {
g_error ("Failed to initialize clutter\n");
@@ -184,7 +184,7 @@ create a texture. Just remember to disable texture slicing to allow for
proper
integration:

-``` lang=c
+``` c
/* Create new texture and disable slicing so the video is properly mapped onto it */
texture = CLUTTER_ACTOR (g_object_new (CLUTTER_TYPE_TEXTURE, "disable-slicing", TRUE, NULL));
g_signal_connect (texture, "size-change", G_CALLBACK (size_change), NULL);
@@ -193,7 +193,7 @@ g_signal_connect (texture, "size-change", G_CALLBACK (size_change), NULL);
We connect to the size-change signal so we can perform final setup once
the video size is known.

-``` lang=c
+``` c
/* Instantiate the Clutter sink */
sink = gst_element_factory_make ("autocluttersink", NULL);
if (sink == NULL) {
@@ -214,14 +214,14 @@ release of the SDK, so, if it cannot be found, the
simpler `cluttersink` element is created
instead.

-``` lang=c
+``` c
/* Link GStreamer with Clutter by passing the Clutter texture to the Clutter sink*/
g_object_set (sink, "texture", texture, NULL);
```

This texture is everything GStreamer needs to know about Clutter.

-``` lang=c
+``` c
/* Add the Clutter sink to the pipeline */
g_object_set (pipeline, "video-sink", sink, NULL);
```

@@ -22,7 +22,7 @@ in the SDK installation).

**basic-tutorial-2.c**

-```
+``` c
#include <gst/gst.h>

int main(int argc, char *argv[]) {
@@ -130,7 +130,7 @@ through filter elements.
We will skip GStreamer initialization, since it is the same as the
previous tutorial:

-```
+``` c
/* Create the elements */
source = gst_element_factory_make ("videotestsrc", "source");
sink = gst_element_factory_make ("autovideosink", "sink");
@@ -164,7 +164,7 @@ platform-independent.

### Pipeline creation

-```
+``` c
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
```
@@ -173,7 +173,7 @@ All elements in GStreamer must typically be contained inside a pipeline
before they can be used, because it takes care of some clocking and
messaging functions. We create the pipeline with `gst_pipeline_new()`.

-```
+``` c
/* Build the pipeline */
gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
if (gst_element_link (source, sink) != TRUE) {
@@ -200,7 +200,7 @@ trying to link them!

### Properties

-```
+``` c
/* Modify the source's properties */
g_object_set (source, "pattern", 0, NULL);
```
@@ -233,7 +233,7 @@ At this point, we have the whole pipeline built and setup, and the rest
of the tutorial is very similar to the previous one, but we are going to
add more error checking:

-```
+``` c
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
@@ -248,7 +248,7 @@ value for errors. Changing states is a delicate process and a few more
details are given in [Basic tutorial 3: Dynamic
pipelines](sdk-basic-tutorial-dynamic-pipelines.md).

-```
+``` c
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

@@ -157,7 +157,7 @@ as the Debug category in the output log).
To change the category to something more meaningful, add these two lines
at the top of your code:

-``` lang=c
+``` c
GST_DEBUG_CATEGORY_STATIC (my_category);
#define GST_CAT_DEFAULT my_category
```
@@ -165,7 +165,7 @@ GST_DEBUG_CATEGORY_STATIC (my_category);
And then this one after you have initialized GStreamer with
`gst_init()`:

-``` lang=c
+``` c
GST_DEBUG_CATEGORY_INIT (my_category, "my category", 0, "This is my very own");
```

@@ -85,7 +85,7 @@ in the SDK installation).

**basic-tutorial-3.c**

-```
+``` c
#include <gst/gst.h>

/* Structure to contain all our information, so we can pass it to callbacks */
@@ -251,7 +251,7 @@ exit:

## Walkthrough

-```
+``` c
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstElement *pipeline;
@@ -266,14 +266,14 @@ to `GstElement`s, basically) as local variables. Since this tutorial
(and most real applications) involves callbacks, we will group all our
data in a structure for easier handling.

-```
+``` c
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);
```

This is a forward reference, to be used later.

-```
+``` c
/* Create the elements */
data.source = gst_element_factory_make ("uridecodebin", "source");
data.convert = gst_element_factory_make ("audioconvert", "convert");
@@ -295,7 +295,7 @@ The `autoaudiosink` is the equivalent of `autovideosink` seen in the
previous tutorial, for audio. It will render the audio stream to the
audio card.

-```
+``` c
if (!gst_element_link (data.convert, data.sink)) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
@@ -307,7 +307,7 @@ Here we link the converter element to the sink, but we **DO NOT** link
them with the source, since at this point it contains no source pads. We
just leave this branch (converter + sink) unlinked, until later on.

-```
+``` c
/* Set the URI to play */
g_object_set (data.source, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
```
@@ -317,7 +317,7 @@ the previous tutorial.

### Signals

-```
+``` c
/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
```
@@ -351,7 +351,7 @@ producing data, it will create source pads, and trigger the “pad-added”
signal. At this point our callback will be
called:

-```
+``` c
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -366,7 +366,7 @@ This is usually the pad to which we want to link.
|
||||||
`data` is the pointer we provided when attaching to the signal. In this
|
`data` is the pointer we provided when attaching to the signal. In this
|
||||||
example, we use it to pass the `CustomData` pointer.
|
example, we use it to pass the `CustomData` pointer.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
|
GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -376,7 +376,7 @@ which we want to link `new_pad`. In the previous tutorial we linked
|
||||||
element against element, and let GStreamer choose the appropriate pads.
|
element against element, and let GStreamer choose the appropriate pads.
|
||||||
Now we are going to link the pads directly.
|
Now we are going to link the pads directly.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* If our converter is already linked, we have nothing to do here */
|
/* If our converter is already linked, we have nothing to do here */
|
||||||
if (gst_pad_is_linked (sink_pad)) {
|
if (gst_pad_is_linked (sink_pad)) {
|
||||||
g_print (" We are already linked. Ignoring.\n");
|
g_print (" We are already linked. Ignoring.\n");
|
||||||
|
@ -388,7 +388,7 @@ if (gst_pad_is_linked (sink_pad)) {
|
||||||
this callback will be called. These lines of code will prevent us from
|
this callback will be called. These lines of code will prevent us from
|
||||||
trying to link to a new pad once we are already linked.
|
trying to link to a new pad once we are already linked.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Check the new pad's type */
|
/* Check the new pad's type */
|
||||||
new_pad_caps = gst_pad_query_caps (new_pad, NULL);
|
new_pad_caps = gst_pad_query_caps (new_pad, NULL);
|
||||||
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
|
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
|
||||||
|
@ -423,7 +423,7 @@ audio pad, and we are not interested in it.
|
||||||
|
|
||||||
Otherwise, attempt the link:
|
Otherwise, attempt the link:
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Attempt the link */
|
/* Attempt the link */
|
||||||
ret = gst_pad_link (new_pad, sink_pad);
|
ret = gst_pad_link (new_pad, sink_pad);
|
||||||
if (GST_PAD_LINK_FAILED (ret)) {
|
if (GST_PAD_LINK_FAILED (ret)) {
|
||||||
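
Putting the pieces of this walkthrough together, a condensed sketch of the whole handler could look as follows (it assumes the tutorial's `CustomData` with a `convert` element, and checks for raw audio caps):

``` c
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
  GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
  GstCaps *new_pad_caps = NULL;
  const gchar *new_pad_type;

  /* If our converter is already linked, we have nothing to do here */
  if (!gst_pad_is_linked (sink_pad)) {
    /* Check the new pad's type before attempting the link */
    new_pad_caps = gst_pad_query_caps (new_pad, NULL);
    new_pad_type = gst_structure_get_name (gst_caps_get_structure (new_pad_caps, 0));
    if (g_str_has_prefix (new_pad_type, "audio/x-raw") &&
        GST_PAD_LINK_FAILED (gst_pad_link (new_pad, sink_pad)))
      g_printerr ("Type is '%s' but link failed.\n", new_pad_type);
  }

  if (new_pad_caps != NULL)
    gst_caps_unref (new_pad_caps);
  gst_object_unref (sink_pad);
}
```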
|
@ -462,7 +462,7 @@ to PLAYING, you have to go through the intermediate READY and PAUSED
|
||||||
states. If you set the pipeline to PLAYING, though, GStreamer will make
|
states. If you set the pipeline to PLAYING, though, GStreamer will make
|
||||||
the intermediate transitions for you.
|
the intermediate transitions for you.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
case GST_MESSAGE_STATE_CHANGED:
|
case GST_MESSAGE_STATE_CHANGED:
|
||||||
/* We are only interested in state-changed messages from the pipeline */
|
/* We are only interested in state-changed messages from the pipeline */
|
||||||
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
|
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
|
||||||
|
|
|
@ -35,11 +35,11 @@ a `decodebin2` element. It acts like a demuxer, so it offers as many
|
||||||
source pads as streams are found in the
|
source pads as streams are found in the
|
||||||
media.
|
media.
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
gst-launch-1.0 uridecodebin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm ! ffmpegcolorspace ! autovideosink
|
gst-launch-1.0 uridecodebin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm ! ffmpegcolorspace ! autovideosink
|
||||||
```
|
```
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
gst-launch-1.0 uridecodebin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm ! audioconvert ! autoaudiosink
|
gst-launch-1.0 uridecodebin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm ! audioconvert ! autoaudiosink
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -53,7 +53,7 @@ replaces the old `decodebin` element. It acts like a demuxer, so it
|
||||||
offers as many source pads as streams are found in the
|
offers as many source pads as streams are found in the
|
||||||
media.
|
media.
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
gst-launch-1.0 souphttpsrc location=http://docs.gstreamer.com/media/sintel_trailer-480p.webm ! decodebin2 ! autovideosink
|
gst-launch-1.0 souphttpsrc location=http://docs.gstreamer.com/media/sintel_trailer-480p.webm ! decodebin2 ! autovideosink
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -67,7 +67,7 @@ using a `typefind` element or by setting the `typefind` property
|
||||||
of `filesrc` to
|
of `filesrc` to
|
||||||
`TRUE`.
|
`TRUE`.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
gst-launch-1.0 filesrc location=f:\\media\\sintel\\sintel_trailer-480p.webm ! decodebin2 ! autovideosink
|
gst-launch-1.0 filesrc location=f:\\media\\sintel\\sintel_trailer-480p.webm ! decodebin2 ! autovideosink
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -89,7 +89,7 @@ This element receives data as a client over the network via HTTP using
|
||||||
the SOUP library. Set the URL to retrieve through the `location`
|
the SOUP library. Set the URL to retrieve through the `location`
|
||||||
property.
|
property.
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
gst-launch-1.0 souphttpsrc location=http://docs.gstreamer.com/media/sintel_trailer-480p.webm ! decodebin2 ! autovideosink
|
gst-launch-1.0 souphttpsrc location=http://docs.gstreamer.com/media/sintel_trailer-480p.webm ! decodebin2 ! autovideosink
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -104,7 +104,7 @@ are “guaranteed” to work.
|
||||||
This element produces a video pattern (selectable among many different
|
This element produces a video pattern (selectable among many different
|
||||||
options with the `pattern` property). Use it to test video pipelines.
|
options with the `pattern` property). Use it to test video pipelines.
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
gst-launch-1.0 videotestsrc ! ffmpegcolorspace ! autovideosink
|
gst-launch-1.0 videotestsrc ! ffmpegcolorspace ! autovideosink
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -113,7 +113,7 @@ gst-launch-1.0 videotestsrc ! ffmpegcolorspace ! autovideosink
|
||||||
This element produces an audio wave (selectable among many different
|
This element produces an audio wave (selectable among many different
|
||||||
options with the `wave` property). Use it to test audio pipelines.
|
options with the `wave` property). Use it to test audio pipelines.
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
gst-launch-1.0 audiotestsrc ! audioconvert ! autoaudiosink
|
gst-launch-1.0 audiotestsrc ! audioconvert ! autoaudiosink
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -135,7 +135,7 @@ elements whose Caps are unknown at design time, like `autovideosink`, or
|
||||||
that can vary depending on external factors, like decoding a
|
that can vary depending on external factors, like decoding a
|
||||||
user-provided file.
|
user-provided file.
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
gst-launch-1.0 videotestsrc ! ffmpegcolorspace ! autovideosink
|
gst-launch-1.0 videotestsrc ! ffmpegcolorspace ! autovideosink
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -155,7 +155,7 @@ It is therefore a good idea to always use it whenever the actual frame
|
||||||
rate is unknown at design time, just in
|
rate is unknown at design time, just in
|
||||||
case.
|
case.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
gst-launch-1.0 videotestsrc ! video/x-raw-rgb,framerate=30/1 ! videorate ! video/x-raw-rgb,framerate=1/1 ! ffmpegcolorspace ! autovideosink
|
gst-launch-1.0 videotestsrc ! video/x-raw-rgb,framerate=30/1 ! videorate ! video/x-raw-rgb,framerate=1/1 ! ffmpegcolorspace ! autovideosink
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -176,7 +176,7 @@ user, it is a good idea to use a `videoscale` element, since not all
|
||||||
video sinks are capable of performing scaling
|
video sinks are capable of performing scaling
|
||||||
operations.
|
operations.
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
gst-launch-1.0 uridecodebin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm ! videoscale ! video/x-raw-yuv,width=178,height=100 ! ffmpegcolorspace ! autovideosink
|
gst-launch-1.0 uridecodebin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm ! videoscale ! video/x-raw-yuv,width=178,height=100 ! ffmpegcolorspace ! autovideosink
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -193,7 +193,7 @@ Like `ffmpegcolorspace` does for video, you use this to solve
|
||||||
negotiation problems with audio, and it is generally safe to use it
|
negotiation problems with audio, and it is generally safe to use it
|
||||||
liberally, since this element does nothing if it is not needed.
|
liberally, since this element does nothing if it is not needed.
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
gst-launch-1.0 audiotestsrc ! audioconvert ! autoaudiosink
|
gst-launch-1.0 audiotestsrc ! audioconvert ! autoaudiosink
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -206,7 +206,7 @@ Again, use it to solve negotiation problems regarding sampling rates and
|
||||||
do not fear to use it
|
do not fear to use it
|
||||||
generously.
|
generously.
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
gst-launch-1.0 uridecodebin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm ! audioresample ! audio/x-raw-float,rate=4000 ! audioconvert ! autoaudiosink
|
gst-launch-1.0 uridecodebin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm ! audioresample ! audio/x-raw-float,rate=4000 ! audioconvert ! autoaudiosink
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -309,7 +309,7 @@ the `capsfilter` element. This element does not modify data as such,
|
||||||
but enforces limitations on the data
|
but enforces limitations on the data
|
||||||
format.
|
format.
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
gst-launch-1.0 videotestsrc ! video/x-raw-gray ! ffmpegcolorspace ! autovideosink
|
gst-launch-1.0 videotestsrc ! video/x-raw-gray ! ffmpegcolorspace ! autovideosink
|
||||||
```
|
```
|
||||||
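
The same restriction can be expressed programmatically with a `capsfilter` element; a hedged sketch (element creation succeeds, and adding/linking the filter into the bin is omitted here):

``` c
/* Build the restriction as a GstCaps and hand it to a capsfilter's "caps" property */
GstElement *filter = gst_element_factory_make ("capsfilter", "filter");
GstCaps *caps = gst_caps_from_string ("video/x-raw-gray");
g_object_set (filter, "caps", caps, NULL);
gst_caps_unref (caps);
/* The filter then goes between videotestsrc and ffmpegcolorspace in the bin */
```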
|
|
||||||
|
|
|
@ -60,7 +60,7 @@ Internet, along with audio. Congratulations!
|
||||||
|
|
||||||
Let's review these lines of code and see what they do:
|
Let's review these lines of code and see what they do:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Initialize GStreamer */
|
/* Initialize GStreamer */
|
||||||
gst_init (&argc, &argv);
|
gst_init (&argc, &argv);
|
||||||
```
|
```
|
||||||
|
@ -79,7 +79,7 @@ If you always pass your command-line parameters
|
||||||
benefit from the GStreamer standard command-line options (more on this
|
benefit from the GStreamer standard command-line options (more on this
|
||||||
in [Basic tutorial 10: GStreamer tools])
|
in [Basic tutorial 10: GStreamer tools])
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Build the pipeline */
|
/* Build the pipeline */
|
||||||
pipeline = gst_parse_launch ("playbin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
|
pipeline = gst_parse_launch ("playbin uri=http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
|
||||||
```
|
```
|
||||||
|
@ -132,7 +132,7 @@ plug-in, GStreamer provides several notification mechanisms, but the
|
||||||
only thing we are doing in this example is exiting on error, so do not
|
only thing we are doing in this example is exiting on error, so do not
|
||||||
expect much feedback.
|
expect much feedback.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Start playing */
|
/* Start playing */
|
||||||
gst_element_set_state (pipeline, GST_STATE_PLAYING);
|
gst_element_set_state (pipeline, GST_STATE_PLAYING);
|
||||||
|
|
||||||
|
@ -146,7 +146,7 @@ In this line, `gst_element_set_state()` is setting `pipeline` (our only
|
||||||
element, remember) to the PLAYING state, thus initiating playback.
|
element, remember) to the PLAYING state, thus initiating playback.
|
||||||
```
|
```
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Wait until error or EOS */
|
/* Wait until error or EOS */
|
||||||
bus = gst_element_get_bus (pipeline);
|
bus = gst_element_get_bus (pipeline);
|
||||||
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
|
gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
|
||||||
|
@ -170,7 +170,7 @@ control-C in the console.
|
||||||
Before terminating the application, though, there are a couple of things
|
Before terminating the application, though, there are a couple of things
|
||||||
we need to do to tidy up correctly after ourselves.
|
we need to do to tidy up correctly after ourselves.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Free resources */
|
/* Free resources */
|
||||||
if (msg != NULL)
|
if (msg != NULL)
|
||||||
gst_message_unref (msg);
|
gst_message_unref (msg);
|
||||||
|
|
|
@ -122,7 +122,7 @@ in the SDK installation).
|
||||||
|
|
||||||
**basic-tutorial-6.c**
|
**basic-tutorial-6.c**
|
||||||
|
|
||||||
```
|
``` c
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
|
|
||||||
/* Functions below print the Capabilities in a human-friendly format */
|
/* Functions below print the Capabilities in a human-friendly format */
|
||||||
|
@ -353,7 +353,7 @@ want to learn about the internal organization of the
|
||||||
`GstCaps` structure, read the `GStreamer Documentation` regarding Pad
|
`GstCaps` structure, read the `GStreamer Documentation` regarding Pad
|
||||||
Caps.
|
Caps.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Shows the CURRENT capabilities of the requested pad in the given element */
|
/* Shows the CURRENT capabilities of the requested pad in the given element */
|
||||||
static void print_pad_capabilities (GstElement *element, gchar *pad_name) {
|
static void print_pad_capabilities (GstElement *element, gchar *pad_name) {
|
||||||
GstPad *pad = NULL;
|
GstPad *pad = NULL;
|
||||||
|
@ -394,7 +394,7 @@ as the actual hardware Capabilities might be queried.
|
||||||
|
|
||||||
We then print these Capabilities.
|
We then print these Capabilities.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Create the element factories */
|
/* Create the element factories */
|
||||||
source_factory = gst_element_factory_find ("audiotestsrc");
|
source_factory = gst_element_factory_find ("audiotestsrc");
|
||||||
sink_factory = gst_element_factory_find ("autoaudiosink");
|
sink_factory = gst_element_factory_find ("autoaudiosink");
|
||||||
|
@ -429,7 +429,7 @@ are printed as soon as the factories are created.
|
||||||
We skip the pipeline creation and start, and go to the State-Changed
|
We skip the pipeline creation and start, and go to the State-Changed
|
||||||
message handling:
|
message handling:
|
||||||
|
|
||||||
```
|
``` c
|
||||||
case GST_MESSAGE_STATE_CHANGED:
|
case GST_MESSAGE_STATE_CHANGED:
|
||||||
/* We are only interested in state-changed messages from the pipeline */
|
/* We are only interested in state-changed messages from the pipeline */
|
||||||
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (pipeline)) {
|
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (pipeline)) {
|
||||||
|
|
|
@ -79,7 +79,7 @@ in the SDK installation).
|
||||||
|
|
||||||
**basic-tutorial-9.c**
|
**basic-tutorial-9.c**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
#include <gst/pbutils/pbutils.h>
|
#include <gst/pbutils/pbutils.h>
|
||||||
|
@ -326,7 +326,7 @@ int main (int argc, char **argv) {
|
||||||
|
|
||||||
These are the main steps to use the `GstDiscoverer`:
|
These are the main steps to use the `GstDiscoverer`:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Instantiate the Discoverer */
|
/* Instantiate the Discoverer */
|
||||||
data.discoverer = gst_discoverer_new (5 * GST_SECOND, &err);
|
data.discoverer = gst_discoverer_new (5 * GST_SECOND, &err);
|
||||||
if (!data.discoverer) {
|
if (!data.discoverer) {
|
||||||
|
@ -340,7 +340,7 @@ if (!data.discoverer) {
|
||||||
parameter is the timeout per file, in nanoseconds (use the
|
parameter is the timeout per file, in nanoseconds (use the
|
||||||
`GST_SECOND` macro for simplicity).
|
`GST_SECOND` macro for simplicity).
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Connect to the interesting signals */
|
/* Connect to the interesting signals */
|
||||||
g_signal_connect (data.discoverer, "discovered", G_CALLBACK (on_discovered_cb), &data);
|
g_signal_connect (data.discoverer, "discovered", G_CALLBACK (on_discovered_cb), &data);
|
||||||
g_signal_connect (data.discoverer, "finished", G_CALLBACK (on_finished_cb), &data);
|
g_signal_connect (data.discoverer, "finished", G_CALLBACK (on_finished_cb), &data);
|
||||||
|
@ -349,7 +349,7 @@ g_signal_connect (data.discoverer, "finished", G_CALLBACK (on_finished_cb), &dat
|
||||||
Connect to the interesting signals, as usual. We discuss them in the
|
Connect to the interesting signals, as usual. We discuss them in the
|
||||||
snippet for their callbacks.
|
snippet for their callbacks.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Start the discoverer process (nothing to do yet) */
|
/* Start the discoverer process (nothing to do yet) */
|
||||||
gst_discoverer_start (data.discoverer);
|
gst_discoverer_start (data.discoverer);
|
||||||
```
|
```
|
||||||
|
@ -358,7 +358,7 @@ gst_discoverer_start (data.discoverer);
|
||||||
not provided any URI to discover yet. This is done
|
not provided any URI to discover yet. This is done
|
||||||
next:
|
next:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Add a request to process asynchronously the URI passed through the command line */
|
/* Add a request to process asynchronously the URI passed through the command line */
|
||||||
if (!gst_discoverer_discover_uri_async (data.discoverer, uri)) {
|
if (!gst_discoverer_discover_uri_async (data.discoverer, uri)) {
|
||||||
g_print ("Failed to start discovering URI '%s'\n", uri);
|
g_print ("Failed to start discovering URI '%s'\n", uri);
|
||||||
|
@ -373,7 +373,7 @@ discovery process for each of them finishes, the registered callback
|
||||||
functions will be fired
|
functions will be fired
|
||||||
up.
|
up.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Create a GLib Main Loop and set it to run, so we can wait for the signals */
|
/* Create a GLib Main Loop and set it to run, so we can wait for the signals */
|
||||||
data.loop = g_main_loop_new (NULL, FALSE);
|
data.loop = g_main_loop_new (NULL, FALSE);
|
||||||
g_main_loop_run (data.loop);
|
g_main_loop_run (data.loop);
|
||||||
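
Condensing the steps above into one sequence (error handling trimmed; `data`, `err` and `uri` are the tutorial's own variables):

``` c
data.discoverer = gst_discoverer_new (5 * GST_SECOND, &err);             /* 1. instantiate */
g_signal_connect (data.discoverer, "discovered", G_CALLBACK (on_discovered_cb), &data);
g_signal_connect (data.discoverer, "finished", G_CALLBACK (on_finished_cb), &data);
gst_discoverer_start (data.discoverer);                                  /* 2. start it */
gst_discoverer_discover_uri_async (data.discoverer, uri);                /* 3. queue a URI */
data.loop = g_main_loop_new (NULL, FALSE);
g_main_loop_run (data.loop);                 /* 4. on_finished_cb calls g_main_loop_quit() */
gst_discoverer_stop (data.discoverer);                                   /* 5. stop and clean up */
g_object_unref (data.discoverer);
g_main_loop_unref (data.loop);
```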
|
@ -383,7 +383,7 @@ The usual GLib main loop is instantiated and executed. We will get out
|
||||||
of it when `g_main_loop_quit()` is called from the
|
of it when `g_main_loop_quit()` is called from the
|
||||||
`on_finished_cb` callback.
|
`on_finished_cb` callback.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Stop the discoverer process */
|
/* Stop the discoverer process */
|
||||||
gst_discoverer_stop (data.discoverer);
|
gst_discoverer_stop (data.discoverer);
|
||||||
```
|
```
|
||||||
|
@ -394,7 +394,7 @@ Once we are done with the discoverer, we stop it with
|
||||||
Let's review now the callbacks we have
|
Let's review now the callbacks we have
|
||||||
registered:
|
registered:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* This function is called every time the discoverer has information regarding
|
/* This function is called every time the discoverer has information regarding
|
||||||
* one of the URIs we provided.*/
|
* one of the URIs we provided.*/
|
||||||
static void on_discovered_cb (GstDiscoverer *discoverer, GstDiscovererInfo *info, GError *err, CustomData *data) {
|
static void on_discovered_cb (GstDiscoverer *discoverer, GstDiscovererInfo *info, GError *err, CustomData *data) {
|
||||||
|
@ -415,7 +415,7 @@ case we had multiple discover process running, which is not the case in
|
||||||
this example) with `gst_discoverer_info_get_uri()` and the discovery
|
this example) with `gst_discoverer_info_get_uri()` and the discovery
|
||||||
result with `gst_discoverer_info_get_result()`.
|
result with `gst_discoverer_info_get_result()`.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
switch (result) {
|
switch (result) {
|
||||||
case GST_DISCOVERER_URI_INVALID:
|
case GST_DISCOVERER_URI_INVALID:
|
||||||
g_print ("Invalid URI '%s'\n", uri);
|
g_print ("Invalid URI '%s'\n", uri);
|
||||||
|
@ -465,7 +465,7 @@ If no error happened, information can be retrieved from the
|
||||||
Bits of information which are made of lists, like tags and stream info,
|
Bits of information which are made of lists, like tags and stream info,
|
||||||
need some extra parsing:
|
need some extra parsing:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
tags = gst_discoverer_info_get_tags (info);
|
tags = gst_discoverer_info_get_tags (info);
|
||||||
if (tags) {
|
if (tags) {
|
||||||
g_print ("Tags:\n");
|
g_print ("Tags:\n");
|
||||||
|
@ -480,7 +480,7 @@ or a specific tag could be searched for with
|
||||||
`gst_tag_list_get_string()`). The code for `print_tag_foreach` is pretty
|
`gst_tag_list_get_string()`). The code for `print_tag_foreach` is pretty
|
||||||
much self-explanatory.
|
much self-explanatory.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
sinfo = gst_discoverer_info_get_stream_info (info);
|
sinfo = gst_discoverer_info_get_stream_info (info);
|
||||||
if (!sinfo)
|
if (!sinfo)
|
||||||
return;
|
return;
|
||||||
|
@ -497,7 +497,7 @@ a `GstDiscovererStreamInfo` structure that is parsed in
|
||||||
the `print_topology` function, and then discarded
|
the `print_topology` function, and then discarded
|
||||||
with `gst_discoverer_stream_info_unref()`.
|
with `gst_discoverer_stream_info_unref()`.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Print information regarding a stream and its substreams, if any */
|
/* Print information regarding a stream and its substreams, if any */
|
||||||
static void print_topology (GstDiscovererStreamInfo *info, gint depth) {
|
static void print_topology (GstDiscovererStreamInfo *info, gint depth) {
|
||||||
GstDiscovererStreamInfo *next;
|
GstDiscovererStreamInfo *next;
|
||||||
|
|
|
@ -84,7 +84,7 @@ in the SDK installation).
|
||||||
|
|
||||||
**basic-tutorial-7.c**
|
**basic-tutorial-7.c**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
|
|
||||||
int main(int argc, char *argv[]) {
|
int main(int argc, char *argv[]) {
|
||||||
|
@ -191,7 +191,7 @@ int main(int argc, char *argv[]) {
|
||||||
|
|
||||||
## Walkthrough
|
## Walkthrough
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Create the elements */
|
/* Create the elements */
|
||||||
audio_source = gst_element_factory_make ("audiotestsrc", "audio_source");
|
audio_source = gst_element_factory_make ("audiotestsrc", "audio_source");
|
||||||
tee = gst_element_factory_make ("tee", "tee");
|
tee = gst_element_factory_make ("tee", "tee");
|
||||||
|
@ -220,7 +220,7 @@ Caps produced by the `audiotestsrc` and `wavescope`. If the Caps
|
||||||
matched, though, these elements act in “pass-through” mode and do not
|
matched, though, these elements act in “pass-through” mode and do not
|
||||||
modify the signal, having negligible impact on performance.
|
modify the signal, having negligible impact on performance.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Configure elements */
|
/* Configure elements */
|
||||||
g_object_set (audio_source, "freq", 215.0f, NULL);
|
g_object_set (audio_source, "freq", 215.0f, NULL);
|
||||||
g_object_set (visual, "shader", 0, "style", 1, NULL);
|
g_object_set (visual, "shader", 0, "style", 1, NULL);
|
||||||
|
@ -235,7 +235,7 @@ tools](sdk-basic-tutorial-gstreamer-tools.md) to learn all
|
||||||
the properties of these
|
the properties of these
|
||||||
elements.
|
elements.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Link all elements that can be automatically linked because they have "Always" pads */
|
/* Link all elements that can be automatically linked because they have "Always" pads */
|
||||||
gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue, audio_convert, audio_sink,
|
gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue, audio_convert, audio_sink,
|
||||||
video_queue, visual, video_convert, video_sink, NULL);
|
video_queue, visual, video_convert, video_sink, NULL);
|
||||||
|
@ -255,7 +255,7 @@ comment says).
|
||||||
> ![Warning](images/icons/emoticons/warning.png)
|
> ![Warning](images/icons/emoticons/warning.png)
|
||||||
> `gst_element_link_many()` can actually link elements with Request Pads. It internally requests the Pads so you do not have to worry about the elements being linked having Always or Request Pads. Strange as it might seem, this is actually inconvenient, because you still need to release the requested Pads afterwards, and, if the Pad was requested automatically by `gst_element_link_many()`, it is easy to forget. Stay out of trouble by always requesting Request Pads manually, as shown in the next code block.
|
> `gst_element_link_many()` can actually link elements with Request Pads. It internally requests the Pads so you do not have to worry about the elements being linked having Always or Request Pads. Strange as it might seem, this is actually inconvenient, because you still need to release the requested Pads afterwards, and, if the Pad was requested automatically by `gst_element_link_many()`, it is easy to forget. Stay out of trouble by always requesting Request Pads manually, as shown in the next code block.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Manually link the Tee, which has "Request" pads */
|
/* Manually link the Tee, which has "Request" pads */
|
||||||
tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src_%d");
|
tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src_%d");
|
||||||
tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);
|
tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);
|
||||||
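
A sketch of the complete request/link/release cycle (queue elements and pad variables follow the tutorial's naming; the `src_%d` template name is the one shown above):

``` c
GstPad *queue_audio_pad = gst_element_get_static_pad (audio_queue, "sink");
GstPad *queue_video_pad = gst_element_get_static_pad (video_queue, "sink");

/* Request one tee source pad per branch and link it to the branch's queue */
tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);
tee_video_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);
if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK ||
    gst_pad_link (tee_video_pad, queue_video_pad) != GST_PAD_LINK_OK) {
  g_printerr ("Tee could not be linked.\n");
}
gst_object_unref (queue_audio_pad);
gst_object_unref (queue_video_pad);

/* ... and, when shutting down, release and unref the requested pads */
gst_element_release_request_pad (tee, tee_audio_pad);
gst_element_release_request_pad (tee, tee_video_pad);
gst_object_unref (tee_audio_pad);
gst_object_unref (tee_video_pad);
```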
|
@ -301,7 +301,7 @@ We then set the pipeline to playing as usual, and wait until an error
|
||||||
message or an EOS is produced. The only thing left to do is clean up the
|
message or an EOS is produced. The only thing left to do is clean up the
|
||||||
requested Pads:
|
requested Pads:
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Release the request pads from the Tee, and unref them */
|
/* Release the request pads from the Tee, and unref them */
|
||||||
gst_element_release_request_pad (tee, tee_audio_pad);
|
gst_element_release_request_pad (tee, tee_audio_pad);
|
||||||
gst_element_release_request_pad (tee, tee_video_pad);
|
gst_element_release_request_pad (tee, tee_video_pad);
|
||||||
|
|
|
@ -67,7 +67,7 @@ Copy this code into a text file named `basic-tutorial-13.c`.
|
||||||
|
|
||||||
**basic-tutorial-13.c**
|
**basic-tutorial-13.c**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
|
|
||||||
|
@ -248,7 +248,7 @@ keystrokes and a GLib main loop is executed.
|
||||||
|
|
||||||
Then, in the keyboard handler function:
|
Then, in the keyboard handler function:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Process keyboard input */
|
/* Process keyboard input */
|
||||||
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
|
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
|
||||||
gchar *str = NULL;
|
gchar *str = NULL;
|
||||||
|
@ -268,7 +268,7 @@ static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomDa
|
||||||
Pause / Playing toggle is handled with `gst_element_set_state()` as in
|
Pause / Playing toggle is handled with `gst_element_set_state()` as in
|
||||||
previous tutorials.
|
previous tutorials.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
case 's':
|
case 's':
|
||||||
if (g_ascii_isupper (str[0])) {
|
if (g_ascii_isupper (str[0])) {
|
||||||
data->rate *= 2.0;
|
data->rate *= 2.0;
|
||||||
|
@ -288,7 +288,7 @@ reverse the current playback direction. In both cases, the
|
||||||
`rate` variable is updated and `send_seek_event` is called. Let’s
|
`rate` variable is updated and `send_seek_event` is called. Let’s
|
||||||
review this function.
|
review this function.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Send seek event to change rate */
|
/* Send seek event to change rate */
|
||||||
static void send_seek_event (CustomData *data) {
|
static void send_seek_event (CustomData *data) {
|
||||||
gint64 position;
|
gint64 position;
|
||||||
|
@ -310,7 +310,7 @@ want to move, we jump to the current position. Using a Step Event would
|
||||||
be simpler, but this event is not currently fully functional, as
|
be simpler, but this event is not currently fully functional, as
|
||||||
explained in the Introduction.
|
explained in the Introduction.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Create the seek event */
|
/* Create the seek event */
|
||||||
if (data->rate > 0) {
|
if (data->rate > 0) {
|
||||||
seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
|
seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
|
||||||
|
@ -327,7 +327,7 @@ position. Regardless of the playback direction, the start position must
|
||||||
be smaller than the stop position, so the two playback directions are
|
be smaller than the stop position, so the two playback directions are
|
||||||
treated differently.
|
treated differently.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
if (data->video_sink == NULL) {
|
if (data->video_sink == NULL) {
|
||||||
/* If we have not done so, obtain the sink through which we will send the seek events */
|
/* If we have not done so, obtain the sink through which we will send the seek events */
|
||||||
g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
|
g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
|
||||||
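
A condensed sketch of `send_seek_event()` as described above (field names come from the tutorial's `CustomData`):

``` c
static void send_seek_event (CustomData *data) {
  gint64 position;
  GstEvent *seek_event;

  if (!gst_element_query_position (data->pipeline, GST_FORMAT_TIME, &position)) {
    g_printerr ("Unable to retrieve current position.\n");
    return;
  }

  if (data->rate > 0)    /* forward: play from the current position to the end */
    seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME,
        GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
        GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_END, 0);
  else                   /* backward: the start must stay below the stop */
    seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME,
        GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
        GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, position);

  if (data->video_sink == NULL)
    g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);

  gst_element_send_event (data->video_sink, seek_event);
}
```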
|
@ -341,7 +341,7 @@ at this time instead at initialization time because the actual sink may
|
||||||
change depending on the media contents, and this won’t be known until
|
change depending on the media contents, and this won’t be known until
|
||||||
the pipeline is PLAYING and some media has been read.
|
the pipeline is PLAYING and some media has been read.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Send the event */
|
/* Send the event */
|
||||||
gst_element_send_event (data->video_sink, seek_event);
|
gst_element_send_event (data->video_sink, seek_event);
|
||||||
```
|
```
|
||||||
|
@ -352,7 +352,7 @@ The new Event is finally sent to the selected sink with
|
||||||
Back to the keyboard handler, we still miss the frame stepping code,
|
Back to the keyboard handler, we still miss the frame stepping code,
|
||||||
which is really simple:
|
which is really simple:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
case 'n':
|
case 'n':
|
||||||
if (data->video_sink == NULL) {
|
if (data->video_sink == NULL) {
|
||||||
/* If we have not done so, obtain the sink through which we will send the step events */
|
/* If we have not done so, obtain the sink through which we will send the step events */
|
||||||
|
|
|
@ -94,7 +94,7 @@ been received, but it could obviously perform more complex tasks.
|
||||||
Copy this code into a text file named `basic-tutorial-8.c` (or find it
|
Copy this code into a text file named `basic-tutorial-8.c` (or find it
|
||||||
in the SDK installation).
|
in the SDK installation).
|
||||||
|
|
||||||
```
|
``` c
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
#include <gst/audio/audio.h>
|
#include <gst/audio/audio.h>
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
|
@ -355,7 +355,7 @@ Always Pads, and manually link the Request Pads of the `tee` element.
|
||||||
|
|
||||||
Regarding the configuration of the `appsrc` and `appsink` elements:
|
Regarding the configuration of the `appsrc` and `appsink` elements:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Configure appsrc */
|
/* Configure appsrc */
|
||||||
audio_caps_text = g_strdup_printf (AUDIO_CAPS, SAMPLE_RATE);
|
audio_caps_text = g_strdup_printf (AUDIO_CAPS, SAMPLE_RATE);
|
||||||
audio_caps = gst_caps_from_string (audio_caps_text);
|
audio_caps = gst_caps_from_string (audio_caps_text);
|
||||||
|
@ -376,7 +376,7 @@ fired by `appsrc` when its internal queue of data is running low or
|
||||||
almost full, respectively. We will use these signals to start and stop
|
almost full, respectively. We will use these signals to start and stop
|
||||||
(respectively) our signal generation process.
|
(respectively) our signal generation process.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Configure appsink */
|
/* Configure appsink */
|
||||||
g_object_set (data.app_sink, "emit-signals", TRUE, "caps", audio_caps, NULL);
|
g_object_set (data.app_sink, "emit-signals", TRUE, "caps", audio_caps, NULL);
|
||||||
g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data);
|
g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data);
|
||||||
|
@ -393,7 +393,7 @@ Starting the pipeline, waiting for messages and final cleanup is done as
|
||||||
usual. Let's review the callbacks we have just
|
usual. Let's review the callbacks we have just
|
||||||
registered:
|
registered:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* This signal callback triggers when appsrc needs data. Here, we add an idle handler
|
/* This signal callback triggers when appsrc needs data. Here, we add an idle handler
|
||||||
* to the mainloop to start pushing data into the appsrc */
|
* to the mainloop to start pushing data into the appsrc */
|
||||||
static void start_feed (GstElement *source, guint size, CustomData *data) {
|
static void start_feed (GstElement *source, guint size, CustomData *data) {
|
||||||
|
@ -422,7 +422,7 @@ We take note of the sourceid that `g_idle_add()` returns, so we can
|
||||||
disable it
|
disable it
|
||||||
later.
|
later.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* This callback triggers when appsrc has enough data and we can stop sending.
|
/* This callback triggers when appsrc has enough data and we can stop sending.
|
||||||
* We remove the idle handler from the mainloop */
|
* We remove the idle handler from the mainloop */
|
||||||
static void stop_feed (GstElement *source, CustomData *data) {
|
static void stop_feed (GstElement *source, CustomData *data) {
|
||||||
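
The two callbacks together, as a sketch (the `sourceid` field and the `push_data()` idle function are the tutorial's own):

``` c
/* appsrc is running low on data: install the idle handler that feeds it */
static void start_feed (GstElement *source, guint size, CustomData *data) {
  if (data->sourceid == 0) {
    g_print ("Start feeding\n");
    data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
  }
}

/* appsrc has enough data: remove the idle handler again */
static void stop_feed (GstElement *source, CustomData *data) {
  if (data->sourceid != 0) {
    g_print ("Stop feeding\n");
    g_source_remove (data->sourceid);
    data->sourceid = 0;
  }
}
```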
|
@ -439,7 +439,7 @@ enough so we stop pushing data. Here we simply remove the idle function
|
||||||
by using `g_source_remove()` (The idle function is implemented as a
|
by using `g_source_remove()` (The idle function is implemented as a
|
||||||
`GSource`).
|
`GSource`).
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
|
/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
|
||||||
* The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
|
* The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
|
||||||
* and is removed when appsrc has enough data (enough-data signal).
|
* and is removed when appsrc has enough data (enough-data signal).
|
||||||
|
@ -489,7 +489,7 @@ We will skip over the waveform generation, since it is outside the scope
|
||||||
of this tutorial (it is simply a funny way of generating a pretty
|
of this tutorial (it is simply a funny way of generating a pretty
|
||||||
psychedelic wave).
|
psychedelic wave).
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Push the buffer into the appsrc */
|
/* Push the buffer into the appsrc */
|
||||||
g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);
|
g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);
|
||||||
|
|
||||||
|
@ -503,7 +503,7 @@ tutorial 1: Playbin
|
||||||
usage](sdk-playback-tutorial-playbin-usage.md)), and then
|
usage](sdk-playback-tutorial-playbin-usage.md)), and then
|
||||||
`gst_buffer_unref()` it since we no longer need it.
|
`gst_buffer_unref()` it since we no longer need it.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* The appsink has received a buffer */
|
/* The appsink has received a buffer */
|
||||||
static void new_sample (GstElement *sink, CustomData *data) {
|
static void new_sample (GstElement *sink, CustomData *data) {
|
||||||
GstSample *sample;
|
GstSample *sample;
|
||||||
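
The rest of the callback can be sketched like this (using the `sink` and `sample` variables already in scope; the "pull-sample" action signal is provided by `appsink`):

``` c
/* Retrieve the buffer wrapped in a GstSample and release it when done */
g_signal_emit_by_name (sink, "pull-sample", &sample);
if (sample) {
  g_print ("*");   /* the tutorial only prints a marker per received buffer */
  gst_sample_unref (sample);
}
```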
|
|
|
@ -60,7 +60,7 @@ Copy this code into a text file named `basic-tutorial-12.c`.
|
||||||
|
|
||||||
**basic-tutorial-12.c**
|
**basic-tutorial-12.c**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
|
|
||||||
|
@ -193,7 +193,7 @@ therefore, the initialization code is very simple and should be
|
||||||
self-explanatory by now. The only new bit is the detection of live
|
self-explanatory by now. The only new bit is the detection of live
|
||||||
streams:
|
streams:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Start playing */
|
/* Start playing */
|
||||||
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
|
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
|
||||||
if (ret == GST_STATE_CHANGE_FAILURE) {
|
if (ret == GST_STATE_CHANGE_FAILURE) {
|
||||||
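
The detection itself hinges on the state-change return value: live sources do not preroll, which is reported as `GST_STATE_CHANGE_NO_PREROLL`. A sketch (the `is_live` flag is assumed to live in the tutorial's `CustomData`):

``` c
if (ret == GST_STATE_CHANGE_FAILURE) {
  g_printerr ("Unable to set the pipeline to the playing state.\n");
  gst_object_unref (pipeline);
  return -1;
} else if (ret == GST_STATE_CHANGE_NO_PREROLL) {
  /* Live sources do not preroll: remember this so we never pause for buffering */
  data.is_live = TRUE;
}
```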
|
@ -219,7 +219,7 @@ them, so we take note of the result of `gst_element_set_state()` in the
|
||||||
|
|
||||||
Let’s now review the interesting parts of the message parsing callback:
|
Let’s now review the interesting parts of the message parsing callback:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
case GST_MESSAGE_BUFFERING: {
|
case GST_MESSAGE_BUFFERING: {
|
||||||
gint percent = 0;
|
gint percent = 0;
|
||||||
|
|
||||||
|
@ -252,7 +252,7 @@ network becomes slow or unresponsive and our buffer depletes, we will
|
||||||
receive new buffering messages with levels below 100% so we will pause
|
receive new buffering messages with levels below 100% so we will pause
|
||||||
the pipeline again until enough buffer has been built up.
|
the pipeline again until enough buffer has been built up.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
case GST_MESSAGE_CLOCK_LOST:
|
case GST_MESSAGE_CLOCK_LOST:
|
||||||
/* Get a new clock */
|
/* Get a new clock */
|
||||||
gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
|
gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
|
||||||
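
For reference, a sketch of the buffering branch this describes (percent parsing with `gst_message_parse_buffering()`; `is_live` and `pipeline` are the tutorial's `CustomData` fields):

``` c
case GST_MESSAGE_BUFFERING: {
  gint percent = 0;

  if (data->is_live) break;                  /* live streams must not be paused */

  gst_message_parse_buffering (msg, &percent);
  g_print ("Buffering (%3d%%)\r", percent);
  if (percent < 100)
    gst_element_set_state (data->pipeline, GST_STATE_PAUSED);   /* wait for more data */
  else
    gst_element_set_state (data->pipeline, GST_STATE_PLAYING);  /* buffer full: resume */
  break;
}
```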
|
|
|
@ -35,7 +35,7 @@ in the SDK installation).
|
||||||
|
|
||||||
**basic-tutorial-4.c**
|
**basic-tutorial-4.c**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
|
|
||||||
/* Structure to contain all our information, so we can pass it around */
|
/* Structure to contain all our information, so we can pass it around */
|
||||||
|
|
|
@ -69,7 +69,7 @@ in the SDK installation).
|
||||||
|
|
||||||
**basic-tutorial-5.c**
|
**basic-tutorial-5.c**
|
||||||
|
|
||||||
```
|
``` c
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
|
|
||||||
#include <gtk/gtk.h>
|
#include <gtk/gtk.h>
|
||||||
|
@ -477,7 +477,7 @@ used. Also, for clarity of explanation, the order in which the snippets
|
||||||
of code are presented will not always match the program order. Use the
|
of code are presented will not always match the program order. Use the
|
||||||
line numbers to locate the snippets in the complete code.
|
line numbers to locate the snippets in the complete code.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
#include <gdk/gdk.h>
|
#include <gdk/gdk.h>
|
||||||
#if defined (GDK_WINDOWING_X11)
|
#if defined (GDK_WINDOWING_X11)
|
||||||
#include <gdk/gdkx.h>
|
#include <gdk/gdkx.h>
|
||||||
|
@ -498,7 +498,7 @@ This tutorial is composed mostly of callback functions, which will be
|
||||||
called from GStreamer or GTK+, so let's review the `main` function,
|
called from GStreamer or GTK+, so let's review the `main` function,
|
||||||
which registers all these callbacks.
|
which registers all these callbacks.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
int main(int argc, char *argv[]) {
|
int main(int argc, char *argv[]) {
|
||||||
CustomData data;
|
CustomData data;
|
||||||
GstStateChangeReturn ret;
|
GstStateChangeReturn ret;
|
||||||
|
@ -529,7 +529,7 @@ int main(int argc, char *argv[]) {
|
||||||
Standard GStreamer initialization and playbin pipeline creation, along
|
Standard GStreamer initialization and playbin pipeline creation, along
|
||||||
with GTK+ initialization. Not much new.
|
with GTK+ initialization. Not much new.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Connect to interesting signals in playbin */
|
/* Connect to interesting signals in playbin */
|
||||||
g_signal_connect (G_OBJECT (data.playbin), "video-tags-changed", (GCallback) tags_cb, &data);
|
g_signal_connect (G_OBJECT (data.playbin), "video-tags-changed", (GCallback) tags_cb, &data);
|
||||||
g_signal_connect (G_OBJECT (data.playbin), "audio-tags-changed", (GCallback) tags_cb, &data);
|
g_signal_connect (G_OBJECT (data.playbin), "audio-tags-changed", (GCallback) tags_cb, &data);
|
||||||
|
@ -540,7 +540,7 @@ We are interested in being notified when new tags (metadata) appears on
|
||||||
the stream. For simplicity, we are going to handle all kinds of tags
|
the stream. For simplicity, we are going to handle all kinds of tags
|
||||||
(video, audio and text) from the same callback `tags_cb`.
|
(video, audio and text) from the same callback `tags_cb`.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Create the GUI */
|
/* Create the GUI */
|
||||||
create_ui (&data);
|
create_ui (&data);
|
||||||
```
|
```
|
||||||
|
@ -551,7 +551,7 @@ over its definition. The signals to which it registers convey user
|
||||||
commands, as shown below when reviewing the
|
commands, as shown below when reviewing the
|
||||||
callbacks.
|
callbacks.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
|
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
|
||||||
bus = gst_element_get_bus (data.playbin);
|
bus = gst_element_get_bus (data.playbin);
|
||||||
gst_bus_add_signal_watch (bus);
|
gst_bus_add_signal_watch (bus);
|
||||||
|
@ -583,7 +583,7 @@ Keep in mind that, in order for the bus watches to work (be it a
|
||||||
GLib `Main Loop` running. In this case, it is hidden inside the
|
GLib `Main Loop` running. In this case, it is hidden inside the
|
||||||
[GTK+](http://www.gtk.org/) main loop.
|
[GTK+](http://www.gtk.org/) main loop.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* Register a function that GLib will call every second */
|
/* Register a function that GLib will call every second */
|
||||||
g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);
|
g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);
|
||||||
```
|
```
|
||||||
|
@ -600,7 +600,7 @@ signature, depending on who will call it. You can look up the signature
|
||||||
(the meaning of the parameters and the return value) in the
|
(the meaning of the parameters and the return value) in the
|
||||||
documentation of the signal.
|
documentation of the signal.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* This function is called when the GUI toolkit creates the physical window that will hold the video.
|
/* This function is called when the GUI toolkit creates the physical window that will hold the video.
|
||||||
* At this point we can retrieve its handler (which has a different meaning depending on the windowing system)
|
* At this point we can retrieve its handler (which has a different meaning depending on the windowing system)
|
||||||
* and pass it to GStreamer through the GstVideoOverlay interface. */
|
* and pass it to GStreamer through the GstVideoOverlay interface. */
|
||||||
|
@ -636,7 +636,7 @@ and uses this one.
|
||||||
Not much more to see here; `playbin` and the `GstVideoOverlay` really simplify
|
Not much more to see here; `playbin` and the `GstVideoOverlay` really simplify
|
||||||
this process a lot!
|
this process a lot!
|
||||||
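
A sketch of such a realize callback on X11 (GTK+/GDK calls as used in this tutorial; other windowing systems need their own handle type and header):

``` c
static void realize_cb (GtkWidget *widget, CustomData *data) {
  GdkWindow *window = gtk_widget_get_window (widget);
  guintptr window_handle;

  if (!gdk_window_ensure_native (window))
    g_error ("Couldn't create native window needed for GstVideoOverlay!");

  /* Retrieve the X11 window ID and hand it to playbin, which implements
   * GstVideoOverlay and forwards the handle to the actual video sink */
  window_handle = GDK_WINDOW_XID (window);
  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->playbin), window_handle);
}
```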
|
|
||||||
```
|
``` c
|
||||||
/* This function is called when the PLAY button is clicked */
|
/* This function is called when the PLAY button is clicked */
|
||||||
static void play_cb (GtkButton *button, CustomData *data) {
|
static void play_cb (GtkButton *button, CustomData *data) {
|
||||||
gst_element_set_state (data->playbin, GST_STATE_PLAYING);
|
gst_element_set_state (data->playbin, GST_STATE_PLAYING);
|
||||||
|
@ -661,7 +661,7 @@ corresponding state. Note that in the STOP state we set the pipeline to
|
||||||
resources (like the audio device) would need to be released and
|
resources (like the audio device) would need to be released and
|
||||||
re-acquired.
|
re-acquired.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* This function is called when the main window is closed */
|
/* This function is called when the main window is closed */
|
||||||
static void delete_event_cb (GtkWidget *widget, GdkEvent *event, CustomData *data) {
|
static void delete_event_cb (GtkWidget *widget, GdkEvent *event, CustomData *data) {
|
||||||
stop_cb (NULL, data);
|
stop_cb (NULL, data);
|
||||||
|
@ -674,7 +674,7 @@ in `main` to terminate, which, in this case, finishes the program. Here,
|
||||||
we call it when the main window is closed, after stopping the pipeline
|
we call it when the main window is closed, after stopping the pipeline
|
||||||
(just for the sake of tidiness).
|
(just for the sake of tidiness).
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* This function is called every time the video window needs to be redrawn (due to damage/exposure,
|
/* This function is called every time the video window needs to be redrawn (due to damage/exposure,
|
||||||
* rescaling, etc). GStreamer takes care of this in the PAUSED and PLAYING states, otherwise,
|
* rescaling, etc). GStreamer takes care of this in the PAUSED and PLAYING states, otherwise,
|
||||||
* we simply draw a black rectangle to avoid garbage showing up. */
|
* we simply draw a black rectangle to avoid garbage showing up. */
|
||||||
|
@ -704,7 +704,7 @@ other cases, however, it will not, so we have to do it. In this example,
|
||||||
we just fill the window with a black
|
we just fill the window with a black
|
||||||
rectangle.
|
rectangle.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* This function is called when the slider changes its position. We perform a seek to the
|
/* This function is called when the slider changes its position. We perform a seek to the
|
||||||
* new position here. */
|
* new position here. */
|
||||||
static void slider_cb (GtkRange *range, CustomData *data) {
|
static void slider_cb (GtkRange *range, CustomData *data) {
|
||||||
|
@ -730,7 +730,7 @@ before allowing another one. Otherwise, the application might look
|
||||||
unresponsive if the user drags the slider frantically, which would not
|
unresponsive if the user drags the slider frantically, which would not
|
||||||
allow any seek to complete before a new one is queued.
|
allow any seek to complete before a new one is queued.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* This function is called periodically to refresh the GUI */
|
/* This function is called periodically to refresh the GUI */
|
||||||
static gboolean refresh_ui (CustomData *data) {
|
static gboolean refresh_ui (CustomData *data) {
|
||||||
gint64 current = -1;
|
gint64 current = -1;
|
||||||
|
@ -745,7 +745,7 @@ the media. First off, if we are not in the `PLAYING` state, we have
|
||||||
nothing to do here (plus, position and duration queries will normally
|
nothing to do here (plus, position and duration queries will normally
|
||||||
fail).
|
fail).
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* If we didn't know it yet, query the stream duration */
|
/* If we didn't know it yet, query the stream duration */
|
||||||
if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
|
if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
|
||||||
if (!gst_element_query_duration (data->playbin, GST_FORMAT_TIME, &data->duration)) {
|
if (!gst_element_query_duration (data->playbin, GST_FORMAT_TIME, &data->duration)) {
|
||||||
|
@ -760,7 +760,7 @@ if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
|
||||||
We recover the duration of the clip if we didn't know it, so we can set
|
We recover the duration of the clip if we didn't know it, so we can set
|
||||||
the range for the slider.
|
the range for the slider.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
if (gst_element_query_position (data->playbin, GST_FORMAT_TIME, &current)) {
|
if (gst_element_query_position (data->playbin, GST_FORMAT_TIME, &current)) {
|
||||||
/* Block the "value-changed" signal, so the slider_cb function is not called
|
/* Block the "value-changed" signal, so the slider_cb function is not called
|
||||||
* (which would trigger a seek the user has not requested) */
|
* (which would trigger a seek the user has not requested) */
|
||||||
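
The blocking mentioned in the comment can be sketched as follows (`slider` and `slider_cb` are the tutorial's widget and handler; `GST_SECOND` converts the nanosecond position to seconds):

``` c
g_signal_handlers_block_by_func (data->slider, slider_cb, data);
gtk_range_set_value (GTK_RANGE (data->slider), (gdouble) current / GST_SECOND);
g_signal_handlers_unblock_by_func (data->slider, slider_cb, data);
```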
|
@ -785,7 +785,7 @@ Returning TRUE from this function will keep it called in the future. If
|
||||||
we return FALSE, the timer will be
|
we return FALSE, the timer will be
|
||||||
removed.
|
removed.
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* This function is called when new metadata is discovered in the stream */
|
/* This function is called when new metadata is discovered in the stream */
|
||||||
static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
|
static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
|
||||||
/* We are possibly in a GStreamer working thread, so we notify the main
|
/* We are possibly in a GStreamer working thread, so we notify the main
|
||||||
|
@ -823,7 +823,7 @@ Later, once in the main thread, the bus will receive this message and
|
||||||
emit the `message::application` signal, which we have associated to the
|
emit the `message::application` signal, which we have associated to the
|
||||||
`application_cb` function:
|
`application_cb` function:
|
||||||
|
|
||||||
```
|
``` c
|
||||||
/* This function is called when an "application" message is posted on the bus.
|
/* This function is called when an "application" message is posted on the bus.
|
||||||
* Here we retrieve the message posted by the tags_cb callback */
|
* Here we retrieve the message posted by the tags_cb callback */
|
||||||
static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
|
static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
|
||||||
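
For context, the posting side in `tags_cb` can be sketched like this (the "tags-changed" structure name is the tutorial's own convention):

``` c
/* Post an application message on the bus from the streaming thread */
gst_element_post_message (playbin,
    gst_message_new_application (GST_OBJECT (playbin),
        gst_structure_new_empty ("tags-changed")));
```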
|
|
|
@ -168,7 +168,7 @@ installed in a non-standard location `/opt/gstreamer-sdk`. The shell
|
||||||
script `gst-sdk-shell` sets the required environment variables for
|
script `gst-sdk-shell` sets the required environment variables for
|
||||||
building applications with the GStreamer SDK:
|
building applications with the GStreamer SDK:
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
/opt/gstreamer-sdk/bin/gst-sdk-shell
|
/opt/gstreamer-sdk/bin/gst-sdk-shell
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -252,7 +252,7 @@ Using the file name of the tutorial you are interested in
|
||||||
To run the tutorials, simply execute the desired tutorial (**from within
|
To run the tutorials, simply execute the desired tutorial (**from within
|
||||||
the `gst-sdk-shell`**):
|
the `gst-sdk-shell`**):
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
./basic-tutorial-1
|
./basic-tutorial-1
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
@ -29,7 +29,7 @@ should replace `$INSTALL_PATH` with the path where your installer copied
|
||||||
the SDK's disk image files (the `/tmp` directory is a good place to
|
the SDK's disk image files (the `/tmp` directory is a good place to
|
||||||
install it as it will be removed at the end of the installation):
|
install it as it will be removed at the end of the installation):
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
hdiutil attach $INSTALL_PATH/gstreamer-sdk-2012.7-x86.dmg
|
hdiutil attach $INSTALL_PATH/gstreamer-sdk-2012.7-x86.dmg
|
||||||
cd /Volumes/gstreamer-sdk-2012.7-x86/
|
cd /Volumes/gstreamer-sdk-2012.7-x86/
|
||||||
installer -pkg gstreamer-sdk-2012.7-x86.pkg -target "/"
|
installer -pkg gstreamer-sdk-2012.7-x86.pkg -target "/"
|
||||||
|
@ -45,7 +45,7 @@ simply copy the framework to the application's Frameworks folder as
|
||||||
defined in the [bundle programming
|
defined in the [bundle programming
|
||||||
guide](https://developer.apple.com/library/mac/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html#//apple_ref/doc/uid/10000123i-CH101-SW19):
|
guide](https://developer.apple.com/library/mac/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html#//apple_ref/doc/uid/10000123i-CH101-SW19):
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
cp -r /Library/Frameworks/GStreamer.framework ~/MyApp.app/Contents/Frameworks
|
cp -r /Library/Frameworks/GStreamer.framework ~/MyApp.app/Contents/Frameworks
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -54,7 +54,7 @@ different architectures, installed in the system. Make sure you only
|
||||||
copy the version you need and that you update accordingly the link
|
copy the version you need and that you update accordingly the link
|
||||||
`GStreamer.framework/Version/Current`:
|
`GStreamer.framework/Version/Current`:
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
$ ls -l Frameworks/GStreamer.framework/Version/Current
|
$ ls -l Frameworks/GStreamer.framework/Version/Current
|
||||||
lrwxr-xr-x 1 fluendo staff 21 Jun 5 18:46 Frameworks/GStreamer.framework/Versions/Current -> ../Versions/0.10/x86
|
lrwxr-xr-x 1 fluendo staff 21 Jun 5 18:46 Frameworks/GStreamer.framework/Versions/Current -> ../Versions/0.10/x86
|
||||||
```
|
```
|
||||||
|
@ -272,7 +272,7 @@ We can get the list of paths used by an object file to locate its
|
||||||
dependent dynamic libraries
|
dependent dynamic libraries
|
||||||
using [otool](https://developer.apple.com/library/mac/#documentation/darwin/reference/manpages/man1/otool.1.html):
|
using [otool](https://developer.apple.com/library/mac/#documentation/darwin/reference/manpages/man1/otool.1.html):
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
$ otool -L /Library/Frameworks/GStreamer.framework/Commands/gst-launch-1.0
|
$ otool -L /Library/Frameworks/GStreamer.framework/Commands/gst-launch-1.0
|
||||||
/Library/Frameworks/GStreamer.framework/Commands/gst-launch-1.0:
|
/Library/Frameworks/GStreamer.framework/Commands/gst-launch-1.0:
|
||||||
/System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation (compatibility version 150.0.0, current version 550.43.0)
|
/System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation (compatibility version 150.0.0, current version 550.43.0)
|
||||||
|
@ -291,7 +291,7 @@ This full path is extracted from the dynamic library ***install name***
|
||||||
install name of a library can be retrieved with
|
install name of a library can be retrieved with
|
||||||
[otool](https://developer.apple.com/library/mac/#documentation/darwin/reference/manpages/man1/otool.1.html) too:
|
[otool](https://developer.apple.com/library/mac/#documentation/darwin/reference/manpages/man1/otool.1.html) too:
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
$ otool -D /Library/Frameworks/GStreamer.framework/Libraries/libgstreamer-1.0.dylib
|
$ otool -D /Library/Frameworks/GStreamer.framework/Libraries/libgstreamer-1.0.dylib
|
||||||
/Library/Frameworks/GStreamer.framework/Libraries/libgstreamer-1.0.dylib:
|
/Library/Frameworks/GStreamer.framework/Libraries/libgstreamer-1.0.dylib:
|
||||||
/Library/Frameworks/GStreamer.framework/Versions/0.10/x86/lib/libgstreamer-1.0.0.dylib
|
/Library/Frameworks/GStreamer.framework/Versions/0.10/x86/lib/libgstreamer-1.0.0.dylib
|
||||||
|
@ -346,7 +346,7 @@ When looking for binaries to fix, we will run the script in the
|
||||||
following
|
following
|
||||||
directories:
|
directories:
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
$ osxrelocator.py MyApp.app/Contents/Frameworks/GStreamer.framework/Versions/Current/lib /Library/Frameworks/GStreamer.framework/ @executable_path/../Frameworks/GStreamer.framework/ -r
|
$ osxrelocator.py MyApp.app/Contents/Frameworks/GStreamer.framework/Versions/Current/lib /Library/Frameworks/GStreamer.framework/ @executable_path/../Frameworks/GStreamer.framework/ -r
|
||||||
$ osxrelocator.py MyApp.app/Contents/Frameworks/GStreamer.framework/Versions/Current/libexec /Library/Frameworks/GStreamer.framework/ @executable_path/../Frameworks/GStreamer.framework/ -r
|
$ osxrelocator.py MyApp.app/Contents/Frameworks/GStreamer.framework/Versions/Current/libexec /Library/Frameworks/GStreamer.framework/ @executable_path/../Frameworks/GStreamer.framework/ -r
|
||||||
$ osxrelocator.py MyApp.app/Contents/Frameworks/GStreamer.framework/Versions/Current/bin /Library/Frameworks/GStreamer.framework/ @executable_path/../Frameworks/GStreamer.framework/ -r
|
$ osxrelocator.py MyApp.app/Contents/Frameworks/GStreamer.framework/Versions/Current/bin /Library/Frameworks/GStreamer.framework/ @executable_path/../Frameworks/GStreamer.framework/ -r
|
||||||
|
|
|
@ -26,7 +26,7 @@ In the Cerbero installation directory you will find the
|
||||||
`cerbero-uninstalled` script. Execute it without parameters to see the
|
`cerbero-uninstalled` script. Execute it without parameters to see the
|
||||||
list of commands it accepts:
|
list of commands it accepts:
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
./cerbero-uninstalled
|
./cerbero-uninstalled
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -35,7 +35,7 @@ list of commands it accepts:
|
||||||
The first step is to create an empty recipe that you can then tailor to
|
The first step is to create an empty recipe that you can then tailor to
|
||||||
your needs:
|
your needs:
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
./cerbero-uninstalled add-recipe my-app 1.0
|
./cerbero-uninstalled add-recipe my-app 1.0
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -240,7 +240,7 @@ Snappy.
|
||||||
|
|
||||||
Once the recipe is ready, instruct Cerbero to build it:
|
Once the recipe is ready, instruct Cerbero to build it:
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
./cerbero-uninstalled build my-app
|
./cerbero-uninstalled build my-app
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -255,7 +255,7 @@ files in `cerbero/packages`.
|
||||||
|
|
||||||
Now, to create an empty package, do:
|
Now, to create an empty package, do:
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
./cerbero-uninstalled add-package my-app 1.0
|
./cerbero-uninstalled add-package my-app 1.0
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -408,7 +408,7 @@ Alternatively you can also pass some options to `cerbero-uninstalled`,
|
||||||
for
|
for
|
||||||
example:
|
example:
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
./cerbero-uninstalled add-package my-app 1.0 --license "LGPL" --codename MyApp --vendor MyAppVendor --url "http://www.my-app.com" --files=my-app:bins:libs --files-devel=my-app:devel --platform-files=linux:my-app:linux_specific --platform-files-devel=linux:my-app:linux_specific_devel,windows:my-app:windows_specific_devel --deps base-system --includes gstreamer-core
|
./cerbero-uninstalled add-package my-app 1.0 --license "LGPL" --codename MyApp --vendor MyAppVendor --url "http://www.my-app.com" --files=my-app:bins:libs --files-devel=my-app:devel --platform-files=linux:my-app:linux_specific --platform-files-devel=linux:my-app:linux_specific_devel,windows:my-app:windows_specific_devel --deps base-system --includes gstreamer-core
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -470,7 +470,7 @@ packages\_prefix as the ones in your Cerbero configuration file.
|
||||||
|
|
||||||
Finally, build your package by using:
|
Finally, build your package by using:
|
||||||
|
|
||||||
``` lang=bash
|
``` bash
|
||||||
./cerbero-uninstalled package your-package
|
./cerbero-uninstalled package your-package
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
@ -39,7 +39,7 @@ Copy this code into a text file named `playback-tutorial-6.c`.
|
||||||
|
|
||||||
**playback-tutorial-6.c**
|
**playback-tutorial-6.c**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
|
|
||||||
/* playbin flags */
|
/* playbin flags */
|
||||||
|
@ -161,7 +161,7 @@ First off, we indicate `playbin` that we want an audio visualization by
|
||||||
setting the `GST_PLAY_FLAG_VIS` flag. If the media already contains
|
setting the `GST_PLAY_FLAG_VIS` flag. If the media already contains
|
||||||
video, this flag has no effect.
|
video, this flag has no effect.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Set the visualization flag */
|
/* Set the visualization flag */
|
||||||
g_object_get (pipeline, "flags", &flags, NULL);
|
g_object_get (pipeline, "flags", &flags, NULL);
|
||||||
flags |= GST_PLAY_FLAG_VIS;
|
flags |= GST_PLAY_FLAG_VIS;
|
||||||
|
@ -173,7 +173,7 @@ If no visualization plugin is enforced by the user, `playbin` will use
|
||||||
available). The rest of the tutorial shows how to find out the available
|
available). The rest of the tutorial shows how to find out the available
|
||||||
visualization elements and enforce one to `playbin`.
|
visualization elements and enforce one to `playbin`.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Get a list of all visualization plugins */
|
/* Get a list of all visualization plugins */
|
||||||
list = gst_registry_feature_filter (gst_registry_get_default (), filter_vis_features, FALSE, NULL);
|
list = gst_registry_feature_filter (gst_registry_get_default (), filter_vis_features, FALSE, NULL);
|
||||||
```
|
```
|
||||||
|
@ -183,7 +183,7 @@ GStreamer registry and selects those for which
|
||||||
the `filter_vis_features` function returns TRUE. This function selects
|
the `filter_vis_features` function returns TRUE. This function selects
|
||||||
only the Visualization plugins:
|
only the Visualization plugins:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Return TRUE if this is a Visualization element */
|
/* Return TRUE if this is a Visualization element */
|
||||||
static gboolean filter_vis_features (GstPluginFeature *feature, gpointer data) {
|
static gboolean filter_vis_features (GstPluginFeature *feature, gpointer data) {
|
||||||
GstElementFactory *factory;
|
GstElementFactory *factory;
|
||||||
|
@ -213,7 +213,7 @@ is a “string describing the type of element, as an unordered list
|
||||||
separated with slashes (/)”. Examples of classes are “Source/Network”,
|
separated with slashes (/)”. Examples of classes are “Source/Network”,
|
||||||
“Codec/Decoder/Video”, “Codec/Encoder/Audio” or “Visualization”.
|
“Codec/Decoder/Video”, “Codec/Encoder/Audio” or “Visualization”.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Print their names */
|
/* Print their names */
|
||||||
g_print("Available visualization plugins:\n");
|
g_print("Available visualization plugins:\n");
|
||||||
for (walk = list; walk != NULL; walk = g_list_next (walk)) {
|
for (walk = list; walk != NULL; walk = g_list_next (walk)) {
|
||||||
|
@ -234,7 +234,7 @@ Once we have the list of Visualization plugins, we print their names
|
||||||
(`gst_element_factory_get_longname()`) and choose one (in this case,
|
(`gst_element_factory_get_longname()`) and choose one (in this case,
|
||||||
GOOM).
|
GOOM).
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* We have now selected a factory for the visualization element */
|
/* We have now selected a factory for the visualization element */
|
||||||
g_print ("Selected '%s'\n", gst_element_factory_get_longname (selected_factory));
|
g_print ("Selected '%s'\n", gst_element_factory_get_longname (selected_factory));
|
||||||
vis_plugin = gst_element_factory_create (selected_factory, NULL);
|
vis_plugin = gst_element_factory_create (selected_factory, NULL);
|
||||||
|
@ -245,7 +245,7 @@ if (!vis_plugin)
|
||||||
The selected factory is used to instantiate an actual `GstElement` which
|
The selected factory is used to instantiate an actual `GstElement` which
|
||||||
is then passed to `playbin` through the `vis-plugin` property:
|
is then passed to `playbin` through the `vis-plugin` property:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* set vis plugin for playbin */
|
/* set vis plugin for playbin */
|
||||||
g_object_set (pipeline, "vis-plugin", vis_plugin, NULL);
|
g_object_set (pipeline, "vis-plugin", vis_plugin, NULL);
|
||||||
```
|
```
|
||||||
|
|
|
@ -42,7 +42,7 @@ Copy this code into a text file named `playback-tutorial-5.c`.
|
||||||
|
|
||||||
**playback-tutorial-5.c**
|
**playback-tutorial-5.c**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
#include <gst/interfaces/colorbalance.h>
|
#include <gst/interfaces/colorbalance.h>
|
||||||
|
@ -223,7 +223,7 @@ The `main()` function is fairly simple. A `playbin` pipeline is
|
||||||
instantiated and set to run, and a keyboard watch is installed so
|
instantiated and set to run, and a keyboard watch is installed so
|
||||||
keystrokes can be monitored.
|
keystrokes can be monitored.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Output the current values of all Color Balance channels */
|
/* Output the current values of all Color Balance channels */
|
||||||
static void print_current_values (GstElement *pipeline) {
|
static void print_current_values (GstElement *pipeline) {
|
||||||
const GList *channels, *l;
|
const GList *channels, *l;
|
||||||
|
@ -253,7 +253,7 @@ retrieve the current value.
|
||||||
In this example, the minimum and maximum values are used to output the
|
In this example, the minimum and maximum values are used to output the
|
||||||
current value as a percentage.
|
current value as a percentage.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Process a color balance command */
|
/* Process a color balance command */
|
||||||
static void update_color_channel (const gchar *channel_name, gboolean increase, GstColorBalance *cb) {
|
static void update_color_channel (const gchar *channel_name, gboolean increase, GstColorBalance *cb) {
|
||||||
gdouble step;
|
gdouble step;
|
||||||
|
@ -281,7 +281,7 @@ parsed looking for the channel with the specified name. Obviously, this
|
||||||
list could be parsed only once and the pointers to the channels be
|
list could be parsed only once and the pointers to the channels be
|
||||||
stored and indexed by something more efficient than a string.
|
stored and indexed by something more efficient than a string.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Change the channel's value */
|
/* Change the channel's value */
|
||||||
step = 0.1 * (channel->max_value - channel->min_value);
|
step = 0.1 * (channel->max_value - channel->min_value);
|
||||||
value = gst_color_balance_get_value (cb, channel);
|
value = gst_color_balance_get_value (cb, channel);
|
||||||
|
|
|
@ -53,7 +53,7 @@ Copy this code into a text file named `playback-tutorial-7.c`.
|
||||||
|
|
||||||
**playback-tutorial7.c**
|
**playback-tutorial7.c**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
|
|
||||||
int main(int argc, char *argv[]) {
|
int main(int argc, char *argv[]) {
|
||||||
|
@ -137,7 +137,7 @@ int main(int argc, char *argv[]) {
|
||||||
|
|
||||||
# Walkthrough
|
# Walkthrough
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Create the elements inside the sink bin */
|
/* Create the elements inside the sink bin */
|
||||||
equalizer = gst_element_factory_make ("equalizer-3bands", "equalizer");
|
equalizer = gst_element_factory_make ("equalizer-3bands", "equalizer");
|
||||||
convert = gst_element_factory_make ("audioconvert", "convert");
|
convert = gst_element_factory_make ("audioconvert", "convert");
|
||||||
|
@ -153,7 +153,7 @@ All the Elements that compose our sink-bin are instantiated. We use an
|
||||||
between, because we are not sure of the capabilities of the audio sink
|
between, because we are not sure of the capabilities of the audio sink
|
||||||
(since they are hardware-dependant).
|
(since they are hardware-dependant).
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Create the sink bin, add the elements and link them */
|
/* Create the sink bin, add the elements and link them */
|
||||||
bin = gst_bin_new ("audio_sink_bin");
|
bin = gst_bin_new ("audio_sink_bin");
|
||||||
gst_bin_add_many (GST_BIN (bin), equalizer, convert, sink, NULL);
|
gst_bin_add_many (GST_BIN (bin), equalizer, convert, sink, NULL);
|
||||||
|
@ -163,7 +163,7 @@ gst_element_link_many (equalizer, convert, sink, NULL);
|
||||||
This adds the new Elements to the Bin and links them just as we would do
|
This adds the new Elements to the Bin and links them just as we would do
|
||||||
if this was a pipeline.
|
if this was a pipeline.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
pad = gst_element_get_static_pad (equalizer, "sink");
|
pad = gst_element_get_static_pad (equalizer, "sink");
|
||||||
ghost_pad = gst_ghost_pad_new ("sink", pad);
|
ghost_pad = gst_ghost_pad_new ("sink", pad);
|
||||||
gst_pad_set_active (ghost_pad, TRUE);
|
gst_pad_set_active (ghost_pad, TRUE);
|
||||||
|
@ -192,7 +192,7 @@ with `gst_object_unref()`.
|
||||||
At this point, we have a functional sink-bin, which we can use as the
|
At this point, we have a functional sink-bin, which we can use as the
|
||||||
audio sink in `playbin`. We just need to instruct `playbin` to use it:
|
audio sink in `playbin`. We just need to instruct `playbin` to use it:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Set playbin's audio sink to be our sink bin */
|
/* Set playbin's audio sink to be our sink bin */
|
||||||
g_object_set (GST_OBJECT (pipeline), "audio-sink", bin, NULL);
|
g_object_set (GST_OBJECT (pipeline), "audio-sink", bin, NULL);
|
||||||
```
|
```
|
||||||
|
@ -200,7 +200,7 @@ g_object_set (GST_OBJECT (pipeline), "audio-sink", bin, NULL);
|
||||||
It is as simple as setting the `audio-sink` property on `playbin` to
|
It is as simple as setting the `audio-sink` property on `playbin` to
|
||||||
the newly created sink.
|
the newly created sink.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Configure the equalizer */
|
/* Configure the equalizer */
|
||||||
g_object_set (G_OBJECT (equalizer), "band1", (gdouble)-24.0, NULL);
|
g_object_set (G_OBJECT (equalizer), "band1", (gdouble)-24.0, NULL);
|
||||||
g_object_set (G_OBJECT (equalizer), "band2", (gdouble)-24.0, NULL);
|
g_object_set (G_OBJECT (equalizer), "band2", (gdouble)-24.0, NULL);
|
||||||
|
|
|
@ -171,7 +171,7 @@ type. Therefore, the easiest way to make sure hardware acceleration is
|
||||||
enabled or disabled is by changing the rank of the associated element,
|
enabled or disabled is by changing the rank of the associated element,
|
||||||
as shown in this code:
|
as shown in this code:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
static void enable_factory (const gchar *name, gboolean enable) {
|
static void enable_factory (const gchar *name, gboolean enable) {
|
||||||
GstRegistry *registry = NULL;
|
GstRegistry *registry = NULL;
|
||||||
GstElementFactory *factory = NULL;
|
GstElementFactory *factory = NULL;
|
||||||
|
|
|
@ -59,7 +59,7 @@ it in the SDK installation).
|
||||||
|
|
||||||
**playback-tutorial-1.c**
|
**playback-tutorial-1.c**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
|
|
||||||
/* Structure to contain all our information, so we can pass it around */
|
/* Structure to contain all our information, so we can pass it around */
|
||||||
|
@ -304,7 +304,7 @@ Required libraries: `gstreamer-1.0`
|
||||||
|
|
||||||
# Walkthrough
|
# Walkthrough
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Structure to contain all our information, so we can pass it around */
|
/* Structure to contain all our information, so we can pass it around */
|
||||||
typedef struct _CustomData {
|
typedef struct _CustomData {
|
||||||
GstElement *playbin; /* Our one and only element */
|
GstElement *playbin; /* Our one and only element */
|
||||||
|
@ -327,7 +327,7 @@ streams of each type, and the currently playing one. Also, we are going
|
||||||
to use a different mechanism to wait for messages that allows
|
to use a different mechanism to wait for messages that allows
|
||||||
interactivity, so we need a GLib's main loop object.
|
interactivity, so we need a GLib's main loop object.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* playbin flags */
|
/* playbin flags */
|
||||||
typedef enum {
|
typedef enum {
|
||||||
GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
|
GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
|
||||||
|
@ -345,7 +345,7 @@ GObject allows introspection, so the possible values for these flags can
|
||||||
be retrieved at runtime without using this trick, but in a far more
|
be retrieved at runtime without using this trick, but in a far more
|
||||||
cumbersome way.
|
cumbersome way.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Forward definition for the message and keyboard processing functions */
|
/* Forward definition for the message and keyboard processing functions */
|
||||||
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
|
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
|
||||||
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data);
|
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data);
|
||||||
|
@ -364,7 +364,7 @@ pipeline, and use directly the `playbin` element.
|
||||||
|
|
||||||
We focus on some of the other properties of `playbin`, though:
|
We focus on some of the other properties of `playbin`, though:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Set flags to show Audio and Video but ignore Subtitles */
|
/* Set flags to show Audio and Video but ignore Subtitles */
|
||||||
g_object_get (data.playbin, "flags", &flags, NULL);
|
g_object_get (data.playbin, "flags", &flags, NULL);
|
||||||
flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO;
|
flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO;
|
||||||
|
@ -391,7 +391,7 @@ and disabling subtitles, leaving the rest of flags to their default
|
||||||
values (this is why we read the current value of the flags with
|
values (this is why we read the current value of the flags with
|
||||||
`g_object_get()` before overwriting it with `g_object_set()`).
|
`g_object_get()` before overwriting it with `g_object_set()`).
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Set connection speed. This will affect some internal decisions of playbin */
|
/* Set connection speed. This will affect some internal decisions of playbin */
|
||||||
g_object_set (data.playbin, "connection-speed", 56, NULL);
|
g_object_set (data.playbin, "connection-speed", 56, NULL);
|
||||||
```
|
```
|
||||||
|
@ -406,13 +406,13 @@ mostly used in combination with streaming protocols like `mms` or
|
||||||
We have set all these properties one by one, but we could have all of
|
We have set all these properties one by one, but we could have all of
|
||||||
them with a single call to `g_object_set()`:
|
them with a single call to `g_object_set()`:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
g_object_set (data.playbin, "uri", "http://docs.gstreamer.com/media/sintel_cropped_multilingual.webm", "flags", flags, "connection-speed", 56, NULL);
|
g_object_set (data.playbin, "uri", "http://docs.gstreamer.com/media/sintel_cropped_multilingual.webm", "flags", flags, "connection-speed", 56, NULL);
|
||||||
```
|
```
|
||||||
|
|
||||||
This is why `g_object_set()` requires a NULL as the last parameter.
|
This is why `g_object_set()` requires a NULL as the last parameter.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Add a keyboard watch so we get notified of keystrokes */
|
/* Add a keyboard watch so we get notified of keystrokes */
|
||||||
#ifdef _WIN32
|
#ifdef _WIN32
|
||||||
io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
|
io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
|
||||||
|
@ -429,7 +429,7 @@ Applications normally have their own way of handling user input, and
|
||||||
GStreamer has little to do with it besides the Navigation interface
|
GStreamer has little to do with it besides the Navigation interface
|
||||||
discussed briefly in [Tutorial 17: DVD playback].
|
discussed briefly in [Tutorial 17: DVD playback].
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Create a GLib Main Loop and set it to run */
|
/* Create a GLib Main Loop and set it to run */
|
||||||
data.main_loop = g_main_loop_new (NULL, FALSE);
|
data.main_loop = g_main_loop_new (NULL, FALSE);
|
||||||
g_main_loop_run (data.main_loop);
|
g_main_loop_run (data.main_loop);
|
||||||
|
@ -446,7 +446,7 @@ times: `handle_message` when a message appears on the bus, and
|
||||||
There is nothing new in handle\_message, except that when the pipeline
|
There is nothing new in handle\_message, except that when the pipeline
|
||||||
moves to the PLAYING state, it will call the `analyze_streams` function:
|
moves to the PLAYING state, it will call the `analyze_streams` function:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Extract some metadata from the streams and print it on the screen */
|
/* Extract some metadata from the streams and print it on the screen */
|
||||||
static void analyze_streams (CustomData *data) {
|
static void analyze_streams (CustomData *data) {
|
||||||
gint i;
|
gint i;
|
||||||
|
@ -465,7 +465,7 @@ media and prints it on the screen. The number of video, audio and
|
||||||
subtitle streams is directly available through the `n-video`,
|
subtitle streams is directly available through the `n-video`,
|
||||||
`n-audio` and `n-text` properties.
|
`n-audio` and `n-text` properties.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
for (i = 0; i < data->n_video; i++) {
|
for (i = 0; i < data->n_video; i++) {
|
||||||
tags = NULL;
|
tags = NULL;
|
||||||
/* Retrieve the stream's video tags */
|
/* Retrieve the stream's video tags */
|
||||||
|
@ -503,7 +503,7 @@ documentation. In this example we are interested in the
|
||||||
`GST_TAG_LANGUAGE_CODE` of the streams and their `GST_TAG_*_CODEC`
|
`GST_TAG_LANGUAGE_CODE` of the streams and their `GST_TAG_*_CODEC`
|
||||||
(audio, video or text).
|
(audio, video or text).
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
g_object_get (data->playbin, "current-video", &data->current_video, NULL);
|
g_object_get (data->playbin, "current-video", &data->current_video, NULL);
|
||||||
g_object_get (data->playbin, "current-audio", &data->current_audio, NULL);
|
g_object_get (data->playbin, "current-audio", &data->current_audio, NULL);
|
||||||
g_object_get (data->playbin, "current-text", &data->current_text, NULL);
|
g_object_get (data->playbin, "current-text", &data->current_text, NULL);
|
||||||
|
@ -519,7 +519,7 @@ never make any assumption. Multiple internal conditions can make
|
||||||
which the streams are listed can change from one run to another, so
|
which the streams are listed can change from one run to another, so
|
||||||
checking the metadata to identify one particular stream becomes crucial.
|
checking the metadata to identify one particular stream becomes crucial.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Process keyboard input */
|
/* Process keyboard input */
|
||||||
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
|
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
|
||||||
gchar *str = NULL;
|
gchar *str = NULL;
|
||||||
|
|
|
@ -52,7 +52,7 @@ Copy this code into a text file named `playback-tutorial-4.c`.
|
||||||
|
|
||||||
**playback-tutorial-4.c**
|
**playback-tutorial-4.c**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
|
|
||||||
|
@ -258,7 +258,7 @@ only the differences.
|
||||||
|
|
||||||
#### Setup
|
#### Setup
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Set the download flag */
|
/* Set the download flag */
|
||||||
g_object_get (pipeline, "flags", &flags, NULL);
|
g_object_get (pipeline, "flags", &flags, NULL);
|
||||||
flags |= GST_PLAY_FLAG_DOWNLOAD;
|
flags |= GST_PLAY_FLAG_DOWNLOAD;
|
||||||
|
@ -269,7 +269,7 @@ By setting this flag, `playbin` instructs its internal queue (a
|
||||||
`queue2` element, actually) to store all downloaded
|
`queue2` element, actually) to store all downloaded
|
||||||
data.
|
data.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
g_signal_connect (pipeline, "deep-notify::temp-location", G_CALLBACK (got_location), NULL);
|
g_signal_connect (pipeline, "deep-notify::temp-location", G_CALLBACK (got_location), NULL);
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -280,7 +280,7 @@ changes, indicating that the `queue2` has decided where to store the
|
||||||
downloaded
|
downloaded
|
||||||
data.
|
data.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
static void got_location (GstObject *gstobject, GstObject *prop_object, GParamSpec *prop, gpointer data) {
|
static void got_location (GstObject *gstobject, GstObject *prop_object, GParamSpec *prop, gpointer data) {
|
||||||
gchar *location;
|
gchar *location;
|
||||||
g_object_get (G_OBJECT (prop_object), "temp-location", &location, NULL);
|
g_object_get (G_OBJECT (prop_object), "temp-location", &location, NULL);
|
||||||
|
@ -311,7 +311,7 @@ removed. As the comment reads, you can keep it by setting the
|
||||||
In `main` we also install a timer which we use to refresh the UI every
|
In `main` we also install a timer which we use to refresh the UI every
|
||||||
second.
|
second.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Register a function that GLib will call every second */
|
/* Register a function that GLib will call every second */
|
||||||
g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);
|
g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);
|
||||||
```
|
```
|
||||||
|
@ -330,7 +330,7 @@ pipeline is paused). Keep in mind that if your network is fast enough,
|
||||||
you will not see the download bar (the dashes) advance at all; it will
|
you will not see the download bar (the dashes) advance at all; it will
|
||||||
be completely full from the beginning.
|
be completely full from the beginning.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
static gboolean refresh_ui (CustomData *data) {
|
static gboolean refresh_ui (CustomData *data) {
|
||||||
GstQuery *query;
|
GstQuery *query;
|
||||||
gboolean result;
|
gboolean result;
|
||||||
|
@ -354,7 +354,7 @@ succeeded. The answer to the query is contained in the same
|
||||||
`GstQuery` structure we created, and can be retrieved using multiple
|
`GstQuery` structure we created, and can be retrieved using multiple
|
||||||
parse methods:
|
parse methods:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
n_ranges = gst_query_get_n_buffering_ranges (query);
|
n_ranges = gst_query_get_n_buffering_ranges (query);
|
||||||
for (range = 0; range < n_ranges; range++) {
|
for (range = 0; range < n_ranges; range++) {
|
||||||
gint64 start, stop;
|
gint64 start, stop;
|
||||||
|
@ -378,7 +378,7 @@ range) depends on what we requested in the
|
||||||
`gst_query_new_buffering()` call. In this case, PERCENTAGE. These
|
`gst_query_new_buffering()` call. In this case, PERCENTAGE. These
|
||||||
values are used to generate the graph.
|
values are used to generate the graph.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
if (gst_element_query_position (data->pipeline, &format, &position) &&
|
if (gst_element_query_position (data->pipeline, &format, &position) &&
|
||||||
GST_CLOCK_TIME_IS_VALID (position) &&
|
GST_CLOCK_TIME_IS_VALID (position) &&
|
||||||
gst_element_query_duration (data->pipeline, &format, &duration) &&
|
gst_element_query_duration (data->pipeline, &format, &duration) &&
|
||||||
|
@ -400,7 +400,7 @@ depending on the buffering level. If it is below 100%, the code in the
|
||||||
an ‘`X`’. If the buffering level is 100% the pipeline is in the
|
an ‘`X`’. If the buffering level is 100% the pipeline is in the
|
||||||
`PLAYING` state and we print a ‘`>`’.
|
`PLAYING` state and we print a ‘`>`’.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
if (data->buffering_level < 100) {
|
if (data->buffering_level < 100) {
|
||||||
g_print (" Buffering: %3d%%", data->buffering_level);
|
g_print (" Buffering: %3d%%", data->buffering_level);
|
||||||
} else {
|
} else {
|
||||||
|
@ -413,7 +413,7 @@ information (and delete it otherwise).
|
||||||
|
|
||||||
#### Limiting the size of the downloaded file
|
#### Limiting the size of the downloaded file
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Uncomment this line to limit the amount of downloaded data */
|
/* Uncomment this line to limit the amount of downloaded data */
|
||||||
/* g_object_set (pipeline, "ring-buffer-max-size", (guint64)4000000, NULL); */
|
/* g_object_set (pipeline, "ring-buffer-max-size", (guint64)4000000, NULL); */
|
||||||
```
|
```
|
||||||
|
|
|
@ -30,7 +30,7 @@ Copy this code into a text file named `playback-tutorial-3.c`.
|
||||||
|
|
||||||
**playback-tutorial-3.c**
|
**playback-tutorial-3.c**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
|
|
||||||
|
@ -188,7 +188,7 @@ int main(int argc, char *argv[]) {
|
||||||
To use an `appsrc` as the source for the pipeline, simply instantiate a
|
To use an `appsrc` as the source for the pipeline, simply instantiate a
|
||||||
`playbin` and set its URI to `appsrc://`
|
`playbin` and set its URI to `appsrc://`
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Create the playbin element */
|
/* Create the playbin element */
|
||||||
data.pipeline = gst_parse_launch ("playbin uri=appsrc://", NULL);
|
data.pipeline = gst_parse_launch ("playbin uri=appsrc://", NULL);
|
||||||
```
|
```
|
||||||
|
@ -197,7 +197,7 @@ data.pipeline = gst_parse_launch ("playbin uri=appsrc://", NULL);
|
||||||
`source-setup` signal to allow the application to configure
|
`source-setup` signal to allow the application to configure
|
||||||
it:
|
it:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
g_signal_connect (data.pipeline, "source-setup", G_CALLBACK (source_setup), &data);
|
g_signal_connect (data.pipeline, "source-setup", G_CALLBACK (source_setup), &data);
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -206,7 +206,7 @@ since, once the signal handler returns, `playbin` will instantiate the
|
||||||
next element in the pipeline according to these
|
next element in the pipeline according to these
|
||||||
caps:
|
caps:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* This function is called when playbin has created the appsrc element, so we have
|
/* This function is called when playbin has created the appsrc element, so we have
|
||||||
* a chance to configure it. */
|
* a chance to configure it. */
|
||||||
static void source_setup (GstElement *pipeline, GstElement *source, CustomData *data) {
|
static void source_setup (GstElement *pipeline, GstElement *source, CustomData *data) {
|
||||||
|
|
|
@ -39,7 +39,7 @@ it in the SDK installation).
|
||||||
|
|
||||||
**playback-tutorial-2.c**
|
**playback-tutorial-2.c**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <gst/gst.h>
|
#include <gst/gst.h>
|
||||||
|
|
||||||
/* Structure to contain all our information, so we can pass it around */
|
/* Structure to contain all our information, so we can pass it around */
|
||||||
|
@ -295,7 +295,7 @@ This tutorial is copied from [Playback tutorial 1: Playbin
|
||||||
usage](Playback%2Btutorial%2B1%253A%2BPlaybin%2Busage.html) with some
|
usage](Playback%2Btutorial%2B1%253A%2BPlaybin%2Busage.html) with some
|
||||||
changes, so let's review only the changes.
|
changes, so let's review only the changes.
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Set the subtitle URI to play and some font description */
|
/* Set the subtitle URI to play and some font description */
|
||||||
g_object_set (data.playbin, "suburi", "http://docs.gstreamer.com/media/sintel_trailer_gr.srt", NULL);
|
g_object_set (data.playbin, "suburi", "http://docs.gstreamer.com/media/sintel_trailer_gr.srt", NULL);
|
||||||
g_object_set (data.playbin, "subtitle-font-desc", "Sans, 18", NULL);
|
g_object_set (data.playbin, "subtitle-font-desc", "Sans, 18", NULL);
|
||||||
|
@ -349,7 +349,7 @@ Extra-Expanded, Ultra-Expanded
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* Set flags to show Audio, Video and Subtitles */
|
/* Set flags to show Audio, Video and Subtitles */
|
||||||
g_object_get (data.playbin, "flags", &flags, NULL);
|
g_object_get (data.playbin, "flags", &flags, NULL);
|
||||||
flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_TEXT;
|
flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_TEXT;
|
||||||
|
|
|
@ -63,13 +63,13 @@ with the g\[st\]\_\<class\> prefix removed and converted to camel case.
|
||||||
|
|
||||||
For example,
|
For example,
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
gboolean gst_caps_is_emtpy(const GstCaps *caps);
|
gboolean gst_caps_is_emtpy(const GstCaps *caps);
|
||||||
```
|
```
|
||||||
|
|
||||||
becomes:
|
becomes:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
namespace QGst {
|
namespace QGst {
|
||||||
class Caps {
|
class Caps {
|
||||||
bool isEmpty() const;
|
bool isEmpty() const;
|
||||||
|
@ -102,7 +102,7 @@ to call `g_object_ref()`` and g_object_unref()`.
|
||||||
QtGStreamer provides access to the underlying C objects, in case you
|
QtGStreamer provides access to the underlying C objects, in case you
|
||||||
need them. This is accessible with a simple cast:
|
need them. This is accessible with a simple cast:
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
ElementPtr qgstElement = QGst::ElementFactory::make("playbin");
|
ElementPtr qgstElement = QGst::ElementFactory::make("playbin");
|
||||||
GstElement* gstElement = GST_ELEMENT(qgstElement);
|
GstElement* gstElement = GST_ELEMENT(qgstElement);
|
||||||
```
|
```
|
||||||
|
|
|
@ -35,7 +35,7 @@ target_link_libraries(appsink-src ${QTGSTREAMER_UTILS_LIBRARIES} ${QT_QTCORE_LIB
|
||||||
|
|
||||||
**main.cpp**
|
**main.cpp**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
#include <iostream>
|
#include <iostream>
|
||||||
#include <QtCore/QCoreApplication>
|
#include <QtCore/QCoreApplication>
|
||||||
#include <QGlib/Error>
|
#include <QGlib/Error>
|
||||||
|
@ -144,7 +144,7 @@ As this is a very simple example, most of the action happens in the
|
||||||
|
|
||||||
**GStreamer Initialization**
|
**GStreamer Initialization**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
QGst::init(&argc, &argv);
|
QGst::init(&argc, &argv);
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -152,7 +152,7 @@ Now we can construct the first half of the pipeline:
|
||||||
|
|
||||||
**Pipeline Setup**
|
**Pipeline Setup**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
const char *caps = "audio/x-raw-int,channels=1,rate=8000,"
|
const char *caps = "audio/x-raw-int,channels=1,rate=8000,"
|
||||||
"signed=(boolean)true,width=16,depth=16,endianness=1234";
|
"signed=(boolean)true,width=16,depth=16,endianness=1234";
|
||||||
|
|
||||||
|
@ -186,7 +186,7 @@ The second half of the pipeline is created similarly:
|
||||||
|
|
||||||
**Second Pipeline**
|
**Second Pipeline**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* sink pipeline */
|
/* sink pipeline */
|
||||||
QString pipe2Descr = QString("appsrc name=\"mysrc\" caps=\"%1\" ! autoaudiosink").arg(caps);
|
QString pipe2Descr = QString("appsrc name=\"mysrc\" caps=\"%1\" ! autoaudiosink").arg(caps);
|
||||||
pipeline2 = QGst::Parse::launch(pipe2Descr).dynamicCast<QGst::Pipeline>();
|
pipeline2 = QGst::Parse::launch(pipe2Descr).dynamicCast<QGst::Pipeline>();
|
||||||
|
@ -199,7 +199,7 @@ Finally, the pipeline is started:
|
||||||
|
|
||||||
**Starting the pipeline**
|
**Starting the pipeline**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
/* start playing */
|
/* start playing */
|
||||||
pipeline1->setState(QGst::StatePlaying);
|
pipeline1->setState(QGst::StatePlaying);
|
||||||
pipeline2->setState(QGst::StatePlaying);
|
pipeline2->setState(QGst::StatePlaying);
|
||||||
|
@ -212,7 +212,7 @@ ready for processing:
|
||||||
|
|
||||||
**MySink::newBuffer()**
|
**MySink::newBuffer()**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
virtual QGst::FlowReturn newBuffer()
|
virtual QGst::FlowReturn newBuffer()
|
||||||
{
|
{
|
||||||
m_src->pushBuffer(pullBuffer());
|
m_src->pushBuffer(pullBuffer());
|
||||||
|
@ -225,7 +225,7 @@ Our implementation takes the new buffer and pushes it into the
|
||||||
|
|
||||||
**Player::Player()**
|
**Player::Player()**
|
||||||
|
|
||||||
``` lang=c
|
``` c
|
||||||
Player::Player(int argc, char **argv)
|
Player::Player(int argc, char **argv)
|
||||||
: QCoreApplication(argc, argv), m_sink(&m_src)
|
: QCoreApplication(argc, argv), m_sink(&m_src)
|
||||||
```
|
```
|
||||||
|
|