Merge remote-tracking branch 'tutorials/master'
22  .gitignore  vendored
@@ -1,2 +1,24 @@
 /built_doc
 /hotdoc-private*
+Debug
+Release
+ipch
+*.user
+*.sdf
+*.suo
+*.opensdf
+vs/2010/libs
+bin
+gen
+libs
+obj
+.classpath
+.project
+.settings
+.libs
+.cproject
+gst-build
+project.properties
+gst_sdk
+.DS_Store
+xcuserdata
17  tutorials/android-tutorial-1/AndroidManifest.xml  Normal file
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="org.freedesktop.gstreamer.tutorials.tutorial_1"
    android:versionCode="1"
    android:versionName="1.0">
    <uses-sdk android:minSdkVersion="9" android:targetSdkVersion="14"/>
    <application android:label="@string/app_name"
                 android:icon="@drawable/gstreamer_logo_1">
        <activity android:name=".Tutorial1"
                  android:label="@string/app_name">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>
31  tutorials/android-tutorial-1/jni/Android.mk  Normal file
@@ -0,0 +1,31 @@
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE    := tutorial-1
LOCAL_SRC_FILES := tutorial-1.c
LOCAL_SHARED_LIBRARIES := gstreamer_android
LOCAL_LDLIBS := -llog
include $(BUILD_SHARED_LIBRARY)

ifndef GSTREAMER_ROOT_ANDROID
$(error GSTREAMER_ROOT_ANDROID is not defined!)
endif

ifeq ($(TARGET_ARCH_ABI),armeabi)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/arm
else ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/armv7
else ifeq ($(TARGET_ARCH_ABI),arm64-v8a)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/arm64
else ifeq ($(TARGET_ARCH_ABI),x86)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/x86
else ifeq ($(TARGET_ARCH_ABI),x86_64)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/x86_64
else
$(error Target arch ABI not supported: $(TARGET_ARCH_ABI))
endif

GSTREAMER_NDK_BUILD_PATH  := $(GSTREAMER_ROOT)/share/gst-android/ndk-build/
GSTREAMER_PLUGINS         := coreelements
include $(GSTREAMER_NDK_BUILD_PATH)/gstreamer-1.0.mk
1  tutorials/android-tutorial-1/jni/Application.mk  Normal file
@@ -0,0 +1 @@
APP_ABI = armeabi armeabi-v7a arm64-v8a x86 x86_64
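Note: the Android.mk above aborts with $(error ...) unless GSTREAMER_ROOT_ANDROID is set. A minimal sketch of how the variable might be supplied (the path below is a placeholder, not taken from this merge; point it at wherever the GStreamer Android binaries were unpacked, or pass the variable on the ndk-build command line instead):

    # hypothetical fragment -- replace the placeholder path with the location
    # of the unpacked GStreamer Android binaries before invoking ndk-build
    GSTREAMER_ROOT_ANDROID := /path/to/gstreamer-1.0-android
    export GSTREAMER_ROOT_ANDROID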
31  tutorials/android-tutorial-1/jni/tutorial-1.c  Normal file
@@ -0,0 +1,31 @@
#include <string.h>
#include <jni.h>
#include <android/log.h>
#include <gst/gst.h>

/*
 * Java Bindings
 */
static jstring gst_native_get_gstreamer_info (JNIEnv* env, jobject thiz) {
  char *version_utf8 = gst_version_string();
  jstring version_jstring = (*env)->NewStringUTF(env, version_utf8);
  g_free (version_utf8);
  return version_jstring;
}

static JNINativeMethod native_methods[] = {
  { "nativeGetGStreamerInfo", "()Ljava/lang/String;", (void *) gst_native_get_gstreamer_info}
};

jint JNI_OnLoad(JavaVM *vm, void *reserved) {
  JNIEnv *env = NULL;

  if ((*vm)->GetEnv(vm, (void**) &env, JNI_VERSION_1_4) != JNI_OK) {
    __android_log_print (ANDROID_LOG_ERROR, "tutorial-1", "Could not retrieve JNIEnv");
    return 0;
  }
  jclass klass = (*env)->FindClass (env, "org/freedesktop/gstreamer/tutorials/tutorial_1/Tutorial1");
  (*env)->RegisterNatives (env, klass, native_methods, G_N_ELEMENTS(native_methods));

  return JNI_VERSION_1_4;
}
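The second field of each JNINativeMethod entry is a standard JNI type signature. As a reference (not part of the diff), the signatures used across these tutorials decode as follows:

    /* JNI signature notation used in the native_methods[] tables:
     *   "()V"                   - no arguments, returns void
     *   "()Z"                   - no arguments, returns boolean
     *   "()Ljava/lang/String;"  - no arguments, returns java.lang.String
     *   "(Ljava/lang/Object;)V" - one java.lang.Object argument, returns void
     */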
(6 new binary image files added; sizes between 1 KiB and 5 KiB)
14  tutorials/android-tutorial-1/res/layout/main.xml  Normal file
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical" >

    <TextView
        android:id="@+id/textview_info"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:gravity="center_vertical|center_horizontal"
        android:textAppearance="?android:attr/textAppearanceLarge" />

</LinearLayout>
4  tutorials/android-tutorial-1/res/values/strings.xml  Normal file
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <string name="app_name">GStreamer tutorial 1</string>
</resources>
@@ -0,0 +1,38 @@
package org.freedesktop.gstreamer.tutorials.tutorial_1;

import android.app.Activity;
import android.os.Bundle;
import android.widget.TextView;
import android.widget.Toast;

import org.freedesktop.gstreamer.GStreamer;

public class Tutorial1 extends Activity {
    private native String nativeGetGStreamerInfo();

    // Called when the activity is first created.
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);

        try {
            GStreamer.init(this);
        } catch (Exception e) {
            Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
            finish();
            return;
        }

        setContentView(R.layout.main);

        TextView tv = (TextView)findViewById(R.id.textview_info);
        tv.setText("Welcome to " + nativeGetGStreamerInfo() + " !");
    }

    static {
        System.loadLibrary("gstreamer_android");
        System.loadLibrary("tutorial-1");
    }

}
17  tutorials/android-tutorial-2/AndroidManifest.xml  Normal file
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="org.freedesktop.gstreamer.tutorials.tutorial_2"
    android:versionCode="1"
    android:versionName="1.0">
    <uses-sdk android:minSdkVersion="9" android:targetSdkVersion="14"/>
    <application android:label="@string/app_name"
                 android:icon="@drawable/gstreamer_logo_2">
        <activity android:name=".Tutorial2"
                  android:label="@string/app_name">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>
32  tutorials/android-tutorial-2/jni/Android.mk  Normal file
@@ -0,0 +1,32 @@
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE    := tutorial-2
LOCAL_SRC_FILES := tutorial-2.c
LOCAL_SHARED_LIBRARIES := gstreamer_android
LOCAL_LDLIBS := -llog
include $(BUILD_SHARED_LIBRARY)

ifndef GSTREAMER_ROOT_ANDROID
$(error GSTREAMER_ROOT_ANDROID is not defined!)
endif

ifeq ($(TARGET_ARCH_ABI),armeabi)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/arm
else ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/armv7
else ifeq ($(TARGET_ARCH_ABI),arm64-v8a)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/arm64
else ifeq ($(TARGET_ARCH_ABI),x86)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/x86
else ifeq ($(TARGET_ARCH_ABI),x86_64)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/x86_64
else
$(error Target arch ABI not supported: $(TARGET_ARCH_ABI))
endif

GSTREAMER_NDK_BUILD_PATH  := $(GSTREAMER_ROOT)/share/gst-android/ndk-build/
include $(GSTREAMER_NDK_BUILD_PATH)/plugins.mk
GSTREAMER_PLUGINS         := $(GSTREAMER_PLUGINS_CORE) $(GSTREAMER_PLUGINS_SYS)
include $(GSTREAMER_NDK_BUILD_PATH)/gstreamer-1.0.mk
1  tutorials/android-tutorial-2/jni/Application.mk  Normal file
@@ -0,0 +1 @@
APP_ABI = armeabi armeabi-v7a arm64-v8a x86 x86_64
275  tutorials/android-tutorial-2/jni/tutorial-2.c  Normal file
@@ -0,0 +1,275 @@
#include <string.h>
#include <jni.h>
#include <android/log.h>
#include <gst/gst.h>
#include <pthread.h>

GST_DEBUG_CATEGORY_STATIC (debug_category);
#define GST_CAT_DEFAULT debug_category

/*
 * These macros provide a way to store the native pointer to CustomData, which might be 32 or 64 bits, into
 * a jlong, which is always 64 bits, without warnings.
 */
#if GLIB_SIZEOF_VOID_P == 8
# define GET_CUSTOM_DATA(env, thiz, fieldID) (CustomData *)(*env)->GetLongField (env, thiz, fieldID)
# define SET_CUSTOM_DATA(env, thiz, fieldID, data) (*env)->SetLongField (env, thiz, fieldID, (jlong)data)
#else
# define GET_CUSTOM_DATA(env, thiz, fieldID) (CustomData *)(jint)(*env)->GetLongField (env, thiz, fieldID)
# define SET_CUSTOM_DATA(env, thiz, fieldID, data) (*env)->SetLongField (env, thiz, fieldID, (jlong)(jint)data)
#endif

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  jobject app;            /* Application instance, used to call its methods. A global reference is kept. */
  GstElement *pipeline;   /* The running pipeline */
  GMainContext *context;  /* GLib context used to run the main loop */
  GMainLoop *main_loop;   /* GLib main loop */
  gboolean initialized;   /* To avoid informing the UI multiple times about the initialization */
} CustomData;

/* These global variables cache values which are not changing during execution */
static pthread_t gst_app_thread;
static pthread_key_t current_jni_env;
static JavaVM *java_vm;
static jfieldID custom_data_field_id;
static jmethodID set_message_method_id;
static jmethodID on_gstreamer_initialized_method_id;

/*
 * Private methods
 */

/* Register this thread with the VM */
static JNIEnv *attach_current_thread (void) {
  JNIEnv *env;
  JavaVMAttachArgs args;

  GST_DEBUG ("Attaching thread %p", g_thread_self ());
  args.version = JNI_VERSION_1_4;
  args.name = NULL;
  args.group = NULL;

  if ((*java_vm)->AttachCurrentThread (java_vm, &env, &args) < 0) {
    GST_ERROR ("Failed to attach current thread");
    return NULL;
  }

  return env;
}

/* Unregister this thread from the VM */
static void detach_current_thread (void *env) {
  GST_DEBUG ("Detaching thread %p", g_thread_self ());
  (*java_vm)->DetachCurrentThread (java_vm);
}

/* Retrieve the JNI environment for this thread */
static JNIEnv *get_jni_env (void) {
  JNIEnv *env;

  if ((env = pthread_getspecific (current_jni_env)) == NULL) {
    env = attach_current_thread ();
    pthread_setspecific (current_jni_env, env);
  }

  return env;
}

/* Change the content of the UI's TextView */
static void set_ui_message (const gchar *message, CustomData *data) {
  JNIEnv *env = get_jni_env ();
  GST_DEBUG ("Setting message to: %s", message);
  jstring jmessage = (*env)->NewStringUTF(env, message);
  (*env)->CallVoidMethod (env, data->app, set_message_method_id, jmessage);
  if ((*env)->ExceptionCheck (env)) {
    GST_ERROR ("Failed to call Java method");
    (*env)->ExceptionClear (env);
  }
  (*env)->DeleteLocalRef (env, jmessage);
}

/* Retrieve errors from the bus and show them on the UI */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;
  gchar *message_string;

  gst_message_parse_error (msg, &err, &debug_info);
  message_string = g_strdup_printf ("Error received from element %s: %s", GST_OBJECT_NAME (msg->src), err->message);
  g_clear_error (&err);
  g_free (debug_info);
  set_ui_message (message_string, data);
  g_free (message_string);
  gst_element_set_state (data->pipeline, GST_STATE_NULL);
}

/* Notify UI about pipeline state changes */
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GstState old_state, new_state, pending_state;
  gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
  /* Only pay attention to messages coming from the pipeline, not its children */
  if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->pipeline)) {
    gchar *message = g_strdup_printf("State changed to %s", gst_element_state_get_name(new_state));
    set_ui_message(message, data);
    g_free (message);
  }
}

/* Check if all conditions are met to report GStreamer as initialized.
 * These conditions will change depending on the application */
static void check_initialization_complete (CustomData *data) {
  JNIEnv *env = get_jni_env ();
  if (!data->initialized && data->main_loop) {
    GST_DEBUG ("Initialization complete, notifying application. main_loop:%p", data->main_loop);
    (*env)->CallVoidMethod (env, data->app, on_gstreamer_initialized_method_id);
    if ((*env)->ExceptionCheck (env)) {
      GST_ERROR ("Failed to call Java method");
      (*env)->ExceptionClear (env);
    }
    data->initialized = TRUE;
  }
}

/* Main method for the native code. This is executed on its own thread. */
static void *app_function (void *userdata) {
  JavaVMAttachArgs args;
  GstBus *bus;
  CustomData *data = (CustomData *)userdata;
  GSource *bus_source;
  GError *error = NULL;

  GST_DEBUG ("Creating pipeline in CustomData at %p", data);

  /* Create our own GLib Main Context and make it the default one */
  data->context = g_main_context_new ();
  g_main_context_push_thread_default(data->context);

  /* Build pipeline */
  data->pipeline = gst_parse_launch("audiotestsrc ! audioconvert ! audioresample ! autoaudiosink", &error);
  if (error) {
    gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message);
    g_clear_error (&error);
    set_ui_message(message, data);
    g_free (message);
    return NULL;
  }

  /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
  bus = gst_element_get_bus (data->pipeline);
  bus_source = gst_bus_create_watch (bus);
  g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
  g_source_attach (bus_source, data->context);
  g_source_unref (bus_source);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, data);
  gst_object_unref (bus);

  /* Create a GLib Main Loop and set it to run */
  GST_DEBUG ("Entering main loop... (CustomData:%p)", data);
  data->main_loop = g_main_loop_new (data->context, FALSE);
  check_initialization_complete (data);
  g_main_loop_run (data->main_loop);
  GST_DEBUG ("Exited main loop");
  g_main_loop_unref (data->main_loop);
  data->main_loop = NULL;

  /* Free resources */
  g_main_context_pop_thread_default(data->context);
  g_main_context_unref (data->context);
  gst_element_set_state (data->pipeline, GST_STATE_NULL);
  gst_object_unref (data->pipeline);

  return NULL;
}

/*
 * Java Bindings
 */

/* Instruct the native code to create its internal data structure, pipeline and thread */
static void gst_native_init (JNIEnv* env, jobject thiz) {
  CustomData *data = g_new0 (CustomData, 1);
  SET_CUSTOM_DATA (env, thiz, custom_data_field_id, data);
  GST_DEBUG_CATEGORY_INIT (debug_category, "tutorial-2", 0, "Android tutorial 2");
  gst_debug_set_threshold_for_name("tutorial-2", GST_LEVEL_DEBUG);
  GST_DEBUG ("Created CustomData at %p", data);
  data->app = (*env)->NewGlobalRef (env, thiz);
  GST_DEBUG ("Created GlobalRef for app object at %p", data->app);
  pthread_create (&gst_app_thread, NULL, &app_function, data);
}

/* Quit the main loop, remove the native thread and free resources */
static void gst_native_finalize (JNIEnv* env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Quitting main loop...");
  g_main_loop_quit (data->main_loop);
  GST_DEBUG ("Waiting for thread to finish...");
  pthread_join (gst_app_thread, NULL);
  GST_DEBUG ("Deleting GlobalRef for app object at %p", data->app);
  (*env)->DeleteGlobalRef (env, data->app);
  GST_DEBUG ("Freeing CustomData at %p", data);
  g_free (data);
  SET_CUSTOM_DATA (env, thiz, custom_data_field_id, NULL);
  GST_DEBUG ("Done finalizing");
}

/* Set pipeline to PLAYING state */
static void gst_native_play (JNIEnv* env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Setting state to PLAYING");
  gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
}

/* Set pipeline to PAUSED state */
static void gst_native_pause (JNIEnv* env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Setting state to PAUSED");
  gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
}

/* Static class initializer: retrieve method and field IDs */
static jboolean gst_native_class_init (JNIEnv* env, jclass klass) {
  custom_data_field_id = (*env)->GetFieldID (env, klass, "native_custom_data", "J");
  set_message_method_id = (*env)->GetMethodID (env, klass, "setMessage", "(Ljava/lang/String;)V");
  on_gstreamer_initialized_method_id = (*env)->GetMethodID (env, klass, "onGStreamerInitialized", "()V");

  if (!custom_data_field_id || !set_message_method_id || !on_gstreamer_initialized_method_id) {
    /* We emit this message through the Android log instead of the GStreamer log because the latter
     * has not been initialized yet.
     */
    __android_log_print (ANDROID_LOG_ERROR, "tutorial-2", "The calling class does not implement all necessary interface methods");
    return JNI_FALSE;
  }
  return JNI_TRUE;
}

/* List of implemented native methods */
static JNINativeMethod native_methods[] = {
  { "nativeInit", "()V", (void *) gst_native_init},
  { "nativeFinalize", "()V", (void *) gst_native_finalize},
  { "nativePlay", "()V", (void *) gst_native_play},
  { "nativePause", "()V", (void *) gst_native_pause},
  { "nativeClassInit", "()Z", (void *) gst_native_class_init}
};

/* Library initializer */
jint JNI_OnLoad(JavaVM *vm, void *reserved) {
  JNIEnv *env = NULL;

  java_vm = vm;

  if ((*vm)->GetEnv(vm, (void**) &env, JNI_VERSION_1_4) != JNI_OK) {
    __android_log_print (ANDROID_LOG_ERROR, "tutorial-2", "Could not retrieve JNIEnv");
    return 0;
  }
  jclass klass = (*env)->FindClass (env, "org/freedesktop/gstreamer/tutorials/tutorial_2/Tutorial2");
  (*env)->RegisterNatives (env, klass, native_methods, G_N_ELEMENTS(native_methods));

  pthread_key_create (&current_jni_env, detach_current_thread);

  return JNI_VERSION_1_4;
}
(6 new binary image files added; sizes between 1 KiB and 5.2 KiB)
38  tutorials/android-tutorial-2/res/layout/main.xml  Normal file
@@ -0,0 +1,38 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:gravity="center_vertical"
    android:orientation="vertical" >

    <TextView
        android:id="@+id/textview_message"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal" />

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:gravity="center_horizontal"
        android:orientation="horizontal" >

        <ImageButton
            android:id="@+id/button_play"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_play"
            android:src="@android:drawable/ic_media_play"
            android:text="@string/button_play" />

        <ImageButton
            android:id="@+id/button_stop"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_stop"
            android:src="@android:drawable/ic_media_pause"
            android:text="@string/button_stop" />
    </LinearLayout>

</LinearLayout>
6  tutorials/android-tutorial-2/res/values/strings.xml  Normal file
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <string name="app_name">GStreamer tutorial 2</string>
    <string name="button_play">Play</string>
    <string name="button_stop">Stop</string>
</resources>
@@ -0,0 +1,119 @@
package org.freedesktop.gstreamer.tutorials.tutorial_2;

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageButton;
import android.widget.TextView;
import android.widget.Toast;

import org.freedesktop.gstreamer.GStreamer;

public class Tutorial2 extends Activity {
    private native void nativeInit();     // Initialize native code, build pipeline, etc
    private native void nativeFinalize(); // Destroy pipeline and shutdown native code
    private native void nativePlay();     // Set pipeline to PLAYING
    private native void nativePause();    // Set pipeline to PAUSED
    private static native boolean nativeClassInit(); // Initialize native class: cache Method IDs for callbacks
    private long native_custom_data;      // Native code will use this to keep private data

    private boolean is_playing_desired;   // Whether the user asked to go to PLAYING

    // Called when the activity is first created.
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);

        // Initialize GStreamer and warn if it fails
        try {
            GStreamer.init(this);
        } catch (Exception e) {
            Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
            finish();
            return;
        }

        setContentView(R.layout.main);

        ImageButton play = (ImageButton) this.findViewById(R.id.button_play);
        play.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                is_playing_desired = true;
                nativePlay();
            }
        });

        ImageButton pause = (ImageButton) this.findViewById(R.id.button_stop);
        pause.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                is_playing_desired = false;
                nativePause();
            }
        });

        if (savedInstanceState != null) {
            is_playing_desired = savedInstanceState.getBoolean("playing");
            Log.i ("GStreamer", "Activity created. Saved state is playing:" + is_playing_desired);
        } else {
            is_playing_desired = false;
            Log.i ("GStreamer", "Activity created. There is no saved state, playing: false");
        }

        // Start with disabled buttons, until native code is initialized
        this.findViewById(R.id.button_play).setEnabled(false);
        this.findViewById(R.id.button_stop).setEnabled(false);

        nativeInit();
    }

    protected void onSaveInstanceState (Bundle outState) {
        Log.d ("GStreamer", "Saving state, playing:" + is_playing_desired);
        outState.putBoolean("playing", is_playing_desired);
    }

    protected void onDestroy() {
        nativeFinalize();
        super.onDestroy();
    }

    // Called from native code. This sets the content of the TextView from the UI thread.
    private void setMessage(final String message) {
        final TextView tv = (TextView) this.findViewById(R.id.textview_message);
        runOnUiThread (new Runnable() {
            public void run() {
                tv.setText(message);
            }
        });
    }

    // Called from native code. Native code calls this once it has created its pipeline and
    // the main loop is running, so it is ready to accept commands.
    private void onGStreamerInitialized () {
        Log.i ("GStreamer", "Gst initialized. Restoring state, playing:" + is_playing_desired);
        // Restore previous playing state
        if (is_playing_desired) {
            nativePlay();
        } else {
            nativePause();
        }

        // Re-enable buttons, now that GStreamer is initialized
        final Activity activity = this;
        runOnUiThread(new Runnable() {
            public void run() {
                activity.findViewById(R.id.button_play).setEnabled(true);
                activity.findViewById(R.id.button_stop).setEnabled(true);
            }
        });
    }

    static {
        System.loadLibrary("gstreamer_android");
        System.loadLibrary("tutorial-2");
        nativeClassInit();
    }

}
18  tutorials/android-tutorial-3/AndroidManifest.xml  Normal file
@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="org.freedesktop.gstreamer.tutorials.tutorial_3"
    android:versionCode="1"
    android:versionName="1.0">
    <uses-sdk android:minSdkVersion="9" android:targetSdkVersion="14"/>
    <uses-feature android:glEsVersion="0x00020000"/>
    <application android:label="@string/app_name"
                 android:icon="@drawable/gstreamer_logo_3">
        <activity android:name=".Tutorial3"
                  android:label="@string/app_name">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>
33  tutorials/android-tutorial-3/jni/Android.mk  Normal file
@@ -0,0 +1,33 @@
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE    := tutorial-3
LOCAL_SRC_FILES := tutorial-3.c
LOCAL_SHARED_LIBRARIES := gstreamer_android
LOCAL_LDLIBS := -llog -landroid
include $(BUILD_SHARED_LIBRARY)

ifndef GSTREAMER_ROOT_ANDROID
$(error GSTREAMER_ROOT_ANDROID is not defined!)
endif

ifeq ($(TARGET_ARCH_ABI),armeabi)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/arm
else ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/armv7
else ifeq ($(TARGET_ARCH_ABI),arm64-v8a)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/arm64
else ifeq ($(TARGET_ARCH_ABI),x86)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/x86
else ifeq ($(TARGET_ARCH_ABI),x86_64)
GSTREAMER_ROOT        := $(GSTREAMER_ROOT_ANDROID)/x86_64
else
$(error Target arch ABI not supported: $(TARGET_ARCH_ABI))
endif

GSTREAMER_NDK_BUILD_PATH  := $(GSTREAMER_ROOT)/share/gst-android/ndk-build/
include $(GSTREAMER_NDK_BUILD_PATH)/plugins.mk
GSTREAMER_PLUGINS         := $(GSTREAMER_PLUGINS_CORE) $(GSTREAMER_PLUGINS_SYS) $(GSTREAMER_PLUGINS_EFFECTS)
GSTREAMER_EXTRA_DEPS      := gstreamer-video-1.0
include $(GSTREAMER_NDK_BUILD_PATH)/gstreamer-1.0.mk
1  tutorials/android-tutorial-3/jni/Application.mk  Normal file
@@ -0,0 +1 @@
APP_ABI = armeabi armeabi-v7a arm64-v8a x86 x86_64
337  tutorials/android-tutorial-3/jni/tutorial-3.c  Normal file
@@ -0,0 +1,337 @@
#include <string.h>
#include <stdint.h>
#include <jni.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <gst/gst.h>
#include <gst/video/video.h>
#include <pthread.h>

GST_DEBUG_CATEGORY_STATIC (debug_category);
#define GST_CAT_DEFAULT debug_category

/*
 * These macros provide a way to store the native pointer to CustomData, which might be 32 or 64 bits, into
 * a jlong, which is always 64 bits, without warnings.
 */
#if GLIB_SIZEOF_VOID_P == 8
# define GET_CUSTOM_DATA(env, thiz, fieldID) (CustomData *)(*env)->GetLongField (env, thiz, fieldID)
# define SET_CUSTOM_DATA(env, thiz, fieldID, data) (*env)->SetLongField (env, thiz, fieldID, (jlong)data)
#else
# define GET_CUSTOM_DATA(env, thiz, fieldID) (CustomData *)(jint)(*env)->GetLongField (env, thiz, fieldID)
# define SET_CUSTOM_DATA(env, thiz, fieldID, data) (*env)->SetLongField (env, thiz, fieldID, (jlong)(jint)data)
#endif

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  jobject app;                  /* Application instance, used to call its methods. A global reference is kept. */
  GstElement *pipeline;         /* The running pipeline */
  GMainContext *context;        /* GLib context used to run the main loop */
  GMainLoop *main_loop;         /* GLib main loop */
  gboolean initialized;         /* To avoid informing the UI multiple times about the initialization */
  GstElement *video_sink;       /* The video sink element which receives XOverlay commands */
  ANativeWindow *native_window; /* The Android native window where video will be rendered */
} CustomData;

/* These global variables cache values which are not changing during execution */
static pthread_t gst_app_thread;
static pthread_key_t current_jni_env;
static JavaVM *java_vm;
static jfieldID custom_data_field_id;
static jmethodID set_message_method_id;
static jmethodID on_gstreamer_initialized_method_id;

/*
 * Private methods
 */

/* Register this thread with the VM */
static JNIEnv *attach_current_thread (void) {
  JNIEnv *env;
  JavaVMAttachArgs args;

  GST_DEBUG ("Attaching thread %p", g_thread_self ());
  args.version = JNI_VERSION_1_4;
  args.name = NULL;
  args.group = NULL;

  if ((*java_vm)->AttachCurrentThread (java_vm, &env, &args) < 0) {
    GST_ERROR ("Failed to attach current thread");
    return NULL;
  }

  return env;
}

/* Unregister this thread from the VM */
static void detach_current_thread (void *env) {
  GST_DEBUG ("Detaching thread %p", g_thread_self ());
  (*java_vm)->DetachCurrentThread (java_vm);
}

/* Retrieve the JNI environment for this thread */
static JNIEnv *get_jni_env (void) {
  JNIEnv *env;

  if ((env = pthread_getspecific (current_jni_env)) == NULL) {
    env = attach_current_thread ();
    pthread_setspecific (current_jni_env, env);
  }

  return env;
}

/* Change the content of the UI's TextView */
static void set_ui_message (const gchar *message, CustomData *data) {
  JNIEnv *env = get_jni_env ();
  GST_DEBUG ("Setting message to: %s", message);
  jstring jmessage = (*env)->NewStringUTF(env, message);
  (*env)->CallVoidMethod (env, data->app, set_message_method_id, jmessage);
  if ((*env)->ExceptionCheck (env)) {
    GST_ERROR ("Failed to call Java method");
    (*env)->ExceptionClear (env);
  }
  (*env)->DeleteLocalRef (env, jmessage);
}

/* Retrieve errors from the bus and show them on the UI */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;
  gchar *message_string;

  gst_message_parse_error (msg, &err, &debug_info);
  message_string = g_strdup_printf ("Error received from element %s: %s", GST_OBJECT_NAME (msg->src), err->message);
  g_clear_error (&err);
  g_free (debug_info);
  set_ui_message (message_string, data);
  g_free (message_string);
  gst_element_set_state (data->pipeline, GST_STATE_NULL);
}

/* Notify UI about pipeline state changes */
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GstState old_state, new_state, pending_state;
  gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
  /* Only pay attention to messages coming from the pipeline, not its children */
  if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->pipeline)) {
    gchar *message = g_strdup_printf("State changed to %s", gst_element_state_get_name(new_state));
    set_ui_message(message, data);
    g_free (message);
  }
}

/* Check if all conditions are met to report GStreamer as initialized.
 * These conditions will change depending on the application */
static void check_initialization_complete (CustomData *data) {
  JNIEnv *env = get_jni_env ();
  if (!data->initialized && data->native_window && data->main_loop) {
    GST_DEBUG ("Initialization complete, notifying application. native_window:%p main_loop:%p", data->native_window, data->main_loop);

    /* The main loop is running and we received a native window, inform the sink about it */
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->video_sink), (guintptr)data->native_window);

    (*env)->CallVoidMethod (env, data->app, on_gstreamer_initialized_method_id);
    if ((*env)->ExceptionCheck (env)) {
      GST_ERROR ("Failed to call Java method");
      (*env)->ExceptionClear (env);
    }
    data->initialized = TRUE;
  }
}

/* Main method for the native code. This is executed on its own thread. */
static void *app_function (void *userdata) {
  JavaVMAttachArgs args;
  GstBus *bus;
  CustomData *data = (CustomData *)userdata;
  GSource *bus_source;
  GError *error = NULL;

  GST_DEBUG ("Creating pipeline in CustomData at %p", data);

  /* Create our own GLib Main Context and make it the default one */
  data->context = g_main_context_new ();
  g_main_context_push_thread_default(data->context);

  /* Build pipeline */
  data->pipeline = gst_parse_launch("videotestsrc ! warptv ! videoconvert ! autovideosink", &error);
  if (error) {
    gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message);
    g_clear_error (&error);
    set_ui_message(message, data);
    g_free (message);
    return NULL;
  }

  /* Set the pipeline to READY, so it can already accept a window handle, if we have one */
  gst_element_set_state(data->pipeline, GST_STATE_READY);

  data->video_sink = gst_bin_get_by_interface(GST_BIN(data->pipeline), GST_TYPE_VIDEO_OVERLAY);
  if (!data->video_sink) {
    GST_ERROR ("Could not retrieve video sink");
    return NULL;
  }

  /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
  bus = gst_element_get_bus (data->pipeline);
  bus_source = gst_bus_create_watch (bus);
  g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
  g_source_attach (bus_source, data->context);
  g_source_unref (bus_source);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, data);
  gst_object_unref (bus);

  /* Create a GLib Main Loop and set it to run */
  GST_DEBUG ("Entering main loop... (CustomData:%p)", data);
  data->main_loop = g_main_loop_new (data->context, FALSE);
  check_initialization_complete (data);
  g_main_loop_run (data->main_loop);
  GST_DEBUG ("Exited main loop");
  g_main_loop_unref (data->main_loop);
  data->main_loop = NULL;

  /* Free resources */
  g_main_context_pop_thread_default(data->context);
  g_main_context_unref (data->context);
  gst_element_set_state (data->pipeline, GST_STATE_NULL);
  gst_object_unref (data->video_sink);
  gst_object_unref (data->pipeline);

  return NULL;
}

/*
 * Java Bindings
 */

/* Instruct the native code to create its internal data structure, pipeline and thread */
static void gst_native_init (JNIEnv* env, jobject thiz) {
  CustomData *data = g_new0 (CustomData, 1);
  SET_CUSTOM_DATA (env, thiz, custom_data_field_id, data);
  GST_DEBUG_CATEGORY_INIT (debug_category, "tutorial-3", 0, "Android tutorial 3");
  gst_debug_set_threshold_for_name("tutorial-3", GST_LEVEL_DEBUG);
  GST_DEBUG ("Created CustomData at %p", data);
  data->app = (*env)->NewGlobalRef (env, thiz);
  GST_DEBUG ("Created GlobalRef for app object at %p", data->app);
  pthread_create (&gst_app_thread, NULL, &app_function, data);
}

/* Quit the main loop, remove the native thread and free resources */
static void gst_native_finalize (JNIEnv* env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Quitting main loop...");
  g_main_loop_quit (data->main_loop);
  GST_DEBUG ("Waiting for thread to finish...");
  pthread_join (gst_app_thread, NULL);
  GST_DEBUG ("Deleting GlobalRef for app object at %p", data->app);
  (*env)->DeleteGlobalRef (env, data->app);
  GST_DEBUG ("Freeing CustomData at %p", data);
  g_free (data);
  SET_CUSTOM_DATA (env, thiz, custom_data_field_id, NULL);
  GST_DEBUG ("Done finalizing");
}

/* Set pipeline to PLAYING state */
static void gst_native_play (JNIEnv* env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Setting state to PLAYING");
  gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
}

/* Set pipeline to PAUSED state */
static void gst_native_pause (JNIEnv* env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Setting state to PAUSED");
  gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
}

/* Static class initializer: retrieve method and field IDs */
static jboolean gst_native_class_init (JNIEnv* env, jclass klass) {
  custom_data_field_id = (*env)->GetFieldID (env, klass, "native_custom_data", "J");
  set_message_method_id = (*env)->GetMethodID (env, klass, "setMessage", "(Ljava/lang/String;)V");
  on_gstreamer_initialized_method_id = (*env)->GetMethodID (env, klass, "onGStreamerInitialized", "()V");

  if (!custom_data_field_id || !set_message_method_id || !on_gstreamer_initialized_method_id) {
    /* We emit this message through the Android log instead of the GStreamer log because the latter
     * has not been initialized yet.
     */
    __android_log_print (ANDROID_LOG_ERROR, "tutorial-3", "The calling class does not implement all necessary interface methods");
    return JNI_FALSE;
  }
  return JNI_TRUE;
}

static void gst_native_surface_init (JNIEnv *env, jobject thiz, jobject surface) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  ANativeWindow *new_native_window = ANativeWindow_fromSurface(env, surface);
  GST_DEBUG ("Received surface %p (native window %p)", surface, new_native_window);

  if (data->native_window) {
    ANativeWindow_release (data->native_window);
    if (data->native_window == new_native_window) {
      GST_DEBUG ("New native window is the same as the previous one %p", data->native_window);
      if (data->video_sink) {
        gst_video_overlay_expose(GST_VIDEO_OVERLAY (data->video_sink));
        gst_video_overlay_expose(GST_VIDEO_OVERLAY (data->video_sink));
      }
      return;
    } else {
      GST_DEBUG ("Released previous native window %p", data->native_window);
      data->initialized = FALSE;
    }
  }
  data->native_window = new_native_window;

  check_initialization_complete (data);
}

static void gst_native_surface_finalize (JNIEnv *env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Releasing Native Window %p", data->native_window);

  if (data->video_sink) {
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->video_sink), (guintptr)NULL);
    gst_element_set_state (data->pipeline, GST_STATE_READY);
  }

  ANativeWindow_release (data->native_window);
  data->native_window = NULL;
  data->initialized = FALSE;
}

/* List of implemented native methods */
static JNINativeMethod native_methods[] = {
  { "nativeInit", "()V", (void *) gst_native_init},
  { "nativeFinalize", "()V", (void *) gst_native_finalize},
  { "nativePlay", "()V", (void *) gst_native_play},
  { "nativePause", "()V", (void *) gst_native_pause},
  { "nativeSurfaceInit", "(Ljava/lang/Object;)V", (void *) gst_native_surface_init},
  { "nativeSurfaceFinalize", "()V", (void *) gst_native_surface_finalize},
  { "nativeClassInit", "()Z", (void *) gst_native_class_init}
};

/* Library initializer */
jint JNI_OnLoad(JavaVM *vm, void *reserved) {
  JNIEnv *env = NULL;

  java_vm = vm;

  if ((*vm)->GetEnv(vm, (void**) &env, JNI_VERSION_1_4) != JNI_OK) {
    __android_log_print (ANDROID_LOG_ERROR, "tutorial-3", "Could not retrieve JNIEnv");
    return 0;
  }
  jclass klass = (*env)->FindClass (env, "org/freedesktop/gstreamer/tutorials/tutorial_3/Tutorial3");
  (*env)->RegisterNatives (env, klass, native_methods, G_N_ELEMENTS(native_methods));

  pthread_key_create (&current_jni_env, detach_current_thread);

  return JNI_VERSION_1_4;
}
(6 new binary image files added; sizes between 1.1 KiB and 5.5 KiB)
45  tutorials/android-tutorial-3/res/layout/main.xml  Normal file
@@ -0,0 +1,45 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:gravity="center_vertical"
    android:orientation="vertical" >

    <TextView
        android:id="@+id/textview_message"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal" />

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal"
        android:orientation="horizontal" >

        <ImageButton
            android:id="@+id/button_play"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_play"
            android:src="@android:drawable/ic_media_play"
            android:text="@string/button_play" />

        <ImageButton
            android:id="@+id/button_stop"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_stop"
            android:src="@android:drawable/ic_media_pause"
            android:text="@string/button_stop" />
    </LinearLayout>

    <org.freedesktop.gstreamer.tutorials.tutorial_3.GStreamerSurfaceView
        android:id="@+id/surface_video"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_gravity="center_vertical|center_horizontal" />

</LinearLayout>
6  tutorials/android-tutorial-3/res/values/strings.xml  Normal file
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <string name="app_name">GStreamer tutorial 3</string>
    <string name="button_play">Play</string>
    <string name="button_stop">Stop</string>
</resources>
@@ -0,0 +1,85 @@
package org.freedesktop.gstreamer.tutorials.tutorial_3;

import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;

// A simple SurfaceView whose width and height can be set from the outside
public class GStreamerSurfaceView extends SurfaceView {
    public int media_width = 320;
    public int media_height = 240;

    // Mandatory constructors, they do not do much
    public GStreamerSurfaceView(Context context, AttributeSet attrs,
            int defStyle) {
        super(context, attrs, defStyle);
    }

    public GStreamerSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public GStreamerSurfaceView (Context context) {
        super(context);
    }

    // Called by the layout manager to find out our size and give us some rules.
    // We will try to maximize our size, and preserve the media's aspect ratio if
    // we are given the freedom to do so.
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int width = 0, height = 0;
        int wmode = View.MeasureSpec.getMode(widthMeasureSpec);
        int hmode = View.MeasureSpec.getMode(heightMeasureSpec);
        int wsize = View.MeasureSpec.getSize(widthMeasureSpec);
        int hsize = View.MeasureSpec.getSize(heightMeasureSpec);

        Log.i ("GStreamer", "onMeasure called with " + media_width + "x" + media_height);
        // Obey width rules
        switch (wmode) {
        case View.MeasureSpec.AT_MOST:
            if (hmode == View.MeasureSpec.EXACTLY) {
                width = Math.min(hsize * media_width / media_height, wsize);
                break;
            }
        case View.MeasureSpec.EXACTLY:
            width = wsize;
            break;
        case View.MeasureSpec.UNSPECIFIED:
            width = media_width;
        }

        // Obey height rules
        switch (hmode) {
        case View.MeasureSpec.AT_MOST:
            if (wmode == View.MeasureSpec.EXACTLY) {
                height = Math.min(wsize * media_height / media_width, hsize);
                break;
            }
        case View.MeasureSpec.EXACTLY:
            height = hsize;
            break;
        case View.MeasureSpec.UNSPECIFIED:
            height = media_height;
        }

        // Finally, calculate best size when both axis are free
        if (hmode == View.MeasureSpec.AT_MOST && wmode == View.MeasureSpec.AT_MOST) {
            int correct_height = width * media_height / media_width;
            int correct_width = height * media_width / media_height;

            if (correct_height < height)
                height = correct_height;
            else
                width = correct_width;
        }

        // Obey minimum size
        width = Math.max (getSuggestedMinimumWidth(), width);
        height = Math.max (getSuggestedMinimumHeight(), height);
        setMeasuredDimension(width, height);
    }

}
@@ -0,0 +1,143 @@
package org.freedesktop.gstreamer.tutorials.tutorial_3;

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageButton;
import android.widget.TextView;
import android.widget.Toast;

import org.freedesktop.gstreamer.GStreamer;

public class Tutorial3 extends Activity implements SurfaceHolder.Callback {
    private native void nativeInit();     // Initialize native code, build pipeline, etc
    private native void nativeFinalize(); // Destroy pipeline and shutdown native code
    private native void nativePlay();     // Set pipeline to PLAYING
    private native void nativePause();    // Set pipeline to PAUSED
    private static native boolean nativeClassInit(); // Initialize native class: cache Method IDs for callbacks
    private native void nativeSurfaceInit(Object surface);
    private native void nativeSurfaceFinalize();
    private long native_custom_data;      // Native code will use this to keep private data

    private boolean is_playing_desired;   // Whether the user asked to go to PLAYING

    // Called when the activity is first created.
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);

        // Initialize GStreamer and warn if it fails
        try {
            GStreamer.init(this);
        } catch (Exception e) {
            Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
            finish();
            return;
        }

        setContentView(R.layout.main);

        ImageButton play = (ImageButton) this.findViewById(R.id.button_play);
        play.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                is_playing_desired = true;
                nativePlay();
            }
        });

        ImageButton pause = (ImageButton) this.findViewById(R.id.button_stop);
        pause.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                is_playing_desired = false;
                nativePause();
            }
        });

        SurfaceView sv = (SurfaceView) this.findViewById(R.id.surface_video);
        SurfaceHolder sh = sv.getHolder();
        sh.addCallback(this);

        if (savedInstanceState != null) {
            is_playing_desired = savedInstanceState.getBoolean("playing");
            Log.i ("GStreamer", "Activity created. Saved state is playing:" + is_playing_desired);
        } else {
            is_playing_desired = false;
            Log.i ("GStreamer", "Activity created. There is no saved state, playing: false");
        }

        // Start with disabled buttons, until native code is initialized
        this.findViewById(R.id.button_play).setEnabled(false);
        this.findViewById(R.id.button_stop).setEnabled(false);

        nativeInit();
    }

    protected void onSaveInstanceState (Bundle outState) {
        Log.d ("GStreamer", "Saving state, playing:" + is_playing_desired);
        outState.putBoolean("playing", is_playing_desired);
    }

    protected void onDestroy() {
        nativeFinalize();
        super.onDestroy();
    }

    // Called from native code. This sets the content of the TextView from the UI thread.
    private void setMessage(final String message) {
        final TextView tv = (TextView) this.findViewById(R.id.textview_message);
        runOnUiThread (new Runnable() {
            public void run() {
                tv.setText(message);
            }
        });
    }

    // Called from native code. Native code calls this once it has created its pipeline and
    // the main loop is running, so it is ready to accept commands.
    private void onGStreamerInitialized () {
        Log.i ("GStreamer", "Gst initialized. Restoring state, playing:" + is_playing_desired);
        // Restore previous playing state
        if (is_playing_desired) {
            nativePlay();
        } else {
            nativePause();
        }

        // Re-enable buttons, now that GStreamer is initialized
        final Activity activity = this;
        runOnUiThread(new Runnable() {
            public void run() {
                activity.findViewById(R.id.button_play).setEnabled(true);
                activity.findViewById(R.id.button_stop).setEnabled(true);
            }
        });
    }

    static {
        System.loadLibrary("gstreamer_android");
        System.loadLibrary("tutorial-3");
        nativeClassInit();
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        Log.d("GStreamer", "Surface changed to format " + format + " width "
                + width + " height " + height);
        nativeSurfaceInit (holder.getSurface());
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Log.d("GStreamer", "Surface created: " + holder.getSurface());
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.d("GStreamer", "Surface destroyed");
        nativeSurfaceFinalize ();
    }

}

19
tutorials/android-tutorial-4/AndroidManifest.xml
Normal file

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.gst_sdk_tutorials.tutorial_4"
    android:versionCode="1"
    android:versionName="1.0">
    <uses-sdk android:minSdkVersion="9" android:targetSdkVersion="14"/>
    <uses-permission android:name="android.permission.INTERNET"/>
    <uses-feature android:glEsVersion="0x00020000"/>
    <application android:label="@string/app_name"
        android:icon="@drawable/gstreamer_logo_4">
        <activity android:name=".Tutorial4"
            android:label="@string/app_name">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>

34
tutorials/android-tutorial-4/jni/Android.mk
Normal file

@@ -0,0 +1,34 @@
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE := tutorial-4
LOCAL_SRC_FILES := tutorial-4.c
LOCAL_SHARED_LIBRARIES := gstreamer_android
LOCAL_LDLIBS := -llog -landroid
include $(BUILD_SHARED_LIBRARY)

ifndef GSTREAMER_ROOT_ANDROID
$(error GSTREAMER_ROOT_ANDROID is not defined!)
endif

ifeq ($(TARGET_ARCH_ABI),armeabi)
GSTREAMER_ROOT := $(GSTREAMER_ROOT_ANDROID)/arm
else ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
GSTREAMER_ROOT := $(GSTREAMER_ROOT_ANDROID)/armv7
else ifeq ($(TARGET_ARCH_ABI),arm64-v8a)
GSTREAMER_ROOT := $(GSTREAMER_ROOT_ANDROID)/arm64
else ifeq ($(TARGET_ARCH_ABI),x86)
GSTREAMER_ROOT := $(GSTREAMER_ROOT_ANDROID)/x86
else ifeq ($(TARGET_ARCH_ABI),x86_64)
GSTREAMER_ROOT := $(GSTREAMER_ROOT_ANDROID)/x86_64
else
$(error Target arch ABI not supported: $(TARGET_ARCH_ABI))
endif

GSTREAMER_NDK_BUILD_PATH := $(GSTREAMER_ROOT)/share/gst-android/ndk-build/
include $(GSTREAMER_NDK_BUILD_PATH)/plugins.mk
GSTREAMER_PLUGINS := $(GSTREAMER_PLUGINS_CORE) $(GSTREAMER_PLUGINS_PLAYBACK) $(GSTREAMER_PLUGINS_CODECS) $(GSTREAMER_PLUGINS_NET) $(GSTREAMER_PLUGINS_SYS)
G_IO_MODULES := gnutls
GSTREAMER_EXTRA_DEPS := gstreamer-video-1.0
include $(GSTREAMER_NDK_BUILD_PATH)/gstreamer-1.0.mk

1
tutorials/android-tutorial-4/jni/Application.mk
Normal file

@@ -0,0 +1 @@
APP_ABI = armeabi armeabi-v7a arm64-v8a x86 x86_64

562
tutorials/android-tutorial-4/jni/tutorial-4.c
Normal file

@@ -0,0 +1,562 @@
#include <string.h>
#include <stdint.h>
#include <jni.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <gst/gst.h>
#include <gst/video/video.h>
#include <pthread.h>

GST_DEBUG_CATEGORY_STATIC (debug_category);
#define GST_CAT_DEFAULT debug_category

/*
 * These macros provide a way to store the native pointer to CustomData, which might be 32 or 64 bits, into
 * a jlong, which is always 64 bits, without warnings.
 */
#if GLIB_SIZEOF_VOID_P == 8
# define GET_CUSTOM_DATA(env, thiz, fieldID) (CustomData *)(*env)->GetLongField (env, thiz, fieldID)
# define SET_CUSTOM_DATA(env, thiz, fieldID, data) (*env)->SetLongField (env, thiz, fieldID, (jlong)data)
#else
# define GET_CUSTOM_DATA(env, thiz, fieldID) (CustomData *)(jint)(*env)->GetLongField (env, thiz, fieldID)
# define SET_CUSTOM_DATA(env, thiz, fieldID, data) (*env)->SetLongField (env, thiz, fieldID, (jlong)(jint)data)
#endif

/* Do not allow seeks to be performed closer than this distance. It is visually useless, and will probably
 * confuse some demuxers. */
#define SEEK_MIN_DELAY (500 * GST_MSECOND)

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  jobject app;                  /* Application instance, used to call its methods. A global reference is kept. */
  GstElement *pipeline;         /* The running pipeline */
  GMainContext *context;        /* GLib context used to run the main loop */
  GMainLoop *main_loop;         /* GLib main loop */
  gboolean initialized;         /* To avoid informing the UI multiple times about the initialization */
  ANativeWindow *native_window; /* The Android native window where video will be rendered */
  GstState state;               /* Current pipeline state */
  GstState target_state;        /* Desired pipeline state, to be set once buffering is complete */
  gint64 duration;              /* Cached clip duration */
  gint64 desired_position;      /* Position to seek to, once the pipeline is running */
  GstClockTime last_seek_time;  /* For seeking overflow prevention (throttling) */
  gboolean is_live;             /* Live streams do not use buffering */
} CustomData;

/* playbin flags */
typedef enum {
  GST_PLAY_FLAG_TEXT = (1 << 2)  /* We want subtitle output */
} GstPlayFlags;

/* These global variables cache values which are not changing during execution */
static pthread_t gst_app_thread;
static pthread_key_t current_jni_env;
static JavaVM *java_vm;
static jfieldID custom_data_field_id;
static jmethodID set_message_method_id;
static jmethodID set_current_position_method_id;
static jmethodID on_gstreamer_initialized_method_id;
static jmethodID on_media_size_changed_method_id;

/*
 * Private methods
 */

/* Register this thread with the VM */
static JNIEnv *attach_current_thread (void) {
  JNIEnv *env;
  JavaVMAttachArgs args;

  GST_DEBUG ("Attaching thread %p", g_thread_self ());
  args.version = JNI_VERSION_1_4;
  args.name = NULL;
  args.group = NULL;

  if ((*java_vm)->AttachCurrentThread (java_vm, &env, &args) < 0) {
    GST_ERROR ("Failed to attach current thread");
    return NULL;
  }

  return env;
}

/* Unregister this thread from the VM */
static void detach_current_thread (void *env) {
  GST_DEBUG ("Detaching thread %p", g_thread_self ());
  (*java_vm)->DetachCurrentThread (java_vm);
}

/* Retrieve the JNI environment for this thread */
static JNIEnv *get_jni_env (void) {
  JNIEnv *env;

  if ((env = pthread_getspecific (current_jni_env)) == NULL) {
    env = attach_current_thread ();
    pthread_setspecific (current_jni_env, env);
  }

  return env;
}

/* Change the content of the UI's TextView */
static void set_ui_message (const gchar *message, CustomData *data) {
  JNIEnv *env = get_jni_env ();
  GST_DEBUG ("Setting message to: %s", message);
  jstring jmessage = (*env)->NewStringUTF(env, message);
  (*env)->CallVoidMethod (env, data->app, set_message_method_id, jmessage);
  if ((*env)->ExceptionCheck (env)) {
    GST_ERROR ("Failed to call Java method");
    (*env)->ExceptionClear (env);
  }
  (*env)->DeleteLocalRef (env, jmessage);
}

/* Tell the application what is the current position and clip duration */
static void set_current_ui_position (gint position, gint duration, CustomData *data) {
  JNIEnv *env = get_jni_env ();
  (*env)->CallVoidMethod (env, data->app, set_current_position_method_id, position, duration);
  if ((*env)->ExceptionCheck (env)) {
    GST_ERROR ("Failed to call Java method");
    (*env)->ExceptionClear (env);
  }
}

/* If we have pipeline and it is running, query the current position and clip duration and inform
 * the application */
static gboolean refresh_ui (CustomData *data) {
  gint64 current = -1;
  gint64 position;

  /* We do not want to update anything unless we have a working pipeline in the PAUSED or PLAYING state */
  if (!data || !data->pipeline || data->state < GST_STATE_PAUSED)
    return TRUE;

  /* If we didn't know it yet, query the stream duration */
  if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
    if (!gst_element_query_duration (data->pipeline, GST_FORMAT_TIME, &data->duration)) {
      GST_WARNING ("Could not query current duration");
    }
  }

  if (gst_element_query_position (data->pipeline, GST_FORMAT_TIME, &position)) {
    /* Java expects these values in milliseconds, and GStreamer provides nanoseconds */
    set_current_ui_position (position / GST_MSECOND, data->duration / GST_MSECOND, data);
  }
  return TRUE;
}

/* Forward declaration for the delayed seek callback */
static gboolean delayed_seek_cb (CustomData *data);

/* Perform seek, if we are not too close to the previous seek. Otherwise, schedule the seek for
 * some time in the future. */
static void execute_seek (gint64 desired_position, CustomData *data) {
  gint64 diff;

  if (desired_position == GST_CLOCK_TIME_NONE)
    return;

  diff = gst_util_get_timestamp () - data->last_seek_time;

  if (GST_CLOCK_TIME_IS_VALID (data->last_seek_time) && diff < SEEK_MIN_DELAY) {
    /* The previous seek was too close, delay this one */
    GSource *timeout_source;

    if (data->desired_position == GST_CLOCK_TIME_NONE) {
      /* There was no previous seek scheduled. Setup a timer for some time in the future */
      timeout_source = g_timeout_source_new ((SEEK_MIN_DELAY - diff) / GST_MSECOND);
      g_source_set_callback (timeout_source, (GSourceFunc)delayed_seek_cb, data, NULL);
      g_source_attach (timeout_source, data->context);
      g_source_unref (timeout_source);
    }
    /* Update the desired seek position. If multiple requests are received before it is time
     * to perform a seek, only the last one is remembered. */
    data->desired_position = desired_position;
    GST_DEBUG ("Throttling seek to %" GST_TIME_FORMAT ", will be in %" GST_TIME_FORMAT,
        GST_TIME_ARGS (desired_position), GST_TIME_ARGS (SEEK_MIN_DELAY - diff));
  } else {
    /* Perform the seek now */
    GST_DEBUG ("Seeking to %" GST_TIME_FORMAT, GST_TIME_ARGS (desired_position));
    data->last_seek_time = gst_util_get_timestamp ();
    gst_element_seek_simple (data->pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, desired_position);
    data->desired_position = GST_CLOCK_TIME_NONE;
  }
}

/* Delayed seek callback. This gets called by the timer setup in the above function. */
static gboolean delayed_seek_cb (CustomData *data) {
  GST_DEBUG ("Doing delayed seek to %" GST_TIME_FORMAT, GST_TIME_ARGS (data->desired_position));
  execute_seek (data->desired_position, data);
  return FALSE;
}

/* Retrieve errors from the bus and show them on the UI */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;
  gchar *message_string;

  gst_message_parse_error (msg, &err, &debug_info);
  message_string = g_strdup_printf ("Error received from element %s: %s", GST_OBJECT_NAME (msg->src), err->message);
  g_clear_error (&err);
  g_free (debug_info);
  set_ui_message (message_string, data);
  g_free (message_string);
  data->target_state = GST_STATE_NULL;
  gst_element_set_state (data->pipeline, GST_STATE_NULL);
}

/* Called when the End Of the Stream is reached. Just move to the beginning of the media and pause. */
static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  data->target_state = GST_STATE_PAUSED;
  data->is_live = (gst_element_set_state (data->pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_NO_PREROLL);
  execute_seek (0, data);
}

/* Called when the duration of the media changes. Just mark it as unknown, so we re-query it in the next UI refresh. */
static void duration_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  data->duration = GST_CLOCK_TIME_NONE;
}

/* Called when buffering messages are received. We inform the UI about the current buffering level and
 * keep the pipeline paused until 100% buffering is reached. At that point, set the desired state. */
static void buffering_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  gint percent;

  if (data->is_live)
    return;

  gst_message_parse_buffering (msg, &percent);
  if (percent < 100 && data->target_state >= GST_STATE_PAUSED) {
    gchar * message_string = g_strdup_printf ("Buffering %d%%", percent);
    gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
    set_ui_message (message_string, data);
    g_free (message_string);
  } else if (data->target_state >= GST_STATE_PLAYING) {
    gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
  } else if (data->target_state >= GST_STATE_PAUSED) {
    set_ui_message ("Buffering complete", data);
  }
}

/* Called when the clock is lost */
static void clock_lost_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  if (data->target_state >= GST_STATE_PLAYING) {
    gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
    gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
  }
}

/* Retrieve the video sink's Caps and tell the application about the media size */
static void check_media_size (CustomData *data) {
  JNIEnv *env = get_jni_env ();
  GstElement *video_sink;
  GstPad *video_sink_pad;
  GstCaps *caps;
  GstVideoInfo info;

  /* Retrieve the Caps at the entrance of the video sink */
  g_object_get (data->pipeline, "video-sink", &video_sink, NULL);
  video_sink_pad = gst_element_get_static_pad (video_sink, "sink");
  caps = gst_pad_get_current_caps (video_sink_pad);

  if (gst_video_info_from_caps (&info, caps)) {
    info.width = info.width * info.par_n / info.par_d;
    GST_DEBUG ("Media size is %dx%d, notifying application", info.width, info.height);

    (*env)->CallVoidMethod (env, data->app, on_media_size_changed_method_id, (jint)info.width, (jint)info.height);
    if ((*env)->ExceptionCheck (env)) {
      GST_ERROR ("Failed to call Java method");
      (*env)->ExceptionClear (env);
    }
  }

  gst_caps_unref(caps);
  gst_object_unref (video_sink_pad);
  gst_object_unref(video_sink);
}

/* Notify UI about pipeline state changes */
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GstState old_state, new_state, pending_state;
  gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
  /* Only pay attention to messages coming from the pipeline, not its children */
  if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->pipeline)) {
    data->state = new_state;
    gchar *message = g_strdup_printf("State changed to %s", gst_element_state_get_name(new_state));
    set_ui_message(message, data);
    g_free (message);

    /* The Ready to Paused state change is particularly interesting: */
    if (old_state == GST_STATE_READY && new_state == GST_STATE_PAUSED) {
      /* By now the sink already knows the media size */
      check_media_size(data);

      /* If there was a scheduled seek, perform it now that we have moved to the Paused state */
      if (GST_CLOCK_TIME_IS_VALID (data->desired_position))
        execute_seek (data->desired_position, data);
    }
  }
}

/* Check if all conditions are met to report GStreamer as initialized.
 * These conditions will change depending on the application */
static void check_initialization_complete (CustomData *data) {
  JNIEnv *env = get_jni_env ();
  if (!data->initialized && data->native_window && data->main_loop) {
    GST_DEBUG ("Initialization complete, notifying application. native_window:%p main_loop:%p", data->native_window, data->main_loop);

    /* The main loop is running and we received a native window, inform the sink about it */
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->pipeline), (guintptr)data->native_window);

    (*env)->CallVoidMethod (env, data->app, on_gstreamer_initialized_method_id);
    if ((*env)->ExceptionCheck (env)) {
      GST_ERROR ("Failed to call Java method");
      (*env)->ExceptionClear (env);
    }
    data->initialized = TRUE;
  }
}

/* Main method for the native code. This is executed on its own thread. */
static void *app_function (void *userdata) {
  JavaVMAttachArgs args;
  GstBus *bus;
  CustomData *data = (CustomData *)userdata;
  GSource *timeout_source;
  GSource *bus_source;
  GError *error = NULL;
  guint flags;

  GST_DEBUG ("Creating pipeline in CustomData at %p", data);

  /* Create our own GLib Main Context and make it the default one */
  data->context = g_main_context_new ();
  g_main_context_push_thread_default(data->context);

  /* Build pipeline */
  data->pipeline = gst_parse_launch("playbin", &error);
  if (error) {
    gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message);
    g_clear_error (&error);
    set_ui_message(message, data);
    g_free (message);
    return NULL;
  }

  /* Disable subtitles */
  g_object_get (data->pipeline, "flags", &flags, NULL);
  flags &= ~GST_PLAY_FLAG_TEXT;
  g_object_set (data->pipeline, "flags", flags, NULL);

  /* Set the pipeline to READY, so it can already accept a window handle, if we have one */
  data->target_state = GST_STATE_READY;
  gst_element_set_state(data->pipeline, GST_STATE_READY);

  /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
  bus = gst_element_get_bus (data->pipeline);
  bus_source = gst_bus_create_watch (bus);
  g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
  g_source_attach (bus_source, data->context);
  g_source_unref (bus_source);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::duration", (GCallback)duration_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::buffering", (GCallback)buffering_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::clock-lost", (GCallback)clock_lost_cb, data);
  gst_object_unref (bus);

  /* Register a function that GLib will call 4 times per second */
  timeout_source = g_timeout_source_new (250);
  g_source_set_callback (timeout_source, (GSourceFunc)refresh_ui, data, NULL);
  g_source_attach (timeout_source, data->context);
  g_source_unref (timeout_source);

  /* Create a GLib Main Loop and set it to run */
  GST_DEBUG ("Entering main loop... (CustomData:%p)", data);
  data->main_loop = g_main_loop_new (data->context, FALSE);
  check_initialization_complete (data);
  g_main_loop_run (data->main_loop);
  GST_DEBUG ("Exited main loop");
  g_main_loop_unref (data->main_loop);
  data->main_loop = NULL;

  /* Free resources */
  g_main_context_pop_thread_default(data->context);
  g_main_context_unref (data->context);
  data->target_state = GST_STATE_NULL;
  gst_element_set_state (data->pipeline, GST_STATE_NULL);
  gst_object_unref (data->pipeline);

  return NULL;
}

/*
 * Java Bindings
 */

/* Instruct the native code to create its internal data structure, pipeline and thread */
static void gst_native_init (JNIEnv* env, jobject thiz) {
  CustomData *data = g_new0 (CustomData, 1);
  data->desired_position = GST_CLOCK_TIME_NONE;
  data->last_seek_time = GST_CLOCK_TIME_NONE;
  SET_CUSTOM_DATA (env, thiz, custom_data_field_id, data);
  GST_DEBUG_CATEGORY_INIT (debug_category, "tutorial-4", 0, "Android tutorial 4");
  gst_debug_set_threshold_for_name("tutorial-4", GST_LEVEL_DEBUG);
  GST_DEBUG ("Created CustomData at %p", data);
  data->app = (*env)->NewGlobalRef (env, thiz);
  GST_DEBUG ("Created GlobalRef for app object at %p", data->app);
  pthread_create (&gst_app_thread, NULL, &app_function, data);
}

/* Quit the main loop, remove the native thread and free resources */
static void gst_native_finalize (JNIEnv* env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Quitting main loop...");
  g_main_loop_quit (data->main_loop);
  GST_DEBUG ("Waiting for thread to finish...");
  pthread_join (gst_app_thread, NULL);
  GST_DEBUG ("Deleting GlobalRef for app object at %p", data->app);
  (*env)->DeleteGlobalRef (env, data->app);
  GST_DEBUG ("Freeing CustomData at %p", data);
  g_free (data);
  SET_CUSTOM_DATA (env, thiz, custom_data_field_id, NULL);
  GST_DEBUG ("Done finalizing");
}

/* Set playbin's URI */
void gst_native_set_uri (JNIEnv* env, jobject thiz, jstring uri) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data || !data->pipeline) return;
  const jbyte *char_uri = (*env)->GetStringUTFChars (env, uri, NULL);
  GST_DEBUG ("Setting URI to %s", char_uri);
  if (data->target_state >= GST_STATE_READY)
    gst_element_set_state (data->pipeline, GST_STATE_READY);
  g_object_set(data->pipeline, "uri", char_uri, NULL);
  (*env)->ReleaseStringUTFChars (env, uri, char_uri);
  data->duration = GST_CLOCK_TIME_NONE;
  data->is_live = (gst_element_set_state (data->pipeline, data->target_state) == GST_STATE_CHANGE_NO_PREROLL);
}

/* Set pipeline to PLAYING state */
static void gst_native_play (JNIEnv* env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Setting state to PLAYING");
  data->target_state = GST_STATE_PLAYING;
  data->is_live = (gst_element_set_state (data->pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_NO_PREROLL);
}

/* Set pipeline to PAUSED state */
static void gst_native_pause (JNIEnv* env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Setting state to PAUSED");
  data->target_state = GST_STATE_PAUSED;
  data->is_live = (gst_element_set_state (data->pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_NO_PREROLL);
}

/* Instruct the pipeline to seek to a different position */
void gst_native_set_position (JNIEnv* env, jobject thiz, int milliseconds) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  gint64 desired_position = (gint64)(milliseconds * GST_MSECOND);
  if (data->state >= GST_STATE_PAUSED) {
    execute_seek(desired_position, data);
  } else {
    GST_DEBUG ("Scheduling seek to %" GST_TIME_FORMAT " for later", GST_TIME_ARGS (desired_position));
    data->desired_position = desired_position;
  }
}

/* Static class initializer: retrieve method and field IDs */
static jboolean gst_native_class_init (JNIEnv* env, jclass klass) {
  custom_data_field_id = (*env)->GetFieldID (env, klass, "native_custom_data", "J");
  set_message_method_id = (*env)->GetMethodID (env, klass, "setMessage", "(Ljava/lang/String;)V");
  set_current_position_method_id = (*env)->GetMethodID (env, klass, "setCurrentPosition", "(II)V");
  on_gstreamer_initialized_method_id = (*env)->GetMethodID (env, klass, "onGStreamerInitialized", "()V");
  on_media_size_changed_method_id = (*env)->GetMethodID (env, klass, "onMediaSizeChanged", "(II)V");

  if (!custom_data_field_id || !set_message_method_id || !on_gstreamer_initialized_method_id ||
      !on_media_size_changed_method_id || !set_current_position_method_id) {
    /* We emit this message through the Android log instead of the GStreamer log because the later
     * has not been initialized yet.
     */
    __android_log_print (ANDROID_LOG_ERROR, "tutorial-4", "The calling class does not implement all necessary interface methods");
    return JNI_FALSE;
  }
  return JNI_TRUE;
}

static void gst_native_surface_init (JNIEnv *env, jobject thiz, jobject surface) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  ANativeWindow *new_native_window = ANativeWindow_fromSurface(env, surface);
  GST_DEBUG ("Received surface %p (native window %p)", surface, new_native_window);

  if (data->native_window) {
    ANativeWindow_release (data->native_window);
    if (data->native_window == new_native_window) {
      GST_DEBUG ("New native window is the same as the previous one %p", data->native_window);
      if (data->pipeline) {
        gst_video_overlay_expose(GST_VIDEO_OVERLAY (data->pipeline));
        gst_video_overlay_expose(GST_VIDEO_OVERLAY (data->pipeline));
      }
      return;
    } else {
      GST_DEBUG ("Released previous native window %p", data->native_window);
      data->initialized = FALSE;
    }
  }
  data->native_window = new_native_window;

  check_initialization_complete (data);
}

static void gst_native_surface_finalize (JNIEnv *env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Releasing Native Window %p", data->native_window);

  if (data->pipeline) {
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->pipeline), (guintptr)NULL);
    gst_element_set_state (data->pipeline, GST_STATE_READY);
  }

  ANativeWindow_release (data->native_window);
  data->native_window = NULL;
  data->initialized = FALSE;
}

/* List of implemented native methods */
static JNINativeMethod native_methods[] = {
  { "nativeInit", "()V", (void *) gst_native_init},
  { "nativeFinalize", "()V", (void *) gst_native_finalize},
  { "nativeSetUri", "(Ljava/lang/String;)V", (void *) gst_native_set_uri},
  { "nativePlay", "()V", (void *) gst_native_play},
  { "nativePause", "()V", (void *) gst_native_pause},
  { "nativeSetPosition", "(I)V", (void*) gst_native_set_position},
  { "nativeSurfaceInit", "(Ljava/lang/Object;)V", (void *) gst_native_surface_init},
  { "nativeSurfaceFinalize", "()V", (void *) gst_native_surface_finalize},
  { "nativeClassInit", "()Z", (void *) gst_native_class_init}
};

/* Library initializer */
jint JNI_OnLoad(JavaVM *vm, void *reserved) {
  JNIEnv *env = NULL;

  java_vm = vm;

  if ((*vm)->GetEnv(vm, (void**) &env, JNI_VERSION_1_4) != JNI_OK) {
    __android_log_print (ANDROID_LOG_ERROR, "tutorial-4", "Could not retrieve JNIEnv");
    return 0;
  }
  jclass klass = (*env)->FindClass (env, "com/gst_sdk_tutorials/tutorial_4/Tutorial4");
  (*env)->RegisterNatives (env, klass, native_methods, G_N_ELEMENTS(native_methods));

  pthread_key_create (&current_jni_env, detach_current_thread);

  return JNI_VERSION_1_4;
}
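tutorial-4.c throttles seeks with SEEK_MIN_DELAY so that dragging the slider does not flood the pipeline with flushing seeks: if a seek arrives less than 500 ms after the previous one, only a single delayed seek is scheduled and later requests merely overwrite the remembered target. The standalone sketch below reproduces just that throttling idea with plain GLib timers instead of GStreamer clocks; the names (SeekState, throttled_seek, do_seek) and the test burst are illustrative assumptions, not part of the tutorial. Run standalone, it prints the first seek immediately and coalesces the rest of the burst into one delayed seek about half a second later.

/* Minimal sketch of the seek-throttling pattern, using only GLib.
 * Build with: gcc throttle.c $(pkg-config --cflags --libs glib-2.0) */
#include <glib.h>

#define SEEK_MIN_DELAY_US (500 * G_TIME_SPAN_MILLISECOND)  /* 500 ms, as in the tutorial */

typedef struct {
  gint64 last_seek_time;    /* monotonic time of the last executed seek, or -1 */
  gint64 desired_position;  /* pending position in ms, or -1 when nothing is scheduled */
  GMainLoop *loop;
} SeekState;

/* Pretend to perform the actual seek */
static void do_seek (SeekState *s, gint64 position_ms) {
  g_print ("seeking to %" G_GINT64_FORMAT " ms\n", position_ms);
  s->last_seek_time = g_get_monotonic_time ();
  s->desired_position = -1;
}

static gboolean delayed_seek_cb (gpointer user_data) {
  SeekState *s = user_data;
  do_seek (s, s->desired_position);
  return G_SOURCE_REMOVE;
}

/* Execute the seek now, or coalesce it with a pending one if the last seek was too recent */
static void throttled_seek (SeekState *s, gint64 position_ms) {
  gint64 diff = g_get_monotonic_time () - s->last_seek_time;
  if (s->last_seek_time >= 0 && diff < SEEK_MIN_DELAY_US) {
    if (s->desired_position < 0)   /* nothing scheduled yet: arm a single timer */
      g_timeout_add ((SEEK_MIN_DELAY_US - diff) / 1000, delayed_seek_cb, s);
    s->desired_position = position_ms;  /* only the last request is remembered */
  } else {
    do_seek (s, position_ms);
  }
}

static gboolean burst (gpointer user_data) {
  SeekState *s = user_data;
  /* Simulate a user scrubbing quickly: only the first and the last seek survive */
  for (gint64 p = 0; p <= 4000; p += 1000)
    throttled_seek (s, p);
  return G_SOURCE_REMOVE;
}

static gboolean quit_cb (gpointer user_data) {
  g_main_loop_quit (((SeekState *) user_data)->loop);
  return G_SOURCE_REMOVE;
}

int main (void) {
  SeekState s = { -1, -1, NULL };
  s.loop = g_main_loop_new (NULL, FALSE);
  g_idle_add (burst, &s);
  g_timeout_add (1000, quit_cb, &s);
  g_main_loop_run (s.loop);
  g_main_loop_unref (s.loop);
  return 0;
}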
[6 binary image files added under tutorials/android-tutorial-4/res (sizes 1.1 KiB to 5.4 KiB); contents not shown]

69
tutorials/android-tutorial-4/res/layout/main.xml
Normal file

@@ -0,0 +1,69 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:gravity="center_vertical"
    android:orientation="vertical" >

    <TextView
        android:id="@+id/textview_message"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal" />

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal"
        android:orientation="horizontal" >

        <ImageButton
            android:id="@+id/button_play"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_play"
            android:src="@android:drawable/ic_media_play"
            android:text="@string/button_play" />

        <ImageButton
            android:id="@+id/button_stop"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_stop"
            android:src="@android:drawable/ic_media_pause"
            android:text="@string/button_stop" />
    </LinearLayout>

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal"
        android:orientation="horizontal" >

        <TextView
            android:id="@+id/textview_time"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_gravity="center_vertical"
            android:layout_marginLeft="5dip"
            android:layout_marginRight="5dip" />

        <SeekBar
            android:id="@+id/seek_bar"
            android:layout_width="0dip"
            android:layout_height="wrap_content"
            android:layout_gravity="center_vertical"
            android:layout_weight="1"
            android:indeterminate="false" />
    </LinearLayout>

    <com.gst_sdk_tutorials.tutorial_4.GStreamerSurfaceView
        android:id="@+id/surface_video"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_gravity="center_vertical|center_horizontal" />

</LinearLayout>

6
tutorials/android-tutorial-4/res/values/strings.xml
Normal file

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <string name="app_name">GStreamer tutorial 4</string>
    <string name="button_play">Play</string>
    <string name="button_stop">Stop</string>
</resources>

@@ -0,0 +1,85 @@
package com.gst_sdk_tutorials.tutorial_4;

import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;

// A simple SurfaceView whose width and height can be set from the outside
public class GStreamerSurfaceView extends SurfaceView {
    public int media_width = 320;
    public int media_height = 240;

    // Mandatory constructors, they do not do much
    public GStreamerSurfaceView(Context context, AttributeSet attrs,
            int defStyle) {
        super(context, attrs, defStyle);
    }

    public GStreamerSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public GStreamerSurfaceView (Context context) {
        super(context);
    }

    // Called by the layout manager to find out our size and give us some rules.
    // We will try to maximize our size, and preserve the media's aspect ratio if
    // we are given the freedom to do so.
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int width = 0, height = 0;
        int wmode = View.MeasureSpec.getMode(widthMeasureSpec);
        int hmode = View.MeasureSpec.getMode(heightMeasureSpec);
        int wsize = View.MeasureSpec.getSize(widthMeasureSpec);
        int hsize = View.MeasureSpec.getSize(heightMeasureSpec);

        Log.i ("GStreamer", "onMeasure called with " + media_width + "x" + media_height);
        // Obey width rules
        switch (wmode) {
        case View.MeasureSpec.AT_MOST:
            if (hmode == View.MeasureSpec.EXACTLY) {
                width = Math.min(hsize * media_width / media_height, wsize);
                break;
            }
        case View.MeasureSpec.EXACTLY:
            width = wsize;
            break;
        case View.MeasureSpec.UNSPECIFIED:
            width = media_width;
        }

        // Obey height rules
        switch (hmode) {
        case View.MeasureSpec.AT_MOST:
            if (wmode == View.MeasureSpec.EXACTLY) {
                height = Math.min(wsize * media_height / media_width, hsize);
                break;
            }
        case View.MeasureSpec.EXACTLY:
            height = hsize;
            break;
        case View.MeasureSpec.UNSPECIFIED:
            height = media_height;
        }

        // Finally, calculate best size when both axis are free
        if (hmode == View.MeasureSpec.AT_MOST && wmode == View.MeasureSpec.AT_MOST) {
            int correct_height = width * media_height / media_width;
            int correct_width = height * media_width / media_height;

            if (correct_height < height)
                height = correct_height;
            else
                width = correct_width;
        }

        // Obey minimum size
        width = Math.max (getSuggestedMinimumWidth(), width);
        height = Math.max (getSuggestedMinimumHeight(), height);
        setMeasuredDimension(width, height);
    }

}

@@ -0,0 +1,250 @@
package com.gst_sdk_tutorials.tutorial_4;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageButton;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.TextView;
import android.widget.Toast;

import org.freedesktop.gstreamer.GStreamer;

public class Tutorial4 extends Activity implements SurfaceHolder.Callback, OnSeekBarChangeListener {
    private native void nativeInit();     // Initialize native code, build pipeline, etc
    private native void nativeFinalize(); // Destroy pipeline and shutdown native code
    private native void nativeSetUri(String uri); // Set the URI of the media to play
    private native void nativePlay();     // Set pipeline to PLAYING
    private native void nativeSetPosition(int milliseconds); // Seek to the indicated position, in milliseconds
    private native void nativePause();    // Set pipeline to PAUSED
    private static native boolean nativeClassInit(); // Initialize native class: cache Method IDs for callbacks
    private native void nativeSurfaceInit(Object surface); // A new surface is available
    private native void nativeSurfaceFinalize(); // Surface about to be destroyed
    private long native_custom_data;      // Native code will use this to keep private data

    private boolean is_playing_desired;   // Whether the user asked to go to PLAYING
    private int position;                 // Current position, reported by native code
    private int duration;                 // Current clip duration, reported by native code
    private boolean is_local_media;       // Whether this clip is stored locally or is being streamed
    private int desired_position;         // Position where the users wants to seek to
    private String mediaUri;              // URI of the clip being played

    private final String defaultMediaUri = "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.ogv";

    // Called when the activity is first created.
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);

        // Initialize GStreamer and warn if it fails
        try {
            GStreamer.init(this);
        } catch (Exception e) {
            Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
            finish();
            return;
        }

        setContentView(R.layout.main);

        ImageButton play = (ImageButton) this.findViewById(R.id.button_play);
        play.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                is_playing_desired = true;
                nativePlay();
            }
        });

        ImageButton pause = (ImageButton) this.findViewById(R.id.button_stop);
        pause.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                is_playing_desired = false;
                nativePause();
            }
        });

        SurfaceView sv = (SurfaceView) this.findViewById(R.id.surface_video);
        SurfaceHolder sh = sv.getHolder();
        sh.addCallback(this);

        SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);
        sb.setOnSeekBarChangeListener(this);

        // Retrieve our previous state, or initialize it to default values
        if (savedInstanceState != null) {
            is_playing_desired = savedInstanceState.getBoolean("playing");
            position = savedInstanceState.getInt("position");
            duration = savedInstanceState.getInt("duration");
            mediaUri = savedInstanceState.getString("mediaUri");
            Log.i ("GStreamer", "Activity created with saved state:");
        } else {
            is_playing_desired = false;
            position = duration = 0;
            mediaUri = defaultMediaUri;
            Log.i ("GStreamer", "Activity created with no saved state:");
        }
        is_local_media = false;
        Log.i ("GStreamer", "  playing:" + is_playing_desired + " position:" + position +
                " duration: " + duration + " uri: " + mediaUri);

        // Start with disabled buttons, until native code is initialized
        this.findViewById(R.id.button_play).setEnabled(false);
        this.findViewById(R.id.button_stop).setEnabled(false);

        nativeInit();
    }

    protected void onSaveInstanceState (Bundle outState) {
        Log.d ("GStreamer", "Saving state, playing:" + is_playing_desired + " position:" + position +
                " duration: " + duration + " uri: " + mediaUri);
        outState.putBoolean("playing", is_playing_desired);
        outState.putInt("position", position);
        outState.putInt("duration", duration);
        outState.putString("mediaUri", mediaUri);
    }

    protected void onDestroy() {
        nativeFinalize();
        super.onDestroy();
    }

    // Called from native code. This sets the content of the TextView from the UI thread.
    private void setMessage(final String message) {
        final TextView tv = (TextView) this.findViewById(R.id.textview_message);
        runOnUiThread (new Runnable() {
            public void run() {
                tv.setText(message);
            }
        });
    }

    // Set the URI to play, and record whether it is a local or remote file
    private void setMediaUri() {
        nativeSetUri (mediaUri);
        is_local_media = mediaUri.startsWith("file://");
    }

    // Called from native code. Native code calls this once it has created its pipeline and
    // the main loop is running, so it is ready to accept commands.
    private void onGStreamerInitialized () {
        Log.i ("GStreamer", "GStreamer initialized:");
        Log.i ("GStreamer", "  playing:" + is_playing_desired + " position:" + position + " uri: " + mediaUri);

        // Restore previous playing state
        setMediaUri ();
        nativeSetPosition (position);
        if (is_playing_desired) {
            nativePlay();
        } else {
            nativePause();
        }

        // Re-enable buttons, now that GStreamer is initialized
        final Activity activity = this;
        runOnUiThread(new Runnable() {
            public void run() {
                activity.findViewById(R.id.button_play).setEnabled(true);
                activity.findViewById(R.id.button_stop).setEnabled(true);
            }
        });
    }

    // The text widget acts as an slave for the seek bar, so it reflects what the seek bar shows, whether
    // it is an actual pipeline position or the position the user is currently dragging to.
    private void updateTimeWidget () {
        final TextView tv = (TextView) this.findViewById(R.id.textview_time);
        final SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);
        final int pos = sb.getProgress();

        SimpleDateFormat df = new SimpleDateFormat("HH:mm:ss");
        df.setTimeZone(TimeZone.getTimeZone("UTC"));
        final String message = df.format(new Date (pos)) + " / " + df.format(new Date (duration));
        tv.setText(message);
    }

    // Called from native code
    private void setCurrentPosition(final int position, final int duration) {
        final SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);

        // Ignore position messages from the pipeline if the seek bar is being dragged
        if (sb.isPressed()) return;

        runOnUiThread (new Runnable() {
            public void run() {
                sb.setMax(duration);
                sb.setProgress(position);
                updateTimeWidget();
            }
        });
        this.position = position;
        this.duration = duration;
    }

    static {
        System.loadLibrary("gstreamer_android");
        System.loadLibrary("tutorial-4");
        nativeClassInit();
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        Log.d("GStreamer", "Surface changed to format " + format + " width "
                + width + " height " + height);
        nativeSurfaceInit (holder.getSurface());
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Log.d("GStreamer", "Surface created: " + holder.getSurface());
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.d("GStreamer", "Surface destroyed");
        nativeSurfaceFinalize ();
    }

    // Called from native code when the size of the media changes or is first detected.
    // Inform the video surface about the new size and recalculate the layout.
    private void onMediaSizeChanged (int width, int height) {
        Log.i ("GStreamer", "Media size changed to " + width + "x" + height);
        final GStreamerSurfaceView gsv = (GStreamerSurfaceView) this.findViewById(R.id.surface_video);
        gsv.media_width = width;
        gsv.media_height = height;
        runOnUiThread(new Runnable() {
            public void run() {
                gsv.requestLayout();
            }
        });
    }

    // The Seek Bar thumb has moved, either because the user dragged it or we have called setProgress()
    public void onProgressChanged(SeekBar sb, int progress, boolean fromUser) {
        if (fromUser == false) return;
        desired_position = progress;
        // If this is a local file, allow scrub seeking, this is, seek as soon as the slider is moved.
        if (is_local_media) nativeSetPosition(desired_position);
        updateTimeWidget();
    }

    // The user started dragging the Seek Bar thumb
    public void onStartTrackingTouch(SeekBar sb) {
        nativePause();
    }

    // The user released the Seek Bar thumb
    public void onStopTrackingTouch(SeekBar sb) {
        // If this is a remote file, scrub seeking is probably not going to work smoothly enough.
        // Therefore, perform only the seek when the slider is released.
        if (!is_local_media) nativeSetPosition(desired_position);
        if (is_playing_desired) nativePlay();
    }
}

94
tutorials/android-tutorial-5/AndroidManifest.xml
Executable file

@@ -0,0 +1,94 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.gst_sdk_tutorials.tutorial_5"
    android:versionCode="1"
    android:versionName="1.0" >

    <uses-sdk
        android:minSdkVersion="9"
        android:targetSdkVersion="14" />

    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.WAKE_LOCK" />

    <uses-feature android:glEsVersion="0x00020000" />

    <application
        android:icon="@drawable/gstreamer_logo_5"
        android:label="@string/app_name" >
        <activity
            android:name=".Tutorial5"
            android:label="@string/app_name" >

            <intent-filter>
                <action android:name="android.intent.action.MAIN" />

                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>

            <!-- Local files whose MIME type is known to Android -->
            <intent-filter>
                <action android:name="android.intent.action.VIEW" />

                <category android:name="android.intent.category.DEFAULT" />
                <category android:name="android.intent.category.BROWSABLE" />

                <data android:mimeType="audio/*" />
                <data android:mimeType="video/*" />
                <data android:mimeType="image/*" />
            </intent-filter>

            <!-- Local files with unknown MIME type.
                 The list of extensions and supported protocols can certainly be extended. -->
            <intent-filter>
                <action android:name="android.intent.action.VIEW" />

                <category android:name="android.intent.category.DEFAULT" />
                <category android:name="android.intent.category.BROWSABLE" />

                <data android:scheme="file" />
                <data android:mimeType="*/*" />
                <data android:pathPattern=".*\\.avi" />
                <data android:pathPattern=".*\\.AVI" />
                <data android:pathPattern=".*\\.mkv" />
                <data android:pathPattern=".*\\.MKV" />
                <data android:pathPattern=".*\\.webm" />
                <data android:pathPattern=".*\\.WEBM" />
                <data android:pathPattern=".*\\.ogv" />
                <data android:pathPattern=".*\\.OGV" />
                <data android:pathPattern=".*\\.mp4" />
                <data android:pathPattern=".*\\.MP4" />
                <data android:pathPattern=".*\\.mov" />
                <data android:pathPattern=".*\\.MOV" />
            </intent-filter>

            <!-- Remote files. These typically have unknown MIME type.
                 The list of extensions and supported protocols can certainly be extended. -->
            <intent-filter>
                <action android:name="android.intent.action.VIEW" />

                <category android:name="android.intent.category.DEFAULT" />
                <category android:name="android.intent.category.BROWSABLE" />

                <data android:scheme="http" />
                <data android:pathPattern=".*\\.avi" />
                <data android:pathPattern=".*\\.AVI" />
                <data android:pathPattern=".*\\.mkv" />
                <data android:pathPattern=".*\\.MKV" />
                <data android:pathPattern=".*\\.webm" />
                <data android:pathPattern=".*\\.WEBM" />
                <data android:pathPattern=".*\\.ogv" />
                <data android:pathPattern=".*\\.OGV" />
                <data android:pathPattern=".*\\.mp4" />
                <data android:pathPattern=".*\\.MP4" />
                <data android:pathPattern=".*\\.mov" />
                <data android:pathPattern=".*\\.MOV" />
            </intent-filter>
        </activity>
        <activity
            android:name="com.lamerman.FileDialog"
            android:label="@string/filechooser_name" >
        </activity>
    </application>

</manifest>
34
tutorials/android-tutorial-5/jni/Android.mk
Executable file
@ -0,0 +1,34 @@
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE := tutorial-5
LOCAL_SRC_FILES := tutorial-5.c
LOCAL_SHARED_LIBRARIES := gstreamer_android
LOCAL_LDLIBS := -llog -landroid
include $(BUILD_SHARED_LIBRARY)

ifndef GSTREAMER_ROOT_ANDROID
$(error GSTREAMER_ROOT_ANDROID is not defined!)
endif

ifeq ($(TARGET_ARCH_ABI),armeabi)
GSTREAMER_ROOT := $(GSTREAMER_ROOT_ANDROID)/arm
else ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
GSTREAMER_ROOT := $(GSTREAMER_ROOT_ANDROID)/armv7
else ifeq ($(TARGET_ARCH_ABI),arm64-v8a)
GSTREAMER_ROOT := $(GSTREAMER_ROOT_ANDROID)/arm64
else ifeq ($(TARGET_ARCH_ABI),x86)
GSTREAMER_ROOT := $(GSTREAMER_ROOT_ANDROID)/x86
else ifeq ($(TARGET_ARCH_ABI),x86_64)
GSTREAMER_ROOT := $(GSTREAMER_ROOT_ANDROID)/x86_64
else
$(error Target arch ABI not supported: $(TARGET_ARCH_ABI))
endif

GSTREAMER_NDK_BUILD_PATH := $(GSTREAMER_ROOT)/share/gst-android/ndk-build/
include $(GSTREAMER_NDK_BUILD_PATH)/plugins.mk
GSTREAMER_PLUGINS := $(GSTREAMER_PLUGINS_CORE) $(GSTREAMER_PLUGINS_PLAYBACK) $(GSTREAMER_PLUGINS_CODECS) $(GSTREAMER_PLUGINS_NET) $(GSTREAMER_PLUGINS_SYS)
G_IO_MODULES := gnutls
GSTREAMER_EXTRA_DEPS := gstreamer-video-1.0
include $(GSTREAMER_NDK_BUILD_PATH)/gstreamer-1.0.mk
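The GSTREAMER_PLUGINS line above determines which plugin groups get statically linked into libgstreamer_android.so; any element not covered by the selected groups is simply absent at runtime. The fragment below is not part of this merge; it is a minimal sketch, assuming it is compiled into the tutorial's native code and called after gst_init() has run, of how to check that a required element factory (for example "playbin") was actually linked in.

#include <gst/gst.h>

/* Illustrative sketch only (not in the diff): returns TRUE if the element factory
 * named factory_name is available, i.e. its plugin was selected via GSTREAMER_PLUGINS. */
static gboolean
element_is_available (const gchar * factory_name)
{
  GstElementFactory *factory = gst_element_factory_find (factory_name);

  if (!factory)
    return FALSE;               /* not linked in: extend GSTREAMER_PLUGINS in Android.mk */
  gst_object_unref (factory);
  return TRUE;
}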
2
tutorials/android-tutorial-5/jni/Application.mk
Normal file
@ -0,0 +1,2 @@
APP_ABI = armeabi armeabi-v7a arm64-v8a x86 x86_64
570
tutorials/android-tutorial-5/jni/tutorial-5.c
Executable file
@ -0,0 +1,570 @@
#include <string.h>
#include <stdint.h>
#include <jni.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/videooverlay.h>
#include <pthread.h>

GST_DEBUG_CATEGORY_STATIC (debug_category);
#define GST_CAT_DEFAULT debug_category

/*
 * These macros provide a way to store the native pointer to CustomData, which might be 32 or 64 bits, into
 * a jlong, which is always 64 bits, without warnings.
 */
#if GLIB_SIZEOF_VOID_P == 8
# define GET_CUSTOM_DATA(env, thiz, fieldID) (CustomData *)(*env)->GetLongField (env, thiz, fieldID)
# define SET_CUSTOM_DATA(env, thiz, fieldID, data) (*env)->SetLongField (env, thiz, fieldID, (jlong)data)
#else
# define GET_CUSTOM_DATA(env, thiz, fieldID) (CustomData *)(jint)(*env)->GetLongField (env, thiz, fieldID)
# define SET_CUSTOM_DATA(env, thiz, fieldID, data) (*env)->SetLongField (env, thiz, fieldID, (jlong)(jint)data)
#endif

/* Do not allow seeks to be performed closer than this distance. It is visually useless, and will probably
 * confuse some demuxers. */
#define SEEK_MIN_DELAY (500 * GST_MSECOND)

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  jobject app;                  /* Application instance, used to call its methods. A global reference is kept. */
  GstElement *pipeline;         /* The running pipeline */
  GMainContext *context;        /* GLib context used to run the main loop */
  GMainLoop *main_loop;         /* GLib main loop */
  gboolean initialized;         /* To avoid informing the UI multiple times about the initialization */
  ANativeWindow *native_window; /* The Android native window where video will be rendered */
  GstState state;               /* Current pipeline state */
  GstState target_state;        /* Desired pipeline state, to be set once buffering is complete */
  gint64 duration;              /* Cached clip duration */
  gint64 desired_position;      /* Position to seek to, once the pipeline is running */
  GstClockTime last_seek_time;  /* For seeking overflow prevention (throttling) */
  gboolean is_live;             /* Live streams do not use buffering */
} CustomData;

/* playbin2 flags */
typedef enum {
  GST_PLAY_FLAG_TEXT = (1 << 2)  /* We want subtitle output */
} GstPlayFlags;

/* These global variables cache values which are not changing during execution */
static pthread_t gst_app_thread;
static pthread_key_t current_jni_env;
static JavaVM *java_vm;
static jfieldID custom_data_field_id;
static jmethodID set_message_method_id;
static jmethodID set_current_position_method_id;
static jmethodID on_gstreamer_initialized_method_id;
static jmethodID on_media_size_changed_method_id;

/*
 * Private methods
 */

/* Register this thread with the VM */
static JNIEnv *attach_current_thread (void) {
  JNIEnv *env;
  JavaVMAttachArgs args;

  GST_DEBUG ("Attaching thread %p", g_thread_self ());
  args.version = JNI_VERSION_1_4;
  args.name = NULL;
  args.group = NULL;

  if ((*java_vm)->AttachCurrentThread (java_vm, &env, &args) < 0) {
    GST_ERROR ("Failed to attach current thread");
    return NULL;
  }

  return env;
}

/* Unregister this thread from the VM */
static void detach_current_thread (void *env) {
  GST_DEBUG ("Detaching thread %p", g_thread_self ());
  (*java_vm)->DetachCurrentThread (java_vm);
}

/* Retrieve the JNI environment for this thread */
static JNIEnv *get_jni_env (void) {
  JNIEnv *env;

  if ((env = pthread_getspecific (current_jni_env)) == NULL) {
    env = attach_current_thread ();
    pthread_setspecific (current_jni_env, env);
  }

  return env;
}

/* Change the content of the UI's TextView */
static void set_ui_message (const gchar *message, CustomData *data) {
  JNIEnv *env = get_jni_env ();
  GST_DEBUG ("Setting message to: %s", message);
  jstring jmessage = (*env)->NewStringUTF (env, message);
  (*env)->CallVoidMethod (env, data->app, set_message_method_id, jmessage);
  if ((*env)->ExceptionCheck (env)) {
    GST_ERROR ("Failed to call Java method");
    (*env)->ExceptionClear (env);
  }
  (*env)->DeleteLocalRef (env, jmessage);
}

/* Tell the application what the current position and clip duration are */
static void set_current_ui_position (gint position, gint duration, CustomData *data) {
  JNIEnv *env = get_jni_env ();
  (*env)->CallVoidMethod (env, data->app, set_current_position_method_id, position, duration);
  if ((*env)->ExceptionCheck (env)) {
    GST_ERROR ("Failed to call Java method");
    (*env)->ExceptionClear (env);
  }
}

/* If we have a pipeline and it is running, query the current position and clip duration and inform
 * the application */
static gboolean refresh_ui (CustomData *data) {
  gint64 current = -1;
  gint64 position;

  /* We do not want to update anything unless we have a working pipeline in the PAUSED or PLAYING state */
  if (!data || !data->pipeline || data->state < GST_STATE_PAUSED)
    return TRUE;

  /* If we didn't know it yet, query the stream duration */
  if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
    if (!gst_element_query_duration (data->pipeline, GST_FORMAT_TIME, &data->duration)) {
      GST_WARNING ("Could not query current duration (normal for still pictures)");
      data->duration = 0;
    }
  }

  if (!gst_element_query_position (data->pipeline, GST_FORMAT_TIME, &position)) {
    GST_WARNING ("Could not query current position (normal for still pictures)");
    position = 0;
  }

  /* Java expects these values in milliseconds, and GStreamer provides nanoseconds */
  set_current_ui_position (position / GST_MSECOND, data->duration / GST_MSECOND, data);
  return TRUE;
}

/* Forward declaration for the delayed seek callback */
static gboolean delayed_seek_cb (CustomData *data);

/* Perform seek, if we are not too close to the previous seek. Otherwise, schedule the seek for
 * some time in the future. */
static void execute_seek (gint64 desired_position, CustomData *data) {
  gint64 diff;

  if (desired_position == GST_CLOCK_TIME_NONE)
    return;

  diff = gst_util_get_timestamp () - data->last_seek_time;

  if (GST_CLOCK_TIME_IS_VALID (data->last_seek_time) && diff < SEEK_MIN_DELAY) {
    /* The previous seek was too close, delay this one */
    GSource *timeout_source;

    if (data->desired_position == GST_CLOCK_TIME_NONE) {
      /* There was no previous seek scheduled. Setup a timer for some time in the future */
      timeout_source = g_timeout_source_new ((SEEK_MIN_DELAY - diff) / GST_MSECOND);
      g_source_set_callback (timeout_source, (GSourceFunc)delayed_seek_cb, data, NULL);
      g_source_attach (timeout_source, data->context);
      g_source_unref (timeout_source);
    }
    /* Update the desired seek position. If multiple petitions are received before it is time
     * to perform a seek, only the last one is remembered. */
    data->desired_position = desired_position;
    GST_DEBUG ("Throttling seek to %" GST_TIME_FORMAT ", will be in %" GST_TIME_FORMAT,
        GST_TIME_ARGS (desired_position), GST_TIME_ARGS (SEEK_MIN_DELAY - diff));
  } else {
    /* Perform the seek now */
    GST_DEBUG ("Seeking to %" GST_TIME_FORMAT, GST_TIME_ARGS (desired_position));
    data->last_seek_time = gst_util_get_timestamp ();
    gst_element_seek_simple (data->pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, desired_position);
    data->desired_position = GST_CLOCK_TIME_NONE;
  }
}

/* Delayed seek callback. This gets called by the timer setup in the above function. */
static gboolean delayed_seek_cb (CustomData *data) {
  GST_DEBUG ("Doing delayed seek to %" GST_TIME_FORMAT, GST_TIME_ARGS (data->desired_position));
  execute_seek (data->desired_position, data);
  return FALSE;
}

/* Retrieve errors from the bus and show them on the UI */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;
  gchar *message_string;

  gst_message_parse_error (msg, &err, &debug_info);
  message_string = g_strdup_printf ("Error received from element %s: %s", GST_OBJECT_NAME (msg->src), err->message);
  g_clear_error (&err);
  g_free (debug_info);
  set_ui_message (message_string, data);
  g_free (message_string);
  data->target_state = GST_STATE_NULL;
  gst_element_set_state (data->pipeline, GST_STATE_NULL);
}

/* Called when the End Of the Stream is reached. Just move to the beginning of the media and pause. */
static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  data->target_state = GST_STATE_PAUSED;
  data->is_live |= (gst_element_set_state (data->pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_NO_PREROLL);
  execute_seek (0, data);
}

/* Called when the duration of the media changes. Just mark it as unknown, so we re-query it in the next UI refresh. */
static void duration_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  data->duration = GST_CLOCK_TIME_NONE;
}

/* Called when buffering messages are received. We inform the UI about the current buffering level and
 * keep the pipeline paused until 100% buffering is reached. At that point, set the desired state. */
static void buffering_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  gint percent;

  if (data->is_live)
    return;

  gst_message_parse_buffering (msg, &percent);
  if (percent < 100 && data->target_state >= GST_STATE_PAUSED) {
    gchar *message_string = g_strdup_printf ("Buffering %d%%", percent);
    gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
    set_ui_message (message_string, data);
    g_free (message_string);
  } else if (data->target_state >= GST_STATE_PLAYING) {
    gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
  } else if (data->target_state >= GST_STATE_PAUSED) {
    set_ui_message ("Buffering complete", data);
  }
}

/* Called when the clock is lost */
static void clock_lost_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  if (data->target_state >= GST_STATE_PLAYING) {
    gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
    gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
  }
}

/* Retrieve the video sink's Caps and tell the application about the media size */
static void check_media_size (CustomData *data) {
  JNIEnv *env = get_jni_env ();
  GstElement *video_sink;
  GstPad *video_sink_pad;
  GstCaps *caps;
  GstVideoInfo info;

  /* Retrieve the Caps at the entrance of the video sink */
  g_object_get (data->pipeline, "video-sink", &video_sink, NULL);
  video_sink_pad = gst_element_get_static_pad (video_sink, "sink");
  caps = gst_pad_get_current_caps (video_sink_pad);

  if (gst_video_info_from_caps (&info, caps)) {
    info.width = info.width * info.par_n / info.par_d;
    GST_DEBUG ("Media size is %dx%d, notifying application", info.width, info.height);

    (*env)->CallVoidMethod (env, data->app, on_media_size_changed_method_id, (jint)info.width, (jint)info.height);
    if ((*env)->ExceptionCheck (env)) {
      GST_ERROR ("Failed to call Java method");
      (*env)->ExceptionClear (env);
    }
  }

  gst_caps_unref (caps);
  gst_object_unref (video_sink_pad);
  gst_object_unref (video_sink);
}

/* Notify UI about pipeline state changes */
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GstState old_state, new_state, pending_state;
  gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
  /* Only pay attention to messages coming from the pipeline, not its children */
  if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->pipeline)) {
    data->state = new_state;
    gchar *message = g_strdup_printf ("State changed to %s", gst_element_state_get_name (new_state));
    set_ui_message (message, data);
    g_free (message);

    if (new_state == GST_STATE_NULL || new_state == GST_STATE_READY)
      data->is_live = FALSE;

    /* The Ready to Paused state change is particularly interesting: */
    if (old_state == GST_STATE_READY && new_state == GST_STATE_PAUSED) {
      /* By now the sink already knows the media size */
      check_media_size (data);

      /* If there was a scheduled seek, perform it now that we have moved to the Paused state */
      if (GST_CLOCK_TIME_IS_VALID (data->desired_position))
        execute_seek (data->desired_position, data);
    }
  }
}

/* Check if all conditions are met to report GStreamer as initialized.
 * These conditions will change depending on the application */
static void check_initialization_complete (CustomData *data) {
  JNIEnv *env = get_jni_env ();
  if (!data->initialized && data->native_window && data->main_loop) {
    GST_DEBUG ("Initialization complete, notifying application. native_window:%p main_loop:%p", data->native_window, data->main_loop);

    /* The main loop is running and we received a native window, inform the sink about it */
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->pipeline), (guintptr)data->native_window);

    (*env)->CallVoidMethod (env, data->app, on_gstreamer_initialized_method_id);
    if ((*env)->ExceptionCheck (env)) {
      GST_ERROR ("Failed to call Java method");
      (*env)->ExceptionClear (env);
    }
    data->initialized = TRUE;
  }
}

/* Main method for the native code. This is executed on its own thread. */
static void *app_function (void *userdata) {
  JavaVMAttachArgs args;
  GstBus *bus;
  CustomData *data = (CustomData *)userdata;
  GSource *timeout_source;
  GSource *bus_source;
  GError *error = NULL;
  guint flags;

  GST_DEBUG ("Creating pipeline in CustomData at %p", data);

  /* Create our own GLib Main Context and make it the default one */
  data->context = g_main_context_new ();
  g_main_context_push_thread_default (data->context);

  /* Build pipeline */
  data->pipeline = gst_parse_launch ("playbin", &error);
  if (error) {
    gchar *message = g_strdup_printf ("Unable to build pipeline: %s", error->message);
    g_clear_error (&error);
    set_ui_message (message, data);
    g_free (message);
    return NULL;
  }

  /* Disable subtitles */
  g_object_get (data->pipeline, "flags", &flags, NULL);
  flags &= ~GST_PLAY_FLAG_TEXT;
  g_object_set (data->pipeline, "flags", flags, NULL);

  /* Set the pipeline to READY, so it can already accept a window handle, if we have one */
  data->target_state = GST_STATE_READY;
  gst_element_set_state (data->pipeline, GST_STATE_READY);

  /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
  bus = gst_element_get_bus (data->pipeline);
  bus_source = gst_bus_create_watch (bus);
  g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
  g_source_attach (bus_source, data->context);
  g_source_unref (bus_source);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::duration", (GCallback)duration_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::buffering", (GCallback)buffering_cb, data);
  g_signal_connect (G_OBJECT (bus), "message::clock-lost", (GCallback)clock_lost_cb, data);
  gst_object_unref (bus);

  /* Register a function that GLib will call 4 times per second */
  timeout_source = g_timeout_source_new (250);
  g_source_set_callback (timeout_source, (GSourceFunc)refresh_ui, data, NULL);
  g_source_attach (timeout_source, data->context);
  g_source_unref (timeout_source);

  /* Create a GLib Main Loop and set it to run */
  GST_DEBUG ("Entering main loop... (CustomData:%p)", data);
  data->main_loop = g_main_loop_new (data->context, FALSE);
  check_initialization_complete (data);
  g_main_loop_run (data->main_loop);
  GST_DEBUG ("Exited main loop");
  g_main_loop_unref (data->main_loop);
  data->main_loop = NULL;

  /* Free resources */
  g_main_context_pop_thread_default (data->context);
  g_main_context_unref (data->context);
  data->target_state = GST_STATE_NULL;
  gst_element_set_state (data->pipeline, GST_STATE_NULL);
  gst_object_unref (data->pipeline);

  return NULL;
}

/*
 * Java Bindings
 */

/* Instruct the native code to create its internal data structure, pipeline and thread */
static void gst_native_init (JNIEnv* env, jobject thiz) {
  CustomData *data = g_new0 (CustomData, 1);
  data->desired_position = GST_CLOCK_TIME_NONE;
  data->last_seek_time = GST_CLOCK_TIME_NONE;
  SET_CUSTOM_DATA (env, thiz, custom_data_field_id, data);
  GST_DEBUG_CATEGORY_INIT (debug_category, "tutorial-5", 0, "Android tutorial 5");
  gst_debug_set_threshold_for_name ("tutorial-5", GST_LEVEL_DEBUG);
  GST_DEBUG ("Created CustomData at %p", data);
  data->app = (*env)->NewGlobalRef (env, thiz);
  GST_DEBUG ("Created GlobalRef for app object at %p", data->app);
  pthread_create (&gst_app_thread, NULL, &app_function, data);
}

/* Quit the main loop, remove the native thread and free resources */
static void gst_native_finalize (JNIEnv* env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Quitting main loop...");
  g_main_loop_quit (data->main_loop);
  GST_DEBUG ("Waiting for thread to finish...");
  pthread_join (gst_app_thread, NULL);
  GST_DEBUG ("Deleting GlobalRef for app object at %p", data->app);
  (*env)->DeleteGlobalRef (env, data->app);
  GST_DEBUG ("Freeing CustomData at %p", data);
  g_free (data);
  SET_CUSTOM_DATA (env, thiz, custom_data_field_id, NULL);
  GST_DEBUG ("Done finalizing");
}

/* Set playbin2's URI */
void gst_native_set_uri (JNIEnv* env, jobject thiz, jstring uri) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data || !data->pipeline) return;
  const gchar *char_uri = (*env)->GetStringUTFChars (env, uri, NULL);
  GST_DEBUG ("Setting URI to %s", char_uri);
  if (data->target_state >= GST_STATE_READY)
    gst_element_set_state (data->pipeline, GST_STATE_READY);
  g_object_set (data->pipeline, "uri", char_uri, NULL);
  (*env)->ReleaseStringUTFChars (env, uri, char_uri);
  data->duration = GST_CLOCK_TIME_NONE;
  data->is_live |= (gst_element_set_state (data->pipeline, data->target_state) == GST_STATE_CHANGE_NO_PREROLL);
}

/* Set pipeline to PLAYING state */
static void gst_native_play (JNIEnv* env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Setting state to PLAYING");
  data->target_state = GST_STATE_PLAYING;
  data->is_live |= (gst_element_set_state (data->pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_NO_PREROLL);
}

/* Set pipeline to PAUSED state */
static void gst_native_pause (JNIEnv* env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Setting state to PAUSED");
  data->target_state = GST_STATE_PAUSED;
  data->is_live |= (gst_element_set_state (data->pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_NO_PREROLL);
}

/* Instruct the pipeline to seek to a different position */
void gst_native_set_position (JNIEnv* env, jobject thiz, int milliseconds) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  gint64 desired_position = (gint64)(milliseconds * GST_MSECOND);
  if (data->state >= GST_STATE_PAUSED) {
    execute_seek (desired_position, data);
  } else {
    GST_DEBUG ("Scheduling seek to %" GST_TIME_FORMAT " for later", GST_TIME_ARGS (desired_position));
    data->desired_position = desired_position;
  }
}

/* Static class initializer: retrieve method and field IDs */
static jboolean gst_native_class_init (JNIEnv* env, jclass klass) {
  custom_data_field_id = (*env)->GetFieldID (env, klass, "native_custom_data", "J");
  set_message_method_id = (*env)->GetMethodID (env, klass, "setMessage", "(Ljava/lang/String;)V");
  set_current_position_method_id = (*env)->GetMethodID (env, klass, "setCurrentPosition", "(II)V");
  on_gstreamer_initialized_method_id = (*env)->GetMethodID (env, klass, "onGStreamerInitialized", "()V");
  on_media_size_changed_method_id = (*env)->GetMethodID (env, klass, "onMediaSizeChanged", "(II)V");

  if (!custom_data_field_id || !set_message_method_id || !on_gstreamer_initialized_method_id ||
      !on_media_size_changed_method_id || !set_current_position_method_id) {
    /* We emit this message through the Android log instead of the GStreamer log because the latter
     * has not been initialized yet.
     */
    __android_log_print (ANDROID_LOG_ERROR, "tutorial-5", "The calling class does not implement all necessary interface methods");
    return JNI_FALSE;
  }
  return JNI_TRUE;
}

static void gst_native_surface_init (JNIEnv *env, jobject thiz, jobject surface) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  ANativeWindow *new_native_window = ANativeWindow_fromSurface (env, surface);
  GST_DEBUG ("Received surface %p (native window %p)", surface, new_native_window);

  if (data->native_window) {
    ANativeWindow_release (data->native_window);
    if (data->native_window == new_native_window) {
      GST_DEBUG ("New native window is the same as the previous one %p", data->native_window);
      if (data->pipeline) {
        gst_video_overlay_expose (GST_VIDEO_OVERLAY (data->pipeline));
        gst_video_overlay_expose (GST_VIDEO_OVERLAY (data->pipeline));
      }
      return;
    } else {
      GST_DEBUG ("Released previous native window %p", data->native_window);
      data->initialized = FALSE;
    }
  }
  data->native_window = new_native_window;

  check_initialization_complete (data);
}

static void gst_native_surface_finalize (JNIEnv *env, jobject thiz) {
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
  if (!data) return;
  GST_DEBUG ("Releasing Native Window %p", data->native_window);

  if (data->pipeline) {
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->pipeline), (guintptr)NULL);
    gst_element_set_state (data->pipeline, GST_STATE_READY);
  }

  ANativeWindow_release (data->native_window);
  data->native_window = NULL;
  data->initialized = FALSE;
}

/* List of implemented native methods */
static JNINativeMethod native_methods[] = {
  { "nativeInit", "()V", (void *) gst_native_init},
  { "nativeFinalize", "()V", (void *) gst_native_finalize},
  { "nativeSetUri", "(Ljava/lang/String;)V", (void *) gst_native_set_uri},
  { "nativePlay", "()V", (void *) gst_native_play},
  { "nativePause", "()V", (void *) gst_native_pause},
  { "nativeSetPosition", "(I)V", (void*) gst_native_set_position},
  { "nativeSurfaceInit", "(Ljava/lang/Object;)V", (void *) gst_native_surface_init},
  { "nativeSurfaceFinalize", "()V", (void *) gst_native_surface_finalize},
  { "nativeClassInit", "()Z", (void *) gst_native_class_init}
};

/* Library initializer */
jint JNI_OnLoad(JavaVM *vm, void *reserved) {
  JNIEnv *env = NULL;

  java_vm = vm;

  if ((*vm)->GetEnv(vm, (void**) &env, JNI_VERSION_1_4) != JNI_OK) {
    __android_log_print (ANDROID_LOG_ERROR, "tutorial-5", "Could not retrieve JNIEnv");
    return 0;
  }
  jclass klass = (*env)->FindClass (env, "com/gst_sdk_tutorials/tutorial_5/Tutorial5");
  (*env)->RegisterNatives (env, klass, native_methods, G_N_ELEMENTS(native_methods));

  pthread_key_create (&current_jni_env, detach_current_thread);

  return JNI_VERSION_1_4;
}
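The native_methods[] table at the end of tutorial-5.c is what ties each Java `native` declaration in Tutorial5 to its C implementation, using JNI type-signature strings such as "(Ljava/lang/String;)V". The fragment below is not part of the diff; it is a hypothetical sketch (the name nativeIsPlaying and its Java counterpart are invented for illustration) of how one more query-style method could be implemented and registered, assuming it lives inside tutorial-5.c and reuses the CustomData bookkeeping shown above.

/* Hypothetical example, not in tutorial-5.c: report whether the pipeline is
 * (or is about to be) PLAYING. Reuses GET_CUSTOM_DATA and custom_data_field_id
 * defined earlier in the file. */
static jboolean
gst_native_is_playing (JNIEnv * env, jobject thiz)
{
  CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);

  if (!data)
    return JNI_FALSE;
  return (data->target_state == GST_STATE_PLAYING) ? JNI_TRUE : JNI_FALSE;
}

/* The matching table entry ("()Z" = no arguments, boolean return) would go into
 * native_methods[], next to a `private native boolean nativeIsPlaying();`
 * declaration on the Java side: */
/* { "nativeIsPlaying", "()Z", (void *) gst_native_is_playing }, */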
BIN
tutorials/android-tutorial-5/res/drawable-ldpi/file.png
Normal file (new binary image, 1.1 KiB)
BIN
tutorials/android-tutorial-5/res/drawable-ldpi/folder.png
Normal file (new binary image, 1.5 KiB)
(Five further new binary drawable resources follow in the diff: 1.1 KiB, 1.4 KiB, 2.7 KiB, 4.1 KiB and 5.4 KiB; their paths are not shown here.)
39
tutorials/android-tutorial-5/res/layout/file_dialog_main.xml
Normal file
@ -0,0 +1,39 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent"
    android:layout_above="@+id/fdLinearLayoutList"
    android:orientation="vertical" >

    <TextView
        android:id="@+id/path"
        android:layout_width="fill_parent"
        android:layout_height="wrap_content" />

    <ListView
        android:id="@android:id/list"
        android:layout_width="fill_parent"
        android:layout_height="0dip"
        android:layout_weight="1" />

    <LinearLayout
        android:layout_width="fill_parent"
        android:layout_height="wrap_content"
        android:orientation="horizontal" >

        <Button
            android:id="@+id/fdButtonCancel"
            android:layout_width="0dip"
            android:layout_height="wrap_content"
            android:layout_weight=".3"
            android:text="@string/button_cancel" />

        <Button
            android:id="@+id/fdButtonSelect"
            android:layout_width="0dip"
            android:layout_height="wrap_content"
            android:layout_weight=".7"
            android:text="@string/button_select" />
    </LinearLayout>

</LinearLayout>
24
tutorials/android-tutorial-5/res/layout/file_dialog_row.xml
Normal file
@ -0,0 +1,24 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="fill_parent"
    android:layout_height="wrap_content" >

    <ImageView
        android:id="@+id/fdrowimage"
        android:layout_width="wrap_content"
        android:layout_height="36dp"
        android:contentDescription="@string/icon" />

    <TextView
        android:id="@+id/fdrowtext"
        android:layout_width="0dip"
        android:layout_height="36dp"
        android:layout_weight="1"
        android:ellipsize="marquee"
        android:gravity="center_vertical"
        android:marqueeRepeatLimit="marquee_forever"
        android:scrollHorizontally="true"
        android:singleLine="true"
        android:textSize="23dp" />

</LinearLayout>
77
tutorials/android-tutorial-5/res/layout/main.xml
Executable file
@ -0,0 +1,77 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:gravity="center_vertical"
    android:orientation="vertical" >

    <TextView
        android:id="@+id/textview_message"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal" />

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal"
        android:orientation="horizontal" >

        <ImageButton
            android:id="@+id/button_play"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_play"
            android:src="@android:drawable/ic_media_play"
            android:text="@string/button_play" />

        <ImageButton
            android:id="@+id/button_stop"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_stop"
            android:src="@android:drawable/ic_media_pause"
            android:text="@string/button_stop" />

        <ImageButton
            android:id="@+id/button_select"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:contentDescription="@string/button_select"
            android:src="@android:drawable/ic_media_next"
            android:text="@string/button_select" />
    </LinearLayout>

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginBottom="16dip"
        android:gravity="center_horizontal"
        android:orientation="horizontal" >

        <TextView
            android:id="@+id/textview_time"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_gravity="center_vertical"
            android:layout_marginLeft="5dip"
            android:layout_marginRight="5dip" />

        <SeekBar
            android:id="@+id/seek_bar"
            android:layout_width="0dip"
            android:layout_height="wrap_content"
            android:layout_gravity="center_vertical"
            android:layout_weight="1"
            android:indeterminate="false" />
    </LinearLayout>

    <com.gst_sdk_tutorials.tutorial_5.GStreamerSurfaceView
        android:id="@+id/surface_video"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_gravity="center_vertical|center_horizontal" />

</LinearLayout>
12
tutorials/android-tutorial-5/res/values/strings.xml
Executable file
@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <string name="app_name">GStreamer tutorial 5</string>
    <string name="button_play">Play</string>
    <string name="button_stop">Stop</string>
    <string name="button_select">Select</string>
    <string name="button_cancel">Cancel</string>
    <string name="filechooser_name">Select a file</string>
    <string name="location">Location</string>
    <string name="cant_read_folder">folder cannot be read</string>
    <string name="icon">Icon</string>
</resources>
@ -0,0 +1,85 @@
package com.gst_sdk_tutorials.tutorial_5;

import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;

// A simple SurfaceView whose width and height can be set from the outside
public class GStreamerSurfaceView extends SurfaceView {
    public int media_width = 320;
    public int media_height = 240;

    // Mandatory constructors, they do not do much
    public GStreamerSurfaceView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    public GStreamerSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public GStreamerSurfaceView (Context context) {
        super(context);
    }

    // Called by the layout manager to find out our size and give us some rules.
    // We will try to maximize our size, and preserve the media's aspect ratio if
    // we are given the freedom to do so.
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int width = 0, height = 0;
        int wmode = View.MeasureSpec.getMode(widthMeasureSpec);
        int hmode = View.MeasureSpec.getMode(heightMeasureSpec);
        int wsize = View.MeasureSpec.getSize(widthMeasureSpec);
        int hsize = View.MeasureSpec.getSize(heightMeasureSpec);

        Log.i ("GStreamer", "onMeasure called with " + media_width + "x" + media_height);
        // Obey width rules
        switch (wmode) {
        case View.MeasureSpec.AT_MOST:
            if (hmode == View.MeasureSpec.EXACTLY) {
                width = Math.min(hsize * media_width / media_height, wsize);
                break;
            }
        case View.MeasureSpec.EXACTLY:
            width = wsize;
            break;
        case View.MeasureSpec.UNSPECIFIED:
            width = media_width;
        }

        // Obey height rules
        switch (hmode) {
        case View.MeasureSpec.AT_MOST:
            if (wmode == View.MeasureSpec.EXACTLY) {
                height = Math.min(wsize * media_height / media_width, hsize);
                break;
            }
        case View.MeasureSpec.EXACTLY:
            height = hsize;
            break;
        case View.MeasureSpec.UNSPECIFIED:
            height = media_height;
        }

        // Finally, calculate the best size when both axes are free
        if (hmode == View.MeasureSpec.AT_MOST && wmode == View.MeasureSpec.AT_MOST) {
            int correct_height = width * media_height / media_width;
            int correct_width = height * media_width / media_height;

            if (correct_height < height)
                height = correct_height;
            else
                width = correct_width;
        }

        // Obey minimum size
        width = Math.max (getSuggestedMinimumWidth(), width);
        height = Math.max (getSuggestedMinimumHeight(), height);
        setMeasuredDimension(width, height);
    }

}
@ -0,0 +1,309 @@
package com.gst_sdk_tutorials.tutorial_5;

import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.Environment;
import android.os.PowerManager;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageButton;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.TextView;
import android.widget.Toast;

import org.freedesktop.gstreamer.GStreamer;
import com.lamerman.FileDialog;

public class Tutorial5 extends Activity implements SurfaceHolder.Callback, OnSeekBarChangeListener {
    private native void nativeInit();     // Initialize native code, build pipeline, etc
    private native void nativeFinalize(); // Destroy pipeline and shutdown native code
    private native void nativeSetUri(String uri); // Set the URI of the media to play
    private native void nativePlay();     // Set pipeline to PLAYING
    private native void nativeSetPosition(int milliseconds); // Seek to the indicated position, in milliseconds
    private native void nativePause();    // Set pipeline to PAUSED
    private static native boolean nativeClassInit(); // Initialize native class: cache Method IDs for callbacks
    private native void nativeSurfaceInit(Object surface); // A new surface is available
    private native void nativeSurfaceFinalize(); // Surface about to be destroyed
    private long native_custom_data;      // Native code will use this to keep private data

    private boolean is_playing_desired;   // Whether the user asked to go to PLAYING
    private int position;                 // Current position, reported by native code
    private int duration;                 // Current clip duration, reported by native code
    private boolean is_local_media;       // Whether this clip is stored locally or is being streamed
    private int desired_position;         // Position where the user wants to seek to
    private String mediaUri;              // URI of the clip being played

    private final String defaultMediaUri = "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.ogv";

    static private final int PICK_FILE_CODE = 1;
    private String last_folder;

    private PowerManager.WakeLock wake_lock;

    // Called when the activity is first created.
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);

        // Initialize GStreamer and warn if it fails
        try {
            GStreamer.init(this);
        } catch (Exception e) {
            Toast.makeText(this, e.getMessage(), Toast.LENGTH_LONG).show();
            finish();
            return;
        }

        setContentView(R.layout.main);

        PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
        wake_lock = pm.newWakeLock(PowerManager.FULL_WAKE_LOCK, "GStreamer tutorial 5");
        wake_lock.setReferenceCounted(false);

        ImageButton play = (ImageButton) this.findViewById(R.id.button_play);
        play.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                is_playing_desired = true;
                wake_lock.acquire();
                nativePlay();
            }
        });

        ImageButton pause = (ImageButton) this.findViewById(R.id.button_stop);
        pause.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                is_playing_desired = false;
                wake_lock.release();
                nativePause();
            }
        });

        ImageButton select = (ImageButton) this.findViewById(R.id.button_select);
        select.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                Intent i = new Intent(getBaseContext(), FileDialog.class);
                i.putExtra(FileDialog.START_PATH, last_folder);
                startActivityForResult(i, PICK_FILE_CODE);
            }
        });

        SurfaceView sv = (SurfaceView) this.findViewById(R.id.surface_video);
        SurfaceHolder sh = sv.getHolder();
        sh.addCallback(this);

        SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);
        sb.setOnSeekBarChangeListener(this);

        // Retrieve our previous state, or initialize it to default values
        if (savedInstanceState != null) {
            is_playing_desired = savedInstanceState.getBoolean("playing");
            position = savedInstanceState.getInt("position");
            duration = savedInstanceState.getInt("duration");
            mediaUri = savedInstanceState.getString("mediaUri");
            last_folder = savedInstanceState.getString("last_folder");
            Log.i ("GStreamer", "Activity created with saved state:");
        } else {
            is_playing_desired = false;
            position = duration = 0;
            last_folder = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MOVIES).getAbsolutePath();
            Intent intent = getIntent();
            android.net.Uri uri = intent.getData();
            if (uri == null)
                mediaUri = defaultMediaUri;
            else {
                Log.i ("GStreamer", "Received URI: " + uri);
                if (uri.getScheme().equals("content")) {
                    android.database.Cursor cursor = getContentResolver().query(uri, null, null, null, null);
                    cursor.moveToFirst();
                    mediaUri = "file://" + cursor.getString(cursor.getColumnIndex(android.provider.MediaStore.Video.Media.DATA));
                    cursor.close();
                } else
                    mediaUri = uri.toString();
            }
            Log.i ("GStreamer", "Activity created with no saved state:");
        }
        is_local_media = false;
        Log.i ("GStreamer", "  playing:" + is_playing_desired + " position:" + position +
                " duration: " + duration + " uri: " + mediaUri);

        // Start with disabled buttons, until native code is initialized
        this.findViewById(R.id.button_play).setEnabled(false);
        this.findViewById(R.id.button_stop).setEnabled(false);

        nativeInit();
    }

    protected void onSaveInstanceState (Bundle outState) {
        Log.d ("GStreamer", "Saving state, playing:" + is_playing_desired + " position:" + position +
                " duration: " + duration + " uri: " + mediaUri);
        outState.putBoolean("playing", is_playing_desired);
        outState.putInt("position", position);
        outState.putInt("duration", duration);
        outState.putString("mediaUri", mediaUri);
        outState.putString("last_folder", last_folder);
    }

    protected void onDestroy() {
        nativeFinalize();
        if (wake_lock.isHeld())
            wake_lock.release();
        super.onDestroy();
    }

    // Called from native code. This sets the content of the TextView from the UI thread.
    private void setMessage(final String message) {
        final TextView tv = (TextView) this.findViewById(R.id.textview_message);
        runOnUiThread (new Runnable() {
            public void run() {
                tv.setText(message);
            }
        });
    }

    // Set the URI to play, and record whether it is a local or remote file
    private void setMediaUri() {
        nativeSetUri (mediaUri);
        is_local_media = mediaUri.startsWith("file://");
    }

    // Called from native code. Native code calls this once it has created its pipeline and
    // the main loop is running, so it is ready to accept commands.
    private void onGStreamerInitialized () {
        Log.i ("GStreamer", "GStreamer initialized:");
        Log.i ("GStreamer", "  playing:" + is_playing_desired + " position:" + position + " uri: " + mediaUri);

        // Restore previous playing state
        setMediaUri ();
        nativeSetPosition (position);
        if (is_playing_desired) {
            nativePlay();
            wake_lock.acquire();
        } else {
            nativePause();
            wake_lock.release();
        }

        // Re-enable buttons, now that GStreamer is initialized
        final Activity activity = this;
        runOnUiThread(new Runnable() {
            public void run() {
                activity.findViewById(R.id.button_play).setEnabled(true);
                activity.findViewById(R.id.button_stop).setEnabled(true);
            }
        });
    }

    // The text widget acts as a slave for the seek bar, so it reflects what the seek bar shows, whether
    // it is an actual pipeline position or the position the user is currently dragging to.
    private void updateTimeWidget () {
        TextView tv = (TextView) this.findViewById(R.id.textview_time);
        SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);
        int pos = sb.getProgress();

        SimpleDateFormat df = new SimpleDateFormat("HH:mm:ss");
        df.setTimeZone(TimeZone.getTimeZone("UTC"));
        String message = df.format(new Date (pos)) + " / " + df.format(new Date (duration));
        tv.setText(message);
    }

    // Called from native code
    private void setCurrentPosition(final int position, final int duration) {
        final SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);

        // Ignore position messages from the pipeline if the seek bar is being dragged
        if (sb.isPressed()) return;

        runOnUiThread (new Runnable() {
            public void run() {
                sb.setMax(duration);
                sb.setProgress(position);
                updateTimeWidget();
                sb.setEnabled(duration != 0);
            }
        });
        this.position = position;
        this.duration = duration;
    }

    static {
        System.loadLibrary("gstreamer_android");
        System.loadLibrary("tutorial-5");
        nativeClassInit();
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        Log.d("GStreamer", "Surface changed to format " + format + " width "
                + width + " height " + height);
        nativeSurfaceInit (holder.getSurface());
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Log.d("GStreamer", "Surface created: " + holder.getSurface());
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.d("GStreamer", "Surface destroyed");
        nativeSurfaceFinalize ();
    }

    // Called from native code when the size of the media changes or is first detected.
    // Inform the video surface about the new size and recalculate the layout.
    private void onMediaSizeChanged (int width, int height) {
        Log.i ("GStreamer", "Media size changed to " + width + "x" + height);
        final GStreamerSurfaceView gsv = (GStreamerSurfaceView) this.findViewById(R.id.surface_video);
        gsv.media_width = width;
        gsv.media_height = height;
        runOnUiThread(new Runnable() {
            public void run() {
                gsv.requestLayout();
            }
        });
    }

    // The Seek Bar thumb has moved, either because the user dragged it or we have called setProgress()
    public void onProgressChanged(SeekBar sb, int progress, boolean fromUser) {
        if (fromUser == false) return;
        desired_position = progress;
        // If this is a local file, allow scrub seeking, that is, seek as soon as the slider is moved.
        if (is_local_media) nativeSetPosition(desired_position);
        updateTimeWidget();
    }

    // The user started dragging the Seek Bar thumb
    public void onStartTrackingTouch(SeekBar sb) {
        nativePause();
    }

    // The user released the Seek Bar thumb
    public void onStopTrackingTouch(SeekBar sb) {
        // If this is a remote file, scrub seeking is probably not going to work smoothly enough.
        // Therefore, perform the seek only when the slider is released.
        if (!is_local_media) nativeSetPosition(desired_position);
        if (is_playing_desired) nativePlay();
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data)
    {
        if (resultCode == RESULT_OK && requestCode == PICK_FILE_CODE) {
            mediaUri = "file://" + data.getStringExtra(FileDialog.RESULT_PATH);
            position = 0;
            last_folder = new File (data.getStringExtra(FileDialog.RESULT_PATH)).getParent();
            Log.i("GStreamer", "Setting last_folder to " + last_folder);
            setMediaUri();
        }
    }
}
334  tutorials/android-tutorial-5/src/com/lamerman/FileDialog.java  Normal file
@@ -0,0 +1,334 @@
// Based on http://code.google.com/p/android-file-dialog/
//
// Copyright (c) 2011, 2012, Alexander Ponomarev <alexander.ponomarev.1@gmail.com>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice, this list
// of conditions and the following disclaimer. Redistributions in binary form must
// reproduce the above copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided with the distribution.
// Neither the name of the <ORGANIZATION> nor the names of its contributors may be used
// to endorse or promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
// SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
// BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.

package com.lamerman;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.TreeMap;

import android.app.AlertDialog;
import android.app.ListActivity;
import android.content.DialogInterface;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ListView;
import android.widget.SimpleAdapter;
import android.widget.TextView;

import com.gst_sdk_tutorials.tutorial_5.R;

/**
 * Activity for choosing files or directories.
 *
 * @author android
 *
 */
public class FileDialog extends ListActivity {

    /**
     * Key for an item in the paths list.
     */
    private static final String ITEM_KEY = "key";

    /**
     * Image for an item in the paths list (directory or file).
     */
    private static final String ITEM_IMAGE = "image";

    /**
     * Root directory.
     */
    private static final String ROOT = "/";

    /**
     * Activity input parameter: initial path. Default: ROOT.
     */
    public static final String START_PATH = "START_PATH";

    /**
     * Activity input parameter: file format filter. Default:
     * null.
     */
    public static final String FORMAT_FILTER = "FORMAT_FILTER";

    /**
     * Activity output parameter: chosen path. Default: null.
     */
    public static final String RESULT_PATH = "RESULT_PATH";

    private List<String> path = null;
    private TextView myPath;
    private ArrayList<HashMap<String, Object>> mList;

    private Button selectButton;

    private String parentPath;
    private String currentPath = ROOT;

    private String[] formatFilter = null;

    private File selectedFile;
    private HashMap<String, Integer> lastPositions = new HashMap<String, Integer>();

    /**
     * Called when the activity is first created. Sets up all the input
     * parameters and the views.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setResult(RESULT_CANCELED, getIntent());

        setContentView(R.layout.file_dialog_main);
        myPath = (TextView) findViewById(R.id.path);

        selectButton = (Button) findViewById(R.id.fdButtonSelect);
        selectButton.setEnabled(false);
        selectButton.setOnClickListener(new OnClickListener() {

            public void onClick(View v) {
                if (selectedFile != null) {
                    getIntent().putExtra(RESULT_PATH, selectedFile.getPath());
                    setResult(RESULT_OK, getIntent());
                    finish();
                }
            }
        });

        formatFilter = getIntent().getStringArrayExtra(FORMAT_FILTER);

        final Button cancelButton = (Button) findViewById(R.id.fdButtonCancel);
        cancelButton.setOnClickListener(new OnClickListener() {

            public void onClick(View v) {
                setResult(RESULT_CANCELED);
                finish();
            }

        });

        String startPath;
        if (savedInstanceState != null) {
            startPath = savedInstanceState.getString("currentPath");
        } else {
            startPath = getIntent().getStringExtra(START_PATH);
        }
        startPath = startPath != null ? startPath : ROOT;
        getDir(startPath);

        ListView lv = (ListView) findViewById(android.R.id.list);
        lv.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
    }

    private void getDir(String dirPath) {

        boolean useAutoSelection = dirPath.length() < currentPath.length();

        Integer position = lastPositions.get(parentPath);

        getDirImpl(dirPath);

        if (position != null && useAutoSelection) {
            getListView().setSelection(position);
        }

    }

    /**
     * Builds the structure of files and child directories of the given directory.
     *
     * @param dirPath
     *            Parent directory.
     */
    private void getDirImpl(final String dirPath) {

        currentPath = dirPath;

        final List<String> item = new ArrayList<String>();
        path = new ArrayList<String>();
        mList = new ArrayList<HashMap<String, Object>>();

        File f = new File(currentPath);
        File[] files = f.listFiles();
        if (files == null) {
            currentPath = ROOT;
            f = new File(currentPath);
            files = f.listFiles();
        }
        myPath.setText(getText(R.string.location) + ": " + currentPath);

        if (!currentPath.equals(ROOT)) {

            item.add(ROOT);
            addItem(ROOT, R.drawable.folder);
            path.add(ROOT);

            item.add("../");
            addItem("../", R.drawable.folder);
            path.add(f.getParent());
            parentPath = f.getParent();

        }

        TreeMap<String, String> dirsMap = new TreeMap<String, String>();
        TreeMap<String, String> dirsPathMap = new TreeMap<String, String>();
        TreeMap<String, String> filesMap = new TreeMap<String, String>();
        TreeMap<String, String> filesPathMap = new TreeMap<String, String>();
        for (File file : files) {
            if (file.isDirectory()) {
                String dirName = file.getName();
                dirsMap.put(dirName, dirName);
                dirsPathMap.put(dirName, file.getPath());
            } else {
                final String fileName = file.getName();
                final String fileNameLwr = fileName.toLowerCase();
                // if there is a format filter, use it
                if (formatFilter != null) {
                    boolean contains = false;
                    for (int i = 0; i < formatFilter.length; i++) {
                        final String formatLwr = formatFilter[i].toLowerCase();
                        if (fileNameLwr.endsWith(formatLwr)) {
                            contains = true;
                            break;
                        }
                    }
                    if (contains) {
                        filesMap.put(fileName, fileName);
                        filesPathMap.put(fileName, file.getPath());
                    }
                // otherwise, add all files
                } else {
                    filesMap.put(fileName, fileName);
                    filesPathMap.put(fileName, file.getPath());
                }
            }
        }
        item.addAll(dirsMap.tailMap("").values());
        item.addAll(filesMap.tailMap("").values());
        path.addAll(dirsPathMap.tailMap("").values());
        path.addAll(filesPathMap.tailMap("").values());

        SimpleAdapter fileList = new SimpleAdapter(this, mList,
                R.layout.file_dialog_row,
                new String[] { ITEM_KEY, ITEM_IMAGE }, new int[] {
                        R.id.fdrowtext, R.id.fdrowimage });

        for (String dir : dirsMap.tailMap("").values()) {
            addItem(dir, R.drawable.folder);
        }

        for (String file : filesMap.tailMap("").values()) {
            addItem(file, R.drawable.file);
        }

        fileList.notifyDataSetChanged();

        setListAdapter(fileList);

    }

    private void addItem(String fileName, int imageId) {
        HashMap<String, Object> item = new HashMap<String, Object>();
        item.put(ITEM_KEY, fileName);
        item.put(ITEM_IMAGE, imageId);
        mList.add(item);
    }

    /**
     * When a list item is clicked: 1) If it is a directory, open its
     * children; 2) If directories may be chosen, set it as the
     * chosen path; 3) If it is a file, set it as the chosen path; 4) Enable
     * the select button.
     */
    @Override
    protected void onListItemClick(ListView l, View v, int position, long id) {

        File file = new File(path.get(position));

        if (file.isDirectory()) {
            selectButton.setEnabled(false);
            if (file.canRead()) {
                lastPositions.put(currentPath, position);
                getDir(path.get(position));
            } else {
                new AlertDialog.Builder(this)
                        .setIcon(android.R.drawable.stat_sys_warning)
                        .setTitle(
                                "[" + file.getName() + "] "
                                        + getText(R.string.cant_read_folder))
                        .setPositiveButton("OK",
                                new DialogInterface.OnClickListener() {

                                    public void onClick(DialogInterface dialog,
                                            int which) {

                                    }
                                }).show();
            }
        } else {
            if (selectedFile != null
                    && selectedFile.getPath().equals(file.getPath())) {
                getIntent().putExtra(RESULT_PATH, selectedFile.getPath());
                setResult(RESULT_OK, getIntent());
                finish();
            }
            selectedFile = file;
            l.setItemChecked(position, true);
            selectButton.setEnabled(true);
        }
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if ((keyCode == KeyEvent.KEYCODE_BACK)) {
            selectButton.setEnabled(false);

            if (!currentPath.equals(ROOT)) {
                getDir(parentPath);
            } else {
                return super.onKeyDown(keyCode, event);
            }

            return true;
        } else {
            return super.onKeyDown(keyCode, event);
        }
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        outState.putString("currentPath", currentPath);
        super.onSaveInstanceState(outState);
    }

}
28  tutorials/basic-tutorial-1.c  Normal file
@@ -0,0 +1,28 @@
#include <gst/gst.h>

int main(int argc, char *argv[]) {
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Build the pipeline */
  pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
99  tutorials/basic-tutorial-12.c  Normal file
@@ -0,0 +1,99 @@
#include <gst/gst.h>
#include <string.h>

typedef struct _CustomData {
  gboolean is_live;
  GstElement *pipeline;
  GMainLoop *loop;
} CustomData;

static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR: {
      GError *err;
      gchar *debug;

      gst_message_parse_error (msg, &err, &debug);
      g_print ("Error: %s\n", err->message);
      g_error_free (err);
      g_free (debug);

      gst_element_set_state (data->pipeline, GST_STATE_READY);
      g_main_loop_quit (data->loop);
      break;
    }
    case GST_MESSAGE_EOS:
      /* end-of-stream */
      gst_element_set_state (data->pipeline, GST_STATE_READY);
      g_main_loop_quit (data->loop);
      break;
    case GST_MESSAGE_BUFFERING: {
      gint percent = 0;

      /* If the stream is live, we do not care about buffering. */
      if (data->is_live) break;

      gst_message_parse_buffering (msg, &percent);
      g_print ("Buffering (%3d%%)\r", percent);
      /* Wait until buffering is complete before start/resume playing */
      if (percent < 100)
        gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
      else
        gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
      break;
    }
    case GST_MESSAGE_CLOCK_LOST:
      /* Get a new clock */
      gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
      gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
      break;
    default:
      /* Unhandled message */
      break;
  }
}

int main(int argc, char *argv[]) {
  GstElement *pipeline;
  GstBus *bus;
  GstStateChangeReturn ret;
  GMainLoop *main_loop;
  CustomData data;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Initialize our data structure */
  memset (&data, 0, sizeof (data));

  /* Build the pipeline */
  pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
  bus = gst_element_get_bus (pipeline);

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  } else if (ret == GST_STATE_CHANGE_NO_PREROLL) {
    data.is_live = TRUE;
  }

  main_loop = g_main_loop_new (NULL, FALSE);
  data.loop = main_loop;
  data.pipeline = pipeline;

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (cb_message), &data);

  g_main_loop_run (main_loop);

  /* Free resources */
  g_main_loop_unref (main_loop);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
146  tutorials/basic-tutorial-13.c  Normal file
@@ -0,0 +1,146 @@
#include <string.h>
#include <stdio.h>
#include <gst/gst.h>

typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *video_sink;
  GMainLoop *loop;

  gboolean playing;  /* Playing or Paused */
  gdouble rate;      /* Current playback rate (can be negative) */
} CustomData;

/* Send seek event to change rate */
static void send_seek_event (CustomData *data) {
  gint64 position;
  GstEvent *seek_event;

  /* Obtain the current position, needed for the seek event */
  if (!gst_element_query_position (data->pipeline, GST_FORMAT_TIME, &position)) {
    g_printerr ("Unable to retrieve current position.\n");
    return;
  }

  /* Create the seek event */
  if (data->rate > 0) {
    seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
        GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_SET, -1);
  } else {
    seek_event = gst_event_new_seek (data->rate, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
        GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_SET, position);
  }

  if (data->video_sink == NULL) {
    /* If we have not done so, obtain the sink through which we will send the seek events */
    g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
  }

  /* Send the event */
  gst_element_send_event (data->video_sink, seek_event);

  g_print ("Current rate: %g\n", data->rate);
}

/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
  gchar *str = NULL;

  if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) != G_IO_STATUS_NORMAL) {
    return TRUE;
  }

  switch (g_ascii_tolower (str[0])) {
  case 'p':
    data->playing = !data->playing;
    gst_element_set_state (data->pipeline, data->playing ? GST_STATE_PLAYING : GST_STATE_PAUSED);
    g_print ("Setting state to %s\n", data->playing ? "PLAYING" : "PAUSE");
    break;
  case 's':
    if (g_ascii_isupper (str[0])) {
      data->rate *= 2.0;
    } else {
      data->rate /= 2.0;
    }
    send_seek_event (data);
    break;
  case 'd':
    data->rate *= -1.0;
    send_seek_event (data);
    break;
  case 'n':
    if (data->video_sink == NULL) {
      /* If we have not done so, obtain the sink through which we will send the step events */
      g_object_get (data->pipeline, "video-sink", &data->video_sink, NULL);
    }

    gst_element_send_event (data->video_sink,
        gst_event_new_step (GST_FORMAT_BUFFERS, 1, data->rate, TRUE, FALSE));
    g_print ("Stepping one frame\n");
    break;
  case 'q':
    g_main_loop_quit (data->loop);
    break;
  default:
    break;
  }

  g_free (str);

  return TRUE;
}

int main(int argc, char *argv[]) {
  CustomData data;
  GstStateChangeReturn ret;
  GIOChannel *io_stdin;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Initialize our data structure */
  memset (&data, 0, sizeof (data));

  /* Print usage map */
  g_print (
    "USAGE: Choose one of the following options, then press enter:\n"
    " 'P' to toggle between PAUSE and PLAY\n"
    " 'S' to increase playback speed, 's' to decrease playback speed\n"
    " 'D' to toggle playback direction\n"
    " 'N' to move to next frame (in the current direction, better in PAUSE)\n"
    " 'Q' to quit\n");

  /* Build the pipeline */
  data.pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

  /* Add a keyboard watch so we get notified of keystrokes */
#ifdef G_OS_WIN32
  io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
#else
  io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
  g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);

  /* Start playing */
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }
  data.playing = TRUE;
  data.rate = 1.0;

  /* Create a GLib Main Loop and set it to run */
  data.loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.loop);

  /* Free resources */
  g_main_loop_unref (data.loop);
  g_io_channel_unref (io_stdin);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  if (data.video_sink != NULL)
    gst_object_unref (data.video_sink);
  gst_object_unref (data.pipeline);
  return 0;
}
95  tutorials/basic-tutorial-15.c  Normal file
@@ -0,0 +1,95 @@
#include <clutter-gst/clutter-gst.h>

/* Setup the video texture once its size is known */
void size_change (ClutterActor *texture, gint width, gint height, gpointer user_data) {
  ClutterActor *stage;
  gfloat new_x, new_y, new_width, new_height;
  gfloat stage_width, stage_height;
  ClutterAnimation *animation = NULL;

  stage = clutter_actor_get_stage (texture);
  if (stage == NULL)
    return;

  clutter_actor_get_size (stage, &stage_width, &stage_height);

  /* Center video on window and calculate new size preserving aspect ratio */
  new_height = (height * stage_width) / width;
  if (new_height <= stage_height) {
    new_width = stage_width;

    new_x = 0;
    new_y = (stage_height - new_height) / 2;
  } else {
    new_width = (width * stage_height) / height;
    new_height = stage_height;

    new_x = (stage_width - new_width) / 2;
    new_y = 0;
  }
  clutter_actor_set_position (texture, new_x, new_y);
  clutter_actor_set_size (texture, new_width, new_height);
  clutter_actor_set_rotation (texture, CLUTTER_Y_AXIS, 0.0, stage_width / 2, 0, 0);
  /* Animate it */
  animation = clutter_actor_animate (texture, CLUTTER_LINEAR, 10000, "rotation-angle-y", 360.0, NULL);
  clutter_animation_set_loop (animation, TRUE);
}

int main(int argc, char *argv[]) {
  GstElement *pipeline, *sink;
  ClutterTimeline *timeline;
  ClutterActor *stage, *texture;

  /* clutter-gst takes care of initializing Clutter and GStreamer */
  if (clutter_gst_init (&argc, &argv) != CLUTTER_INIT_SUCCESS) {
    g_error ("Failed to initialize clutter\n");
    return -1;
  }

  stage = clutter_stage_get_default ();

  /* Make a timeline */
  timeline = clutter_timeline_new (1000);
  g_object_set(timeline, "loop", TRUE, NULL);

  /* Create new texture and disable slicing so the video is properly mapped onto it */
  texture = CLUTTER_ACTOR (g_object_new (CLUTTER_TYPE_TEXTURE, "disable-slicing", TRUE, NULL));
  g_signal_connect (texture, "size-change", G_CALLBACK (size_change), NULL);

  /* Build the GStreamer pipeline */
  pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

  /* Instantiate the Clutter sink */
  sink = gst_element_factory_make ("autocluttersink", NULL);
  if (sink == NULL) {
    /* Revert to the older cluttersink, in case autocluttersink was not found */
    sink = gst_element_factory_make ("cluttersink", NULL);
  }
  if (sink == NULL) {
    g_printerr ("Unable to find a Clutter sink.\n");
    return -1;
  }

  /* Link GStreamer with Clutter by passing the Clutter texture to the Clutter sink*/
  g_object_set (sink, "texture", texture, NULL);

  /* Add the Clutter sink to the pipeline */
  g_object_set (pipeline, "video-sink", sink, NULL);

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* start the timeline */
  clutter_timeline_start (timeline);

  /* Add texture to the stage, and show it */
  clutter_group_add (CLUTTER_GROUP (stage), texture);
  clutter_actor_show_all (stage);

  clutter_main();

  /* Free resources */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
76  tutorials/basic-tutorial-2.c  Normal file
@@ -0,0 +1,76 @@
#include <gst/gst.h>

int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *sink;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  source = gst_element_factory_make ("videotestsrc", "source");
  sink = gst_element_factory_make ("autovideosink", "sink");

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !source || !sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
  if (gst_element_link (source, sink) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Modify the source's properties */
  g_object_set (source, "pattern", 0, NULL);

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
149  tutorials/basic-tutorial-3.c  Normal file
@@ -0,0 +1,149 @@
#include <gst/gst.h>

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *source;
  GstElement *convert;
  GstElement *sink;
} CustomData;

/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);

int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;
  gboolean terminate = FALSE;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  data.source = gst_element_factory_make ("uridecodebin", "source");
  data.convert = gst_element_factory_make ("audioconvert", "convert");
  data.sink = gst_element_factory_make ("autoaudiosink", "sink");

  /* Create the empty pipeline */
  data.pipeline = gst_pipeline_new ("test-pipeline");

  if (!data.pipeline || !data.source || !data.convert || !data.sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build the pipeline. Note that we are NOT linking the source at this
   * point. We will do it later. */
  gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert , data.sink, NULL);
  if (!gst_element_link (data.convert, data.sink)) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Set the URI to play */
  g_object_set (data.source, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

  /* Connect to the pad-added signal */
  g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);

  /* Start playing */
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Listen to the bus */
  bus = gst_element_get_bus (data.pipeline);
  do {
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    /* Parse message */
    if (msg != NULL) {
      GError *err;
      gchar *debug_info;

      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_ERROR:
          gst_message_parse_error (msg, &err, &debug_info);
          g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
          g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
          g_clear_error (&err);
          g_free (debug_info);
          terminate = TRUE;
          break;
        case GST_MESSAGE_EOS:
          g_print ("End-Of-Stream reached.\n");
          terminate = TRUE;
          break;
        case GST_MESSAGE_STATE_CHANGED:
          /* We are only interested in state-changed messages from the pipeline */
          if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
            g_print ("Pipeline state changed from %s to %s:\n",
                gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
          }
          break;
        default:
          /* We should not reach here */
          g_printerr ("Unexpected message received.\n");
          break;
      }
      gst_message_unref (msg);
    }
  } while (!terminate);

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}

/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
  GstPad *sink_pad = gst_element_get_static_pad (data->convert, "sink");
  GstPadLinkReturn ret;
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  const gchar *new_pad_type = NULL;

  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

  /* If our converter is already linked, we have nothing to do here */
  if (gst_pad_is_linked (sink_pad)) {
    g_print ("  We are already linked. Ignoring.\n");
    goto exit;
  }

  /* Check the new pad's type */
  new_pad_caps = gst_pad_query_caps (new_pad, NULL);
  new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
  new_pad_type = gst_structure_get_name (new_pad_struct);
  if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
    g_print ("  It has type '%s' which is not raw audio. Ignoring.\n", new_pad_type);
    goto exit;
  }

  /* Attempt the link */
  ret = gst_pad_link (new_pad, sink_pad);
  if (GST_PAD_LINK_FAILED (ret)) {
    g_print ("  Type is '%s' but link failed.\n", new_pad_type);
  } else {
    g_print ("  Link succeeded (type '%s').\n", new_pad_type);
  }

exit:
  /* Unreference the new pad's caps, if we got them */
  if (new_pad_caps != NULL)
    gst_caps_unref (new_pad_caps);

  /* Unreference the sink pad */
  gst_object_unref (sink_pad);
}
156  tutorials/basic-tutorial-4.c  Normal file
@@ -0,0 +1,156 @@
#include <gst/gst.h>

/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
  GstElement *playbin;    /* Our one and only element */
  gboolean playing;       /* Are we in the PLAYING state? */
  gboolean terminate;     /* Should we terminate execution? */
  gboolean seek_enabled;  /* Is seeking enabled for this media? */
  gboolean seek_done;     /* Have we performed the seek already? */
  gint64 duration;        /* How long does this media last, in nanoseconds */
} CustomData;

/* Forward definition of the message processing function */
static void handle_message (CustomData *data, GstMessage *msg);

int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;

  data.playing = FALSE;
  data.terminate = FALSE;
  data.seek_enabled = FALSE;
  data.seek_done = FALSE;
  data.duration = GST_CLOCK_TIME_NONE;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  data.playbin = gst_element_factory_make ("playbin", "playbin");

  if (!data.playbin) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Set the URI to play */
  g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

  /* Start playing */
  ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.playbin);
    return -1;
  }

  /* Listen to the bus */
  bus = gst_element_get_bus (data.playbin);
  do {
    msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND,
        GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION);

    /* Parse message */
    if (msg != NULL) {
      handle_message (&data, msg);
    } else {
      /* We got no message, this means the timeout expired */
      if (data.playing) {
        gint64 current = -1;

        /* Query the current position of the stream */
        if (!gst_element_query_position (data.playbin, GST_FORMAT_TIME, &current)) {
          g_printerr ("Could not query current position.\n");
        }

        /* If we didn't know it yet, query the stream duration */
        if (!GST_CLOCK_TIME_IS_VALID (data.duration)) {
          if (!gst_element_query_duration (data.playbin, GST_FORMAT_TIME, &data.duration)) {
            g_printerr ("Could not query current duration.\n");
          }
        }

        /* Print current position and total duration */
        g_print ("Position %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
            GST_TIME_ARGS (current), GST_TIME_ARGS (data.duration));

        /* If seeking is enabled, we have not done it yet, and the time is right, seek */
        if (data.seek_enabled && !data.seek_done && current > 10 * GST_SECOND) {
          g_print ("\nReached 10s, performing seek...\n");
          gst_element_seek_simple (data.playbin, GST_FORMAT_TIME,
              GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, 30 * GST_SECOND);
          data.seek_done = TRUE;
        }
      }
    }
  } while (!data.terminate);

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (data.playbin, GST_STATE_NULL);
  gst_object_unref (data.playbin);
  return 0;
}

static void handle_message (CustomData *data, GstMessage *msg) {
  GError *err;
  gchar *debug_info;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error (msg, &err, &debug_info);
      g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
      g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
      g_clear_error (&err);
      g_free (debug_info);
      data->terminate = TRUE;
      break;
    case GST_MESSAGE_EOS:
      g_print ("End-Of-Stream reached.\n");
      data->terminate = TRUE;
      break;
    case GST_MESSAGE_DURATION:
      /* The duration has changed, mark the current one as invalid */
      data->duration = GST_CLOCK_TIME_NONE;
      break;
    case GST_MESSAGE_STATE_CHANGED: {
      GstState old_state, new_state, pending_state;
      gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
      if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
        g_print ("Pipeline state changed from %s to %s:\n",
            gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));

        /* Remember whether we are in the PLAYING state or not */
        data->playing = (new_state == GST_STATE_PLAYING);

        if (data->playing) {
          /* We just moved to PLAYING. Check if seeking is possible */
          GstQuery *query;
          gint64 start, end;
          query = gst_query_new_seeking (GST_FORMAT_TIME);
          if (gst_element_query (data->playbin, query)) {
            gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end);
            if (data->seek_enabled) {
              g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",
                  GST_TIME_ARGS (start), GST_TIME_ARGS (end));
            } else {
              g_print ("Seeking is DISABLED for this stream.\n");
            }
          }
          else {
            g_printerr ("Seeking query failed.");
          }
          gst_query_unref (query);
        }
      }
    } break;
    default:
      /* We should not reach here */
      g_printerr ("Unexpected message received.\n");
      break;
  }
  gst_message_unref (msg);
}
380
tutorials/basic-tutorial-5.c
Normal file
|
@ -0,0 +1,380 @@
|
||||||
|
#include <string.h>
|
||||||
|
|
||||||
|
#include <gtk/gtk.h>
|
||||||
|
#include <gst/gst.h>
|
||||||
|
#include <gst/video/videooverlay.h>
|
||||||
|
|
||||||
|
#include <gdk/gdk.h>
|
||||||
|
#if defined (GDK_WINDOWING_X11)
|
||||||
|
#include <gdk/gdkx.h>
|
||||||
|
#elif defined (GDK_WINDOWING_WIN32)
|
||||||
|
#include <gdk/gdkwin32.h>
|
||||||
|
#elif defined (GDK_WINDOWING_QUARTZ)
|
||||||
|
#include <gdk/gdkquartz.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/* Structure to contain all our information, so we can pass it around */
|
||||||
|
typedef struct _CustomData {
|
||||||
|
GstElement *playbin; /* Our one and only pipeline */
|
||||||
|
|
||||||
|
GtkWidget *slider; /* Slider widget to keep track of current position */
|
||||||
|
GtkWidget *streams_list; /* Text widget to display info about the streams */
|
||||||
|
gulong slider_update_signal_id; /* Signal ID for the slider update signal */
|
||||||
|
|
||||||
|
GstState state; /* Current state of the pipeline */
|
||||||
|
gint64 duration; /* Duration of the clip, in nanoseconds */
|
||||||
|
} CustomData;
|
||||||
|
|
||||||
|
/* This function is called when the GUI toolkit creates the physical window that will hold the video.
|
||||||
|
* At this point we can retrieve its handler (which has a different meaning depending on the windowing system)
|
||||||
|
* and pass it to GStreamer through the XOverlay interface. */
|
||||||
|
static void realize_cb (GtkWidget *widget, CustomData *data) {
|
||||||
|
GdkWindow *window = gtk_widget_get_window (widget);
|
||||||
|
guintptr window_handle;
|
||||||
|
|
||||||
|
if (!gdk_window_ensure_native (window))
|
||||||
|
g_error ("Couldn't create native window needed for GstXOverlay!");
|
||||||
|
|
||||||
|
/* Retrieve window handler from GDK */
|
||||||
|
#if defined (GDK_WINDOWING_WIN32)
|
||||||
|
window_handle = (guintptr)GDK_WINDOW_HWND (window);
|
||||||
|
#elif defined (GDK_WINDOWING_QUARTZ)
|
||||||
|
window_handle = gdk_quartz_window_get_nsview (window);
|
||||||
|
#elif defined (GDK_WINDOWING_X11)
|
||||||
|
window_handle = GDK_WINDOW_XID (window);
|
||||||
|
#endif
|
||||||
|
/* Pass it to playbin, which implements XOverlay and will forward it to the video sink */
|
||||||
|
gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->playbin), window_handle);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called when the PLAY button is clicked */
|
||||||
|
static void play_cb (GtkButton *button, CustomData *data) {
|
||||||
|
gst_element_set_state (data->playbin, GST_STATE_PLAYING);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called when the PAUSE button is clicked */
|
||||||
|
static void pause_cb (GtkButton *button, CustomData *data) {
|
||||||
|
gst_element_set_state (data->playbin, GST_STATE_PAUSED);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called when the STOP button is clicked */
|
||||||
|
static void stop_cb (GtkButton *button, CustomData *data) {
|
||||||
|
gst_element_set_state (data->playbin, GST_STATE_READY);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called when the main window is closed */
|
||||||
|
static void delete_event_cb (GtkWidget *widget, GdkEvent *event, CustomData *data) {
|
||||||
|
stop_cb (NULL, data);
|
||||||
|
gtk_main_quit ();
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called everytime the video window needs to be redrawn (due to damage/exposure,
|
||||||
|
* rescaling, etc). GStreamer takes care of this in the PAUSED and PLAYING states, otherwise,
|
||||||
|
* we simply draw a black rectangle to avoid garbage showing up. */
|
||||||
|
static gboolean draw_cb (GtkWidget *widget, cairo_t *cr, CustomData *data) {
|
||||||
|
if (data->state < GST_STATE_PAUSED) {
|
||||||
|
GtkAllocation allocation;
|
||||||
|
|
||||||
|
/* Cairo is a 2D graphics library which we use here to clean the video window.
|
||||||
|
* It is used by GStreamer for other reasons, so it will always be available to us. */
|
||||||
|
gtk_widget_get_allocation (widget, &allocation);
|
||||||
|
cairo_set_source_rgb (cr, 0, 0, 0);
|
||||||
|
cairo_rectangle (cr, 0, 0, allocation.width, allocation.height);
|
||||||
|
cairo_fill (cr);
|
||||||
|
}
|
||||||
|
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called when the slider changes its position. We perform a seek to the
|
||||||
|
* new position here. */
|
||||||
|
static void slider_cb (GtkRange *range, CustomData *data) {
|
||||||
|
gdouble value = gtk_range_get_value (GTK_RANGE (data->slider));
|
||||||
|
gst_element_seek_simple (data->playbin, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
|
||||||
|
(gint64)(value * GST_SECOND));
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This creates all the GTK+ widgets that compose our application, and registers the callbacks */
|
||||||
|
static void create_ui (CustomData *data) {
|
||||||
|
GtkWidget *main_window; /* The uppermost window, containing all other windows */
|
||||||
|
GtkWidget *video_window; /* The drawing area where the video will be shown */
|
||||||
|
GtkWidget *main_box; /* VBox to hold main_hbox and the controls */
|
||||||
|
GtkWidget *main_hbox; /* HBox to hold the video_window and the stream info text widget */
|
||||||
|
GtkWidget *controls; /* HBox to hold the buttons and the slider */
|
||||||
|
GtkWidget *play_button, *pause_button, *stop_button; /* Buttons */
|
||||||
|
|
||||||
|
main_window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
|
||||||
|
g_signal_connect (G_OBJECT (main_window), "delete-event", G_CALLBACK (delete_event_cb), data);
|
||||||
|
|
||||||
|
video_window = gtk_drawing_area_new ();
|
||||||
|
gtk_widget_set_double_buffered (video_window, FALSE);
|
||||||
|
g_signal_connect (video_window, "realize", G_CALLBACK (realize_cb), data);
|
||||||
|
g_signal_connect (video_window, "draw", G_CALLBACK (draw_cb), data);
|
||||||
|
|
||||||
|
play_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PLAY);
|
||||||
|
g_signal_connect (G_OBJECT (play_button), "clicked", G_CALLBACK (play_cb), data);
|
||||||
|
|
||||||
|
pause_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PAUSE);
|
||||||
|
g_signal_connect (G_OBJECT (pause_button), "clicked", G_CALLBACK (pause_cb), data);
|
||||||
|
|
||||||
|
stop_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_STOP);
|
||||||
|
g_signal_connect (G_OBJECT (stop_button), "clicked", G_CALLBACK (stop_cb), data);
|
||||||
|
|
||||||
|
data->slider = gtk_scale_new_with_range (GTK_ORIENTATION_HORIZONTAL, 0, 100, 1);
|
||||||
|
gtk_scale_set_draw_value (GTK_SCALE (data->slider), 0);
|
||||||
|
data->slider_update_signal_id = g_signal_connect (G_OBJECT (data->slider), "value-changed", G_CALLBACK (slider_cb), data);
|
||||||
|
|
||||||
|
data->streams_list = gtk_text_view_new ();
|
||||||
|
gtk_text_view_set_editable (GTK_TEXT_VIEW (data->streams_list), FALSE);
|
||||||
|
|
||||||
|
controls = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 0);
|
||||||
|
gtk_box_pack_start (GTK_BOX (controls), play_button, FALSE, FALSE, 2);
|
||||||
|
gtk_box_pack_start (GTK_BOX (controls), pause_button, FALSE, FALSE, 2);
|
||||||
|
gtk_box_pack_start (GTK_BOX (controls), stop_button, FALSE, FALSE, 2);
|
||||||
|
gtk_box_pack_start (GTK_BOX (controls), data->slider, TRUE, TRUE, 2);
|
||||||
|
|
||||||
|
main_hbox = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 0);
|
||||||
|
gtk_box_pack_start (GTK_BOX (main_hbox), video_window, TRUE, TRUE, 0);
|
||||||
|
gtk_box_pack_start (GTK_BOX (main_hbox), data->streams_list, FALSE, FALSE, 2);
|
||||||
|
|
||||||
|
main_box = gtk_box_new (GTK_ORIENTATION_VERTICAL, 0);
|
||||||
|
gtk_box_pack_start (GTK_BOX (main_box), main_hbox, TRUE, TRUE, 0);
|
||||||
|
gtk_box_pack_start (GTK_BOX (main_box), controls, FALSE, FALSE, 0);
|
||||||
|
gtk_container_add (GTK_CONTAINER (main_window), main_box);
|
||||||
|
gtk_window_set_default_size (GTK_WINDOW (main_window), 640, 480);
|
||||||
|
|
||||||
|
gtk_widget_show_all (main_window);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called periodically to refresh the GUI */
|
||||||
|
static gboolean refresh_ui (CustomData *data) {
|
||||||
|
gint64 current = -1;
|
||||||
|
|
||||||
|
/* We do not want to update anything unless we are in the PAUSED or PLAYING states */
|
||||||
|
if (data->state < GST_STATE_PAUSED)
|
||||||
|
return TRUE;
|
||||||
|
|
||||||
|
/* If we didn't know it yet, query the stream duration */
|
||||||
|
if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
|
||||||
|
if (!gst_element_query_duration (data->playbin, GST_FORMAT_TIME, &data->duration)) {
|
||||||
|
g_printerr ("Could not query current duration.\n");
|
||||||
|
} else {
|
||||||
|
/* Set the range of the slider to the clip duration, in SECONDS */
|
||||||
|
gtk_range_set_range (GTK_RANGE (data->slider), 0, (gdouble)data->duration / GST_SECOND);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (gst_element_query_position (data->playbin, GST_FORMAT_TIME, ¤t)) {
|
||||||
|
/* Block the "value-changed" signal, so the slider_cb function is not called
|
||||||
|
* (which would trigger a seek the user has not requested) */
|
||||||
|
g_signal_handler_block (data->slider, data->slider_update_signal_id);
|
||||||
|
/* Set the position of the slider to the current pipeline positoin, in SECONDS */
|
||||||
|
gtk_range_set_value (GTK_RANGE (data->slider), (gdouble)current / GST_SECOND);
|
||||||
|
/* Re-enable the signal */
|
||||||
|
g_signal_handler_unblock (data->slider, data->slider_update_signal_id);
|
||||||
|
}
|
||||||
|
return TRUE;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called when new metadata is discovered in the stream */
|
||||||
|
static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
|
||||||
|
/* We are possibly in a GStreamer working thread, so we notify the main
|
||||||
|
* thread of this event through a message in the bus */
|
||||||
|
gst_element_post_message (playbin,
|
||||||
|
gst_message_new_application (GST_OBJECT (playbin),
|
||||||
|
gst_structure_new_empty ("tags-changed")));
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called when an error message is posted on the bus */
|
||||||
|
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
|
||||||
|
GError *err;
|
||||||
|
gchar *debug_info;
|
||||||
|
|
||||||
|
/* Print error details on the screen */
|
||||||
|
gst_message_parse_error (msg, &err, &debug_info);
|
||||||
|
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
|
||||||
|
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
|
||||||
|
g_clear_error (&err);
|
||||||
|
g_free (debug_info);
|
||||||
|
|
||||||
|
/* Set the pipeline to READY (which stops playback) */
|
||||||
|
gst_element_set_state (data->playbin, GST_STATE_READY);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called when an End-Of-Stream message is posted on the bus.
|
||||||
|
* We just set the pipeline to READY (which stops playback) */
|
||||||
|
static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
|
||||||
|
g_print ("End-Of-Stream reached.\n");
|
||||||
|
gst_element_set_state (data->playbin, GST_STATE_READY);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called when the pipeline changes states. We use it to
|
||||||
|
* keep track of the current state. */
|
||||||
|
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
|
||||||
|
GstState old_state, new_state, pending_state;
|
||||||
|
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
|
||||||
|
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
|
||||||
|
data->state = new_state;
|
||||||
|
g_print ("State set to %s\n", gst_element_state_get_name (new_state));
|
||||||
|
if (old_state == GST_STATE_READY && new_state == GST_STATE_PAUSED) {
|
||||||
|
/* For extra responsiveness, we refresh the GUI as soon as we reach the PAUSED state */
|
||||||
|
refresh_ui (data);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Extract metadata from all the streams and write it to the text widget in the GUI */
|
||||||
|
static void analyze_streams (CustomData *data) {
|
||||||
|
gint i;
|
||||||
|
GstTagList *tags;
|
||||||
|
gchar *str, *total_str;
|
||||||
|
guint rate;
|
||||||
|
gint n_video, n_audio, n_text;
|
||||||
|
GtkTextBuffer *text;
|
||||||
|
|
||||||
|
/* Clean current contents of the widget */
|
||||||
|
text = gtk_text_view_get_buffer (GTK_TEXT_VIEW (data->streams_list));
|
||||||
|
gtk_text_buffer_set_text (text, "", -1);
|
||||||
|
|
||||||
|
/* Read some properties */
|
||||||
|
g_object_get (data->playbin, "n-video", &n_video, NULL);
|
||||||
|
g_object_get (data->playbin, "n-audio", &n_audio, NULL);
|
||||||
|
g_object_get (data->playbin, "n-text", &n_text, NULL);
|
||||||
|
|
||||||
|
for (i = 0; i < n_video; i++) {
|
||||||
|
tags = NULL;
|
||||||
|
/* Retrieve the stream's video tags */
|
||||||
|
g_signal_emit_by_name (data->playbin, "get-video-tags", i, &tags);
|
||||||
|
if (tags) {
|
||||||
|
total_str = g_strdup_printf ("video stream %d:\n", i);
|
||||||
|
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
|
||||||
|
g_free (total_str);
|
||||||
|
gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str);
|
||||||
|
total_str = g_strdup_printf (" codec: %s\n", str ? str : "unknown");
|
||||||
|
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
|
||||||
|
g_free (total_str);
|
||||||
|
g_free (str);
|
||||||
|
gst_tag_list_free (tags);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i = 0; i < n_audio; i++) {
|
||||||
|
tags = NULL;
|
||||||
|
/* Retrieve the stream's audio tags */
|
||||||
|
g_signal_emit_by_name (data->playbin, "get-audio-tags", i, &tags);
|
||||||
|
if (tags) {
|
||||||
|
total_str = g_strdup_printf ("\naudio stream %d:\n", i);
|
||||||
|
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
|
||||||
|
g_free (total_str);
|
||||||
|
if (gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &str)) {
|
||||||
|
total_str = g_strdup_printf (" codec: %s\n", str);
|
||||||
|
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
|
||||||
|
g_free (total_str);
|
||||||
|
g_free (str);
|
||||||
|
}
|
||||||
|
if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
|
||||||
|
total_str = g_strdup_printf (" language: %s\n", str);
|
||||||
|
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
|
||||||
|
g_free (total_str);
|
||||||
|
g_free (str);
|
||||||
|
}
|
||||||
|
if (gst_tag_list_get_uint (tags, GST_TAG_BITRATE, &rate)) {
|
||||||
|
total_str = g_strdup_printf (" bitrate: %d\n", rate);
|
||||||
|
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
|
||||||
|
g_free (total_str);
|
||||||
|
}
|
||||||
|
gst_tag_list_free (tags);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i = 0; i < n_text; i++) {
|
||||||
|
tags = NULL;
|
||||||
|
/* Retrieve the stream's subtitle tags */
|
||||||
|
g_signal_emit_by_name (data->playbin, "get-text-tags", i, &tags);
|
||||||
|
if (tags) {
|
||||||
|
total_str = g_strdup_printf ("\nsubtitle stream %d:\n", i);
|
||||||
|
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
|
||||||
|
g_free (total_str);
|
||||||
|
if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
|
||||||
|
total_str = g_strdup_printf (" language: %s\n", str);
|
||||||
|
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
|
||||||
|
g_free (total_str);
|
||||||
|
g_free (str);
|
||||||
|
}
|
||||||
|
gst_tag_list_free (tags);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called when an "application" message is posted on the bus.
|
||||||
|
* Here we retrieve the message posted by the tags_cb callback */
|
||||||
|
static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
|
||||||
|
if (g_strcmp0 (gst_structure_get_name (gst_message_get_structure (msg)), "tags-changed") == 0) {
|
||||||
|
/* If the message is the "tags-changed" (only one we are currently issuing), update
|
||||||
|
* the stream info GUI */
|
||||||
|
analyze_streams (data);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
int main(int argc, char *argv[]) {
|
||||||
|
CustomData data;
|
||||||
|
GstStateChangeReturn ret;
|
||||||
|
GstBus *bus;
|
||||||
|
|
||||||
|
/* Initialize GTK */
|
||||||
|
gtk_init (&argc, &argv);
|
||||||
|
|
||||||
|
/* Initialize GStreamer */
|
||||||
|
gst_init (&argc, &argv);
|
||||||
|
|
||||||
|
/* Initialize our data structure */
|
||||||
|
memset (&data, 0, sizeof (data));
|
||||||
|
data.duration = GST_CLOCK_TIME_NONE;
|
||||||
|
|
||||||
|
/* Create the elements */
|
||||||
|
data.playbin = gst_element_factory_make ("playbin", "playbin");
|
||||||
|
|
||||||
|
if (!data.playbin) {
|
||||||
|
g_printerr ("Not all elements could be created.\n");
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Set the URI to play */
|
||||||
|
g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
|
||||||
|
|
||||||
|
/* Connect to interesting signals in playbin */
|
||||||
|
g_signal_connect (G_OBJECT (data.playbin), "video-tags-changed", (GCallback) tags_cb, &data);
|
||||||
|
g_signal_connect (G_OBJECT (data.playbin), "audio-tags-changed", (GCallback) tags_cb, &data);
|
||||||
|
g_signal_connect (G_OBJECT (data.playbin), "text-tags-changed", (GCallback) tags_cb, &data);
|
||||||
|
|
||||||
|
/* Create the GUI */
|
||||||
|
create_ui (&data);
|
||||||
|
|
||||||
|
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
|
||||||
|
bus = gst_element_get_bus (data.playbin);
|
||||||
|
gst_bus_add_signal_watch (bus);
|
||||||
|
g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
|
||||||
|
g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, &data);
|
||||||
|
g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, &data);
|
||||||
|
g_signal_connect (G_OBJECT (bus), "message::application", (GCallback)application_cb, &data);
|
||||||
|
gst_object_unref (bus);
|
||||||
|
|
||||||
|
/* Start playing */
|
||||||
|
ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
|
||||||
|
if (ret == GST_STATE_CHANGE_FAILURE) {
|
||||||
|
g_printerr ("Unable to set the pipeline to the playing state.\n");
|
||||||
|
gst_object_unref (data.playbin);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Register a function that GLib will call every second */
|
||||||
|
g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);
|
||||||
|
|
||||||
|
/* Start the GTK main loop. We will not regain control until gtk_main_quit is called. */
|
||||||
|
gtk_main ();
|
||||||
|
|
||||||
|
/* Free resources */
|
||||||
|
gst_element_set_state (data.playbin, GST_STATE_NULL);
|
||||||
|
gst_object_unref (data.playbin);
|
||||||
|
return 0;
|
||||||
|
}
207
tutorials/basic-tutorial-6.c
Normal file
@ -0,0 +1,207 @@
#include <gst/gst.h>

/* Functions below print the Capabilities in a human-friendly format */
static gboolean print_field (GQuark field, const GValue * value, gpointer pfx) {
  gchar *str = gst_value_serialize (value);

  g_print ("%s %15s: %s\n", (gchar *) pfx, g_quark_to_string (field), str);
  g_free (str);
  return TRUE;
}

static void print_caps (const GstCaps * caps, const gchar * pfx) {
  guint i;

  g_return_if_fail (caps != NULL);

  if (gst_caps_is_any (caps)) {
    g_print ("%sANY\n", pfx);
    return;
  }
  if (gst_caps_is_empty (caps)) {
    g_print ("%sEMPTY\n", pfx);
    return;
  }

  for (i = 0; i < gst_caps_get_size (caps); i++) {
    GstStructure *structure = gst_caps_get_structure (caps, i);

    g_print ("%s%s\n", pfx, gst_structure_get_name (structure));
    gst_structure_foreach (structure, print_field, (gpointer) pfx);
  }
}

/* Prints information about a Pad Template, including its Capabilities */
static void print_pad_templates_information (GstElementFactory * factory) {
  const GList *pads;
  GstStaticPadTemplate *padtemplate;

  g_print ("Pad Templates for %s:\n", gst_element_factory_get_longname (factory));
  if (!gst_element_factory_get_num_pad_templates (factory)) {
    g_print (" none\n");
    return;
  }

  pads = gst_element_factory_get_static_pad_templates (factory);
  while (pads) {
    padtemplate = pads->data;
    pads = g_list_next (pads);

    if (padtemplate->direction == GST_PAD_SRC)
      g_print (" SRC template: '%s'\n", padtemplate->name_template);
    else if (padtemplate->direction == GST_PAD_SINK)
      g_print (" SINK template: '%s'\n", padtemplate->name_template);
    else
      g_print (" UNKNOWN!!! template: '%s'\n", padtemplate->name_template);

    if (padtemplate->presence == GST_PAD_ALWAYS)
      g_print (" Availability: Always\n");
    else if (padtemplate->presence == GST_PAD_SOMETIMES)
      g_print (" Availability: Sometimes\n");
    else if (padtemplate->presence == GST_PAD_REQUEST) {
      g_print (" Availability: On request\n");
    } else
      g_print (" Availability: UNKNOWN!!!\n");

    if (padtemplate->static_caps.string) {
      GstCaps *caps;

      g_print (" Capabilities:\n");
      caps = gst_static_caps_get (&padtemplate->static_caps);
      print_caps (caps, " ");
      gst_caps_unref (caps);
    }

    g_print ("\n");
  }
}

/* Shows the CURRENT capabilities of the requested pad in the given element */
static void print_pad_capabilities (GstElement *element, gchar *pad_name) {
  GstPad *pad = NULL;
  GstCaps *caps = NULL;

  /* Retrieve pad */
  pad = gst_element_get_static_pad (element, pad_name);
  if (!pad) {
    g_printerr ("Could not retrieve pad '%s'\n", pad_name);
    return;
  }

  /* Retrieve negotiated caps (or acceptable caps if negotiation is not finished yet) */
  caps = gst_pad_get_current_caps (pad);
  if (!caps)
    caps = gst_pad_query_caps (pad, NULL);

  /* Print and free */
  g_print ("Caps for the %s pad:\n", pad_name);
  print_caps (caps, " ");
  gst_caps_unref (caps);
  gst_object_unref (pad);
}

int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *sink;
  GstElementFactory *source_factory, *sink_factory;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;
  gboolean terminate = FALSE;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the element factories */
  source_factory = gst_element_factory_find ("audiotestsrc");
  sink_factory = gst_element_factory_find ("autoaudiosink");
  if (!source_factory || !sink_factory) {
    g_printerr ("Not all element factories could be created.\n");
    return -1;
  }

  /* Print information about the pad templates of these factories */
  print_pad_templates_information (source_factory);
  print_pad_templates_information (sink_factory);

  /* Ask the factories to instantiate actual elements */
  source = gst_element_factory_create (source_factory, "source");
  sink = gst_element_factory_create (sink_factory, "sink");

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !source || !sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
  if (gst_element_link (source, sink) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Print initial negotiated caps (in NULL state) */
  g_print ("In NULL state:\n");
  print_pad_capabilities (sink, "sink");

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state (check the bus for error messages).\n");
  }

  /* Wait until error, EOS or State Change */
  bus = gst_element_get_bus (pipeline);
  do {
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS |
        GST_MESSAGE_STATE_CHANGED);

    /* Parse message */
    if (msg != NULL) {
      GError *err;
      gchar *debug_info;

      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_ERROR:
          gst_message_parse_error (msg, &err, &debug_info);
          g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
          g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
          g_clear_error (&err);
          g_free (debug_info);
          terminate = TRUE;
          break;
        case GST_MESSAGE_EOS:
          g_print ("End-Of-Stream reached.\n");
          terminate = TRUE;
          break;
        case GST_MESSAGE_STATE_CHANGED:
          /* We are only interested in state-changed messages from the pipeline */
          if (GST_MESSAGE_SRC (msg) == GST_OBJECT (pipeline)) {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
            g_print ("\nPipeline state changed from %s to %s:\n",
                gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
            /* Print the current capabilities of the sink element */
            print_pad_capabilities (sink, "sink");
          }
          break;
        default:
          /* We should not reach here because we only asked for ERRORs, EOS and STATE_CHANGED */
          g_printerr ("Unexpected message received.\n");
          break;
      }
      gst_message_unref (msg);
    }
  } while (!terminate);

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  gst_object_unref (source_factory);
  gst_object_unref (sink_factory);
  return 0;
}
89
tutorials/basic-tutorial-7.c
Normal file
@ -0,0 +1,89 @@
#include <gst/gst.h>

int main(int argc, char *argv[]) {
  GstElement *pipeline, *audio_source, *tee, *audio_queue, *audio_convert, *audio_resample, *audio_sink;
  GstElement *video_queue, *visual, *video_convert, *video_sink;
  GstBus *bus;
  GstMessage *msg;
  GstPadTemplate *tee_src_pad_template;
  GstPad *tee_audio_pad, *tee_video_pad;
  GstPad *queue_audio_pad, *queue_video_pad;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  audio_source = gst_element_factory_make ("audiotestsrc", "audio_source");
  tee = gst_element_factory_make ("tee", "tee");
  audio_queue = gst_element_factory_make ("queue", "audio_queue");
  audio_convert = gst_element_factory_make ("audioconvert", "audio_convert");
  audio_resample = gst_element_factory_make ("audioresample", "audio_resample");
  audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
  video_queue = gst_element_factory_make ("queue", "video_queue");
  visual = gst_element_factory_make ("wavescope", "visual");
  video_convert = gst_element_factory_make ("videoconvert", "video_convert");
  video_sink = gst_element_factory_make ("autovideosink", "video_sink");

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !audio_source || !tee || !audio_queue || !audio_convert || !audio_resample || !audio_sink ||
      !video_queue || !visual || !video_convert || !video_sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Configure elements */
  g_object_set (audio_source, "freq", 215.0f, NULL);
  g_object_set (visual, "shader", 0, "style", 1, NULL);

  /* Link all elements that can be automatically linked because they have "Always" pads */
  gst_bin_add_many (GST_BIN (pipeline), audio_source, tee, audio_queue, audio_convert, audio_resample, audio_sink,
      video_queue, visual, video_convert, video_sink, NULL);
  if (gst_element_link_many (audio_source, tee, NULL) != TRUE ||
      gst_element_link_many (audio_queue, audio_convert, audio_resample, audio_sink, NULL) != TRUE ||
      gst_element_link_many (video_queue, visual, video_convert, video_sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Manually link the Tee, which has "Request" pads */
  tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (tee), "src_%u");
  tee_audio_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);
  g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name (tee_audio_pad));
  queue_audio_pad = gst_element_get_static_pad (audio_queue, "sink");
  tee_video_pad = gst_element_request_pad (tee, tee_src_pad_template, NULL, NULL);
  g_print ("Obtained request pad %s for video branch.\n", gst_pad_get_name (tee_video_pad));
  queue_video_pad = gst_element_get_static_pad (video_queue, "sink");
  if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK ||
      gst_pad_link (tee_video_pad, queue_video_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Tee could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }
  gst_object_unref (queue_audio_pad);
  gst_object_unref (queue_video_pad);

  /* Start playing the pipeline */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Release the request pads from the Tee, and unref them */
  gst_element_release_request_pad (tee, tee_audio_pad);
  gst_element_release_request_pad (tee, tee_video_pad);
  gst_object_unref (tee_audio_pad);
  gst_object_unref (tee_video_pad);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_object_unref (pipeline);
  return 0;
}
234
tutorials/basic-tutorial-8.c
Normal file
@ -0,0 +1,234 @@
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include <string.h>

#define CHUNK_SIZE 1024   /* Number of bytes we are sending in each buffer */
#define SAMPLE_RATE 44100 /* Samples per second we are sending */

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  GstElement *pipeline, *app_source, *tee, *audio_queue, *audio_convert1, *audio_resample, *audio_sink;
  GstElement *video_queue, *audio_convert2, *visual, *video_convert, *video_sink;
  GstElement *app_queue, *app_sink;

  guint64 num_samples;  /* Number of samples generated so far (for timestamp generation) */
  gfloat a, b, c, d;    /* For waveform generation */

  guint sourceid;       /* To control the GSource */

  GMainLoop *main_loop; /* GLib's Main Loop */
} CustomData;

/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
 * The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
 * and is removed when appsrc has enough data (enough-data signal).
 */
static gboolean push_data (CustomData *data) {
  GstBuffer *buffer;
  GstFlowReturn ret;
  int i;
  GstMapInfo map;
  gint16 *raw;
  gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
  gfloat freq;

  /* Create a new empty buffer */
  buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);

  /* Set its timestamp and duration (the buffer holds num_samples samples, not CHUNK_SIZE) */
  GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples, GST_SECOND, SAMPLE_RATE);
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (num_samples, GST_SECOND, SAMPLE_RATE);

  /* Generate some psychedelic waveforms */
  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  raw = (gint16 *)map.data;
  data->c += data->d;
  data->d -= data->c / 1000;
  freq = 1100 + 1000 * data->d;
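  /* The c/d recurrence above slowly sweeps the tone's pitch, while the a/b recurrence in the
   * loop below behaves like a simple harmonic oscillator, so each chunk carries a roughly
   * sinusoidal waveform whose frequency wobbles over time */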
  for (i = 0; i < num_samples; i++) {
    data->a += data->b;
    data->b -= data->a / freq;
    raw[i] = (gint16)(500 * data->a);
  }
  gst_buffer_unmap (buffer, &map);
  data->num_samples += num_samples;

  /* Push the buffer into the appsrc */
  g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);

  /* Free the buffer now that we are done with it */
  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* We got some error, stop sending data */
    return FALSE;
  }

  return TRUE;
}

/* This signal callback triggers when appsrc needs data. Here, we add an idle handler
 * to the mainloop to start pushing data into the appsrc */
static void start_feed (GstElement *source, guint size, CustomData *data) {
  if (data->sourceid == 0) {
    g_print ("Start feeding\n");
    data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
  }
}

/* This callback triggers when appsrc has enough data and we can stop sending.
 * We remove the idle handler from the mainloop */
static void stop_feed (GstElement *source, CustomData *data) {
  if (data->sourceid != 0) {
    g_print ("Stop feeding\n");
    g_source_remove (data->sourceid);
    data->sourceid = 0;
  }
}

/* The appsink has received a buffer */
static void new_sample (GstElement *sink, CustomData *data) {
  GstSample *sample;

  /* Retrieve the buffer */
  g_signal_emit_by_name (sink, "pull-sample", &sample);
  if (sample) {
    /* The only thing we do in this example is print a * to indicate a received buffer */
    g_print ("*");
    gst_sample_unref (sample);
  }
}

/* This function is called when an error message is posted on the bus */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;

  /* Print error details on the screen */
  gst_message_parse_error (msg, &err, &debug_info);
  g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
  g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
  g_clear_error (&err);
  g_free (debug_info);

  g_main_loop_quit (data->main_loop);
}

int main(int argc, char *argv[]) {
  CustomData data;
  GstPadTemplate *tee_src_pad_template;
  GstPad *tee_audio_pad, *tee_video_pad, *tee_app_pad;
  GstPad *queue_audio_pad, *queue_video_pad, *queue_app_pad;
  GstAudioInfo info;
  GstCaps *audio_caps;
  GstBus *bus;

  /* Initialize custom data structure */
  memset (&data, 0, sizeof (data));
  data.b = 1; /* For waveform generation */
  data.d = 1;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  data.app_source = gst_element_factory_make ("appsrc", "audio_source");
  data.tee = gst_element_factory_make ("tee", "tee");
  data.audio_queue = gst_element_factory_make ("queue", "audio_queue");
  data.audio_convert1 = gst_element_factory_make ("audioconvert", "audio_convert1");
  data.audio_resample = gst_element_factory_make ("audioresample", "audio_resample");
  data.audio_sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
  data.video_queue = gst_element_factory_make ("queue", "video_queue");
  data.audio_convert2 = gst_element_factory_make ("audioconvert", "audio_convert2");
  data.visual = gst_element_factory_make ("wavescope", "visual");
  data.video_convert = gst_element_factory_make ("videoconvert", "video_convert");
  data.video_sink = gst_element_factory_make ("autovideosink", "video_sink");
  data.app_queue = gst_element_factory_make ("queue", "app_queue");
  data.app_sink = gst_element_factory_make ("appsink", "app_sink");

  /* Create the empty pipeline */
  data.pipeline = gst_pipeline_new ("test-pipeline");

  if (!data.pipeline || !data.app_source || !data.tee || !data.audio_queue || !data.audio_convert1 ||
      !data.audio_resample || !data.audio_sink || !data.video_queue || !data.audio_convert2 || !data.visual ||
      !data.video_convert || !data.video_sink || !data.app_queue || !data.app_sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Configure wavescope */
  g_object_set (data.visual, "shader", 0, "style", 0, NULL);

  /* Configure appsrc */
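  /* These caps must describe the buffers push_data produces: mono, 16-bit samples at SAMPLE_RATE */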
  gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, SAMPLE_RATE, 1, NULL);
  audio_caps = gst_audio_info_to_caps (&info);
  g_object_set (data.app_source, "caps", audio_caps, "format", GST_FORMAT_TIME, NULL);
  g_signal_connect (data.app_source, "need-data", G_CALLBACK (start_feed), &data);
  g_signal_connect (data.app_source, "enough-data", G_CALLBACK (stop_feed), &data);

  /* Configure appsink */
  g_object_set (data.app_sink, "emit-signals", TRUE, "caps", audio_caps, NULL);
  g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data);
  gst_caps_unref (audio_caps);

  /* Link all elements that can be automatically linked because they have "Always" pads */
  gst_bin_add_many (GST_BIN (data.pipeline), data.app_source, data.tee, data.audio_queue, data.audio_convert1, data.audio_resample,
      data.audio_sink, data.video_queue, data.audio_convert2, data.visual, data.video_convert, data.video_sink, data.app_queue,
      data.app_sink, NULL);
  if (gst_element_link_many (data.app_source, data.tee, NULL) != TRUE ||
      gst_element_link_many (data.audio_queue, data.audio_convert1, data.audio_resample, data.audio_sink, NULL) != TRUE ||
      gst_element_link_many (data.video_queue, data.audio_convert2, data.visual, data.video_convert, data.video_sink, NULL) != TRUE ||
      gst_element_link_many (data.app_queue, data.app_sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Manually link the Tee, which has "Request" pads */
  tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (data.tee), "src_%u");
  tee_audio_pad = gst_element_request_pad (data.tee, tee_src_pad_template, NULL, NULL);
  g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name (tee_audio_pad));
  queue_audio_pad = gst_element_get_static_pad (data.audio_queue, "sink");
  tee_video_pad = gst_element_request_pad (data.tee, tee_src_pad_template, NULL, NULL);
  g_print ("Obtained request pad %s for video branch.\n", gst_pad_get_name (tee_video_pad));
  queue_video_pad = gst_element_get_static_pad (data.video_queue, "sink");
  tee_app_pad = gst_element_request_pad (data.tee, tee_src_pad_template, NULL, NULL);
  g_print ("Obtained request pad %s for app branch.\n", gst_pad_get_name (tee_app_pad));
  queue_app_pad = gst_element_get_static_pad (data.app_queue, "sink");
  if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK ||
      gst_pad_link (tee_video_pad, queue_video_pad) != GST_PAD_LINK_OK ||
      gst_pad_link (tee_app_pad, queue_app_pad) != GST_PAD_LINK_OK) {
    g_printerr ("Tee could not be linked\n");
    gst_object_unref (data.pipeline);
    return -1;
  }
  gst_object_unref (queue_audio_pad);
  gst_object_unref (queue_video_pad);
  gst_object_unref (queue_app_pad);

  /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
  bus = gst_element_get_bus (data.pipeline);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
  gst_object_unref (bus);

  /* Start playing the pipeline */
  gst_element_set_state (data.pipeline, GST_STATE_PLAYING);

  /* Create a GLib Main Loop and set it to run */
  data.main_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.main_loop);

  /* Release the request pads from the Tee, and unref them */
  gst_element_release_request_pad (data.tee, tee_audio_pad);
  gst_element_release_request_pad (data.tee, tee_video_pad);
  gst_element_release_request_pad (data.tee, tee_app_pad);
  gst_object_unref (tee_audio_pad);
  gst_object_unref (tee_video_pad);
  gst_object_unref (tee_app_pad);

  /* Free resources */
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}
216
tutorials/basic-tutorial-9.c
Normal file
@ -0,0 +1,216 @@
#include <string.h>
#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>

/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
  GstDiscoverer *discoverer;
  GMainLoop *loop;
} CustomData;

/* Print a tag in a human-readable format (name: value) */
static void print_tag_foreach (const GstTagList *tags, const gchar *tag, gpointer user_data) {
  GValue val = { 0, };
  gchar *str;
  gint depth = GPOINTER_TO_INT (user_data);

  gst_tag_list_copy_value (&val, tags, tag);

  if (G_VALUE_HOLDS_STRING (&val))
    str = g_value_dup_string (&val);
  else
    str = gst_value_serialize (&val);

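  /* The "%*s" directive below pads its argument to 2 * depth characters, so nested
   * streams are indented according to their depth */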
g_print ("%*s%s: %s\n", 2 * depth, " ", gst_tag_get_nick (tag), str);
|
||||||
|
g_free (str);
|
||||||
|
|
||||||
|
g_value_unset (&val);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Print information regarding a stream */
|
||||||
|
static void print_stream_info (GstDiscovererStreamInfo *info, gint depth) {
|
||||||
|
gchar *desc = NULL;
|
||||||
|
GstCaps *caps;
|
||||||
|
const GstTagList *tags;
|
||||||
|
|
||||||
|
caps = gst_discoverer_stream_info_get_caps (info);
|
||||||
|
|
||||||
|
if (caps) {
|
||||||
|
if (gst_caps_is_fixed (caps))
|
||||||
|
desc = gst_pb_utils_get_codec_description (caps);
|
||||||
|
else
|
||||||
|
desc = gst_caps_to_string (caps);
|
||||||
|
gst_caps_unref (caps);
|
||||||
|
}
|
||||||
|
|
||||||
|
g_print ("%*s%s: %s\n", 2 * depth, " ", gst_discoverer_stream_info_get_stream_type_nick (info), (desc ? desc : ""));
|
||||||
|
|
||||||
|
if (desc) {
|
||||||
|
g_free (desc);
|
||||||
|
desc = NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
tags = gst_discoverer_stream_info_get_tags (info);
|
||||||
|
if (tags) {
|
||||||
|
g_print ("%*sTags:\n", 2 * (depth + 1), " ");
|
||||||
|
gst_tag_list_foreach (tags, print_tag_foreach, GINT_TO_POINTER (depth + 2));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Print information regarding a stream and its substreams, if any */
|
||||||
|
static void print_topology (GstDiscovererStreamInfo *info, gint depth) {
|
||||||
|
GstDiscovererStreamInfo *next;
|
||||||
|
|
||||||
|
if (!info)
|
||||||
|
return;
|
||||||
|
|
||||||
|
print_stream_info (info, depth);
|
||||||
|
|
||||||
|
next = gst_discoverer_stream_info_get_next (info);
|
||||||
|
if (next) {
|
||||||
|
print_topology (next, depth + 1);
|
||||||
|
gst_discoverer_stream_info_unref (next);
|
||||||
|
} else if (GST_IS_DISCOVERER_CONTAINER_INFO (info)) {
|
||||||
|
GList *tmp, *streams;
|
||||||
|
|
||||||
|
streams = gst_discoverer_container_info_get_streams (GST_DISCOVERER_CONTAINER_INFO (info));
|
||||||
|
for (tmp = streams; tmp; tmp = tmp->next) {
|
||||||
|
GstDiscovererStreamInfo *tmpinf = (GstDiscovererStreamInfo *) tmp->data;
|
||||||
|
print_topology (tmpinf, depth + 1);
|
||||||
|
}
|
||||||
|
gst_discoverer_stream_info_list_free (streams);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called every time the discoverer has information regarding
|
||||||
|
* one of the URIs we provided.*/
|
||||||
|
static void on_discovered_cb (GstDiscoverer *discoverer, GstDiscovererInfo *info, GError *err, CustomData *data) {
|
||||||
|
GstDiscovererResult result;
|
||||||
|
const gchar *uri;
|
||||||
|
const GstTagList *tags;
|
||||||
|
GstDiscovererStreamInfo *sinfo;
|
||||||
|
|
||||||
|
uri = gst_discoverer_info_get_uri (info);
|
||||||
|
result = gst_discoverer_info_get_result (info);
|
||||||
|
switch (result) {
|
||||||
|
case GST_DISCOVERER_URI_INVALID:
|
||||||
|
g_print ("Invalid URI '%s'\n", uri);
|
||||||
|
break;
|
||||||
|
case GST_DISCOVERER_ERROR:
|
||||||
|
g_print ("Discoverer error: %s\n", err->message);
|
||||||
|
break;
|
||||||
|
case GST_DISCOVERER_TIMEOUT:
|
||||||
|
g_print ("Timeout\n");
|
||||||
|
break;
|
||||||
|
case GST_DISCOVERER_BUSY:
|
||||||
|
g_print ("Busy\n");
|
||||||
|
break;
|
||||||
|
case GST_DISCOVERER_MISSING_PLUGINS:{
|
||||||
|
const GstStructure *s;
|
||||||
|
gchar *str;
|
||||||
|
|
||||||
|
s = gst_discoverer_info_get_misc (info);
|
||||||
|
str = gst_structure_to_string (s);
|
||||||
|
|
||||||
|
g_print ("Missing plugins: %s\n", str);
|
||||||
|
g_free (str);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
case GST_DISCOVERER_OK:
|
||||||
|
g_print ("Discovered '%s'\n", uri);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result != GST_DISCOVERER_OK) {
|
||||||
|
g_printerr ("This URI cannot be played\n");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* If we got no error, show the retrieved information */
|
||||||
|
|
||||||
|
g_print ("\nDuration: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (gst_discoverer_info_get_duration (info)));
|
||||||
|
|
||||||
|
tags = gst_discoverer_info_get_tags (info);
|
||||||
|
if (tags) {
|
||||||
|
g_print ("Tags:\n");
|
||||||
|
gst_tag_list_foreach (tags, print_tag_foreach, GINT_TO_POINTER (1));
|
||||||
|
}
|
||||||
|
|
||||||
|
g_print ("Seekable: %s\n", (gst_discoverer_info_get_seekable (info) ? "yes" : "no"));
|
||||||
|
|
||||||
|
g_print ("\n");
|
||||||
|
|
||||||
|
sinfo = gst_discoverer_info_get_stream_info (info);
|
||||||
|
if (!sinfo)
|
||||||
|
return;
|
||||||
|
|
||||||
|
g_print ("Stream information:\n");
|
||||||
|
|
||||||
|
print_topology (sinfo, 1);
|
||||||
|
|
||||||
|
gst_discoverer_stream_info_unref (sinfo);
|
||||||
|
|
||||||
|
g_print ("\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
/* This function is called when the discoverer has finished examining
|
||||||
|
* all the URIs we provided.*/
|
||||||
|
static void on_finished_cb (GstDiscoverer *discoverer, CustomData *data) {
|
||||||
|
g_print ("Finished discovering\n");
|
||||||
|
|
||||||
|
g_main_loop_quit (data->loop);
|
||||||
|
}
|
||||||
|
|
||||||
|
int main (int argc, char **argv) {
|
||||||
|
CustomData data;
|
||||||
|
GError *err = NULL;
|
||||||
|
gchar *uri = "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm";
|
||||||
|
|
||||||
|
/* if a URI was provided, use it instead of the default one */
|
||||||
|
if (argc > 1) {
|
||||||
|
uri = argv[1];
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Initialize cumstom data structure */
|
||||||
|
memset (&data, 0, sizeof (data));
|
||||||
|
|
||||||
|
/* Initialize GStreamer */
|
||||||
|
gst_init (&argc, &argv);
|
||||||
|
|
||||||
|
g_print ("Discovering '%s'\n", uri);
|
||||||
|
|
||||||
|
/* Instantiate the Discoverer */
|
||||||
|
data.discoverer = gst_discoverer_new (5 * GST_SECOND, &err);
|
||||||
|
if (!data.discoverer) {
|
||||||
|
g_print ("Error creating discoverer instance: %s\n", err->message);
|
||||||
|
g_clear_error (&err);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Connect to the interesting signals */
|
||||||
|
g_signal_connect (data.discoverer, "discovered", G_CALLBACK (on_discovered_cb), &data);
|
||||||
|
g_signal_connect (data.discoverer, "finished", G_CALLBACK (on_finished_cb), &data);
|
||||||
|
|
||||||
|
/* Start the discoverer process (nothing to do yet) */
|
||||||
|
gst_discoverer_start (data.discoverer);
|
||||||
|
|
||||||
|
/* Add a request to process asynchronously the URI passed through the command line */
|
||||||
|
if (!gst_discoverer_discover_uri_async (data.discoverer, uri)) {
|
||||||
|
g_print ("Failed to start discovering URI '%s'\n", uri);
|
||||||
|
g_object_unref (data.discoverer);
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Create a GLib Main Loop and set it to run, so we can wait for the signals */
|
||||||
|
data.loop = g_main_loop_new (NULL, FALSE);
|
||||||
|
g_main_loop_run (data.loop);
|
||||||
|
|
||||||
|
/* Stop the discoverer process */
|
||||||
|
gst_discoverer_stop (data.discoverer);
|
||||||
|
|
||||||
|
/* Free resources */
|
||||||
|
g_object_unref (data.discoverer);
|
||||||
|
g_main_loop_unref (data.loop);
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
68
tutorials/icons/gstreamer-logo-1.svg
Normal file
@ -0,0 +1,68 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Generator: Adobe Illustrator 12.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 51448) -->

<svg
   xmlns:dc="http://purl.org/dc/elements/1.1/"
   xmlns:cc="http://creativecommons.org/ns#"
   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
   xmlns:svg="http://www.w3.org/2000/svg"
   xmlns="http://www.w3.org/2000/svg"
   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
   version="1.1"
   id="Layer_1"
   width="100"
   height="100"
   viewBox="0 0 100 99.999999"
   overflow="visible"
   enable-background="new 0 0 280.22 69.387"
   xml:space="preserve"
   inkscape:version="0.91 r13725"
   sodipodi:docname="gstreamer-logo-1.svg"
   style="overflow:visible"><metadata
   id="metadata3365"><rdf:RDF><cc:Work
   rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
   rdf:resource="http://purl.org/dc/dcmitype/StillImage" /><dc:title></dc:title></cc:Work></rdf:RDF></metadata><defs
   id="defs3363" /><sodipodi:namedview
   pagecolor="#ffffff"
   bordercolor="#666666"
   borderopacity="1"
   objecttolerance="10"
   gridtolerance="10"
   guidetolerance="10"
   inkscape:pageopacity="0"
   inkscape:pageshadow="2"
   inkscape:window-width="1366"
   inkscape:window-height="704"
   id="namedview3361"
   showgrid="false"
   inkscape:zoom="4"
   inkscape:cx="47.543353"
   inkscape:cy="50.202349"
   inkscape:window-x="0"
   inkscape:window-y="27"
   inkscape:window-maximized="1"
   inkscape:current-layer="Layer_1" /><path
   d="m 49.141,11.311999 c -5.409,0 -13.323,-4.407 -21.036,-4.407 -7.713,0 -12.32,4.407 -13.122,5.008 -0.801,0.601 -1.303,2.904 1.102,2.203 2.405,-0.701 4.407,-0.701 8.214,-0.701 3.807,0 12.922,5.209 22.138,5.209 9.216,0 16.928,-7.111 18.631,-9.616 1.703,-2.505 0.1,-3.306 -1.202,-2.704 -1.303,0.601 -9.716,5.008 -14.725,5.008 z"
   id="path3355"
   inkscape:connector-curvature="0"
   style="fill:#ff3131" /><path
   d="m 72.639,25.033999 c -5.683,0 -13.997,-4.406 -22.1,-4.406 -8.103,0 -12.944,4.406 -13.786,5.008 -0.842,0.602 -1.369,2.904 1.157,2.203 2.526,-0.701 4.63,-0.701 8.629,-0.701 3.999,0 13.576,5.209 23.257,5.209 9.681,0 17.785,-7.111 19.574,-9.615 1.789,-2.505 0.105,-3.307 -1.263,-2.705 -1.368,0.602 -10.206,5.007 -15.468,5.007 z"
   id="path3357"
   inkscape:connector-curvature="0"
   style="fill:#319831" /><path
   d="m 39.936,40.175999 c -5.893,0 -14.515,-4.761 -22.918,-4.761 -8.403,0 -13.424,4.761 -14.297,5.41 -0.873,0.649 -1.419,3.137 1.2,2.379 2.62,-0.757 4.802,-0.757 8.949,-0.757 4.147,0 14.079,5.626 24.12,5.626 10.04,0 18.443,-7.681 20.299,-10.386 1.856,-2.705 0.109,-3.57 -1.31,-2.921 -1.419,0.649 -10.586,5.41 -16.043,5.41 z"
   id="path3359"
   inkscape:connector-curvature="0"
   style="fill:#3232cc" /><text
   xml:space="preserve"
   style="font-style:italic;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:70px;line-height:125%;font-family:Optima;-inkscape-font-specification:'Optima Bold Italic';letter-spacing:0px;word-spacing:0px;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
   x="48.124519"
   y="90.172836"
   id="text3375"
   sodipodi:linespacing="125%"><tspan
   sodipodi:role="line"
   id="tspan3377"
   x="48.124519"
   y="90.172836">1</tspan></text>
</svg>
After Width: | Height: | Size: 3.4 KiB |
68
tutorials/icons/gstreamer-logo-2.svg
Normal file
@ -0,0 +1,68 @@
<!-- Same SVG source as gstreamer-logo-1.svg above, except sodipodi:docname="gstreamer-logo-2.svg" and the tspan numeral is "2" -->
After Width: | Height: | Size: 3.4 KiB |
68
tutorials/icons/gstreamer-logo-3.svg
Normal file
@ -0,0 +1,68 @@
<!-- Same SVG source as gstreamer-logo-1.svg above, except sodipodi:docname="gstreamer-logo-3.svg" and the tspan numeral is "3" -->
After Width: | Height: | Size: 3.4 KiB |
68
tutorials/icons/gstreamer-logo-4.svg
Normal file
@ -0,0 +1,68 @@
<!-- Same SVG source as gstreamer-logo-1.svg above, except sodipodi:docname="gstreamer-logo-4.svg" and the tspan numeral is "4" -->
After Width: | Height: | Size: 3.4 KiB |
68
tutorials/icons/gstreamer-logo-5.svg
Normal file
@ -0,0 +1,68 @@
<!-- Same SVG source as gstreamer-logo-1.svg above, except sodipodi:docname="gstreamer-logo-5.svg" and the tspan numeral is "5" -->
After Width: | Height: | Size: 3.4 KiB |
221
tutorials/playback-tutorial-1.c
Normal file
@@ -0,0 +1,221 @@
#include <stdio.h>
#include <gst/gst.h>

/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
  GstElement *playbin;  /* Our one and only element */

  gint n_video;          /* Number of embedded video streams */
  gint n_audio;          /* Number of embedded audio streams */
  gint n_text;           /* Number of embedded subtitle streams */

  gint current_video;    /* Currently playing video stream */
  gint current_audio;    /* Currently playing audio stream */
  gint current_text;     /* Currently playing subtitle stream */

  GMainLoop *main_loop;  /* GLib's Main Loop */
} CustomData;

/* playbin flags */
typedef enum {
  GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
  GST_PLAY_FLAG_AUDIO = (1 << 1), /* We want audio output */
  GST_PLAY_FLAG_TEXT  = (1 << 2)  /* We want subtitle output */
} GstPlayFlags;

/* Forward definition for the message and keyboard processing functions */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data);

int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstStateChangeReturn ret;
  gint flags;
  GIOChannel *io_stdin;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  data.playbin = gst_element_factory_make ("playbin", "playbin");

  if (!data.playbin) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Set the URI to play */
  g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_cropped_multilingual.webm", NULL);

  /* Set flags to show Audio and Video but ignore Subtitles */
  g_object_get (data.playbin, "flags", &flags, NULL);
  flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO;
  flags &= ~GST_PLAY_FLAG_TEXT;
  g_object_set (data.playbin, "flags", flags, NULL);

  /* Set connection speed. This will affect some internal decisions of playbin */
  g_object_set (data.playbin, "connection-speed", 56, NULL);

  /* Add a bus watch, so we get notified when a message arrives */
  bus = gst_element_get_bus (data.playbin);
  gst_bus_add_watch (bus, (GstBusFunc)handle_message, &data);

  /* Add a keyboard watch so we get notified of keystrokes */
#ifdef G_OS_WIN32
  io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
#else
  io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
  g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);

  /* Start playing */
  ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.playbin);
    return -1;
  }

  /* Create a GLib Main Loop and set it to run */
  data.main_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.main_loop);

  /* Free resources */
  g_main_loop_unref (data.main_loop);
  g_io_channel_unref (io_stdin);
  gst_object_unref (bus);
  gst_element_set_state (data.playbin, GST_STATE_NULL);
  gst_object_unref (data.playbin);
  return 0;
}

/* Extract some metadata from the streams and print it on the screen */
static void analyze_streams (CustomData *data) {
  gint i;
  GstTagList *tags;
  gchar *str;
  guint rate;

  /* Read some properties */
  g_object_get (data->playbin, "n-video", &data->n_video, NULL);
  g_object_get (data->playbin, "n-audio", &data->n_audio, NULL);
  g_object_get (data->playbin, "n-text", &data->n_text, NULL);

  g_print ("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
    data->n_video, data->n_audio, data->n_text);

  g_print ("\n");
  for (i = 0; i < data->n_video; i++) {
    tags = NULL;
    /* Retrieve the stream's video tags */
    g_signal_emit_by_name (data->playbin, "get-video-tags", i, &tags);
    if (tags) {
      g_print ("video stream %d:\n", i);
      gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str);
      g_print ("  codec: %s\n", str ? str : "unknown");
      g_free (str);
      gst_tag_list_unref (tags);
    }
  }

  g_print ("\n");
  for (i = 0; i < data->n_audio; i++) {
    tags = NULL;
    /* Retrieve the stream's audio tags */
    g_signal_emit_by_name (data->playbin, "get-audio-tags", i, &tags);
    if (tags) {
      g_print ("audio stream %d:\n", i);
      if (gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &str)) {
        g_print ("  codec: %s\n", str);
        g_free (str);
      }
      if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
        g_print ("  language: %s\n", str);
        g_free (str);
      }
      if (gst_tag_list_get_uint (tags, GST_TAG_BITRATE, &rate)) {
        g_print ("  bitrate: %d\n", rate);
      }
      gst_tag_list_unref (tags);
    }
  }

  g_print ("\n");
  for (i = 0; i < data->n_text; i++) {
    tags = NULL;
    /* Retrieve the stream's subtitle tags */
    g_signal_emit_by_name (data->playbin, "get-text-tags", i, &tags);
    if (tags) {
      g_print ("subtitle stream %d:\n", i);
      if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
        g_print ("  language: %s\n", str);
        g_free (str);
      }
      gst_tag_list_unref (tags);
    }
  }

  g_object_get (data->playbin, "current-video", &data->current_video, NULL);
  g_object_get (data->playbin, "current-audio", &data->current_audio, NULL);
  g_object_get (data->playbin, "current-text", &data->current_text, NULL);

  g_print ("\n");
  g_print ("Currently playing video stream %d, audio stream %d and text stream %d\n",
    data->current_video, data->current_audio, data->current_text);
  g_print ("Type any number and hit ENTER to select a different audio stream\n");
}

/* Process messages from GStreamer */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error (msg, &err, &debug_info);
      g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
      g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
      g_clear_error (&err);
      g_free (debug_info);
      g_main_loop_quit (data->main_loop);
      break;
    case GST_MESSAGE_EOS:
      g_print ("End-Of-Stream reached.\n");
      g_main_loop_quit (data->main_loop);
      break;
    case GST_MESSAGE_STATE_CHANGED: {
      GstState old_state, new_state, pending_state;
      gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
      if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
        if (new_state == GST_STATE_PLAYING) {
          /* Once we are in the playing state, analyze the streams */
          analyze_streams (data);
        }
      }
    } break;
    default:
      break;
  }

  /* We want to keep receiving messages */
  return TRUE;
}

/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
  gchar *str = NULL;

  if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
    int index = g_ascii_strtoull (str, NULL, 0);
    if (index < 0 || index >= data->n_audio) {
      g_printerr ("Index out of bounds\n");
    } else {
      /* If the input was a valid audio stream index, set the current audio stream */
      g_print ("Setting current audio stream to %d\n", index);
      g_object_set (data->playbin, "current-audio", index, NULL);
    }
  }
  g_free (str);
  return TRUE;
}
223
tutorials/playback-tutorial-2.c
Normal file
@@ -0,0 +1,223 @@
#include <stdio.h>
#include <gst/gst.h>

/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
  GstElement *playbin;  /* Our one and only element */

  gint n_video;          /* Number of embedded video streams */
  gint n_audio;          /* Number of embedded audio streams */
  gint n_text;           /* Number of embedded subtitle streams */

  gint current_video;    /* Currently playing video stream */
  gint current_audio;    /* Currently playing audio stream */
  gint current_text;     /* Currently playing subtitle stream */

  GMainLoop *main_loop;  /* GLib's Main Loop */
} CustomData;

/* playbin flags */
typedef enum {
  GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
  GST_PLAY_FLAG_AUDIO = (1 << 1), /* We want audio output */
  GST_PLAY_FLAG_TEXT  = (1 << 2)  /* We want subtitle output */
} GstPlayFlags;

/* Forward definition for the message and keyboard processing functions */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data);

int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstStateChangeReturn ret;
  gint flags;
  GIOChannel *io_stdin;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  data.playbin = gst_element_factory_make ("playbin", "playbin");

  if (!data.playbin) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Set the URI to play */
  g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.ogv", NULL);

  /* Set the subtitle URI to play and some font description */
  g_object_set (data.playbin, "suburi", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer_gr.srt", NULL);
  g_object_set (data.playbin, "subtitle-font-desc", "Sans, 18", NULL);

  /* Set flags to show Audio, Video and Subtitles */
  g_object_get (data.playbin, "flags", &flags, NULL);
  flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_TEXT;
  g_object_set (data.playbin, "flags", flags, NULL);

  /* Add a bus watch, so we get notified when a message arrives */
  bus = gst_element_get_bus (data.playbin);
  gst_bus_add_watch (bus, (GstBusFunc)handle_message, &data);

  /* Add a keyboard watch so we get notified of keystrokes */
#ifdef G_OS_WIN32
  io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
#else
  io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
  g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);

  /* Start playing */
  ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.playbin);
    return -1;
  }

  /* Create a GLib Main Loop and set it to run */
  data.main_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.main_loop);

  /* Free resources */
  g_main_loop_unref (data.main_loop);
  g_io_channel_unref (io_stdin);
  gst_object_unref (bus);
  gst_element_set_state (data.playbin, GST_STATE_NULL);
  gst_object_unref (data.playbin);
  return 0;
}

/* Extract some metadata from the streams and print it on the screen */
static void analyze_streams (CustomData *data) {
  gint i;
  GstTagList *tags;
  gchar *str;
  guint rate;

  /* Read some properties */
  g_object_get (data->playbin, "n-video", &data->n_video, NULL);
  g_object_get (data->playbin, "n-audio", &data->n_audio, NULL);
  g_object_get (data->playbin, "n-text", &data->n_text, NULL);

  g_print ("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
    data->n_video, data->n_audio, data->n_text);

  g_print ("\n");
  for (i = 0; i < data->n_video; i++) {
    tags = NULL;
    /* Retrieve the stream's video tags */
    g_signal_emit_by_name (data->playbin, "get-video-tags", i, &tags);
    if (tags) {
      g_print ("video stream %d:\n", i);
      gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str);
      g_print ("  codec: %s\n", str ? str : "unknown");
      g_free (str);
      gst_tag_list_free (tags);
    }
  }

  g_print ("\n");
  for (i = 0; i < data->n_audio; i++) {
    tags = NULL;
    /* Retrieve the stream's audio tags */
    g_signal_emit_by_name (data->playbin, "get-audio-tags", i, &tags);
    if (tags) {
      g_print ("audio stream %d:\n", i);
      if (gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &str)) {
        g_print ("  codec: %s\n", str);
        g_free (str);
      }
      if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
        g_print ("  language: %s\n", str);
        g_free (str);
      }
      if (gst_tag_list_get_uint (tags, GST_TAG_BITRATE, &rate)) {
        g_print ("  bitrate: %d\n", rate);
      }
      gst_tag_list_free (tags);
    }
  }

  g_print ("\n");
  for (i = 0; i < data->n_text; i++) {
    tags = NULL;
    /* Retrieve the stream's subtitle tags */
    g_print ("subtitle stream %d:\n", i);
    g_signal_emit_by_name (data->playbin, "get-text-tags", i, &tags);
    if (tags) {
      if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
        g_print ("  language: %s\n", str);
        g_free (str);
      }
      gst_tag_list_free (tags);
    } else {
      g_print ("  no tags found\n");
    }
  }

  g_object_get (data->playbin, "current-video", &data->current_video, NULL);
  g_object_get (data->playbin, "current-audio", &data->current_audio, NULL);
  g_object_get (data->playbin, "current-text", &data->current_text, NULL);

  g_print ("\n");
  g_print ("Currently playing video stream %d, audio stream %d and subtitle stream %d\n",
    data->current_video, data->current_audio, data->current_text);
  g_print ("Type any number and hit ENTER to select a different subtitle stream\n");
}

/* Process messages from GStreamer */
static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR:
      gst_message_parse_error (msg, &err, &debug_info);
      g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
      g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
      g_clear_error (&err);
      g_free (debug_info);
      g_main_loop_quit (data->main_loop);
      break;
    case GST_MESSAGE_EOS:
      g_print ("End-Of-Stream reached.\n");
      g_main_loop_quit (data->main_loop);
      break;
    case GST_MESSAGE_STATE_CHANGED: {
      GstState old_state, new_state, pending_state;
      gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
      if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
        if (new_state == GST_STATE_PLAYING) {
          /* Once we are in the playing state, analyze the streams */
          analyze_streams (data);
        }
      }
    } break;
    default:
      break;
  }

  /* We want to keep receiving messages */
  return TRUE;
}

/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
  gchar *str = NULL;

  if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
    int index = g_ascii_strtoull (str, NULL, 0);
    if (index < 0 || index >= data->n_text) {
      g_printerr ("Index out of bounds\n");
    } else {
      /* If the input was a valid subtitle stream index, set the current subtitle stream */
      g_print ("Setting current subtitle stream to %d\n", index);
      g_object_set (data->playbin, "current-text", index, NULL);
    }
  }
  g_free (str);
  return TRUE;
}
154
tutorials/playback-tutorial-3.c
Normal file
@@ -0,0 +1,154 @@
#include <gst/gst.h>
#include <gst/audio/audio.h>
#include <string.h>

#define CHUNK_SIZE 1024   /* Amount of bytes we are sending in each buffer */
#define SAMPLE_RATE 44100 /* Samples per second we are sending */

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *app_source;

  guint64 num_samples;   /* Number of samples generated so far (for timestamp generation) */
  gfloat a, b, c, d;     /* For waveform generation */

  guint sourceid;        /* To control the GSource */

  GMainLoop *main_loop;  /* GLib's Main Loop */
} CustomData;

/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
 * The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
 * and is removed when appsrc has enough data (enough-data signal).
 */
static gboolean push_data (CustomData *data) {
  GstBuffer *buffer;
  GstFlowReturn ret;
  int i;
  GstMapInfo map;
  gint16 *raw;
  gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
  gfloat freq;

  /* Create a new empty buffer */
  buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);

  /* Set its timestamp and duration */
  GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples, GST_SECOND, SAMPLE_RATE);
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (CHUNK_SIZE, GST_SECOND, SAMPLE_RATE);

  /* Generate some psychedelic waveforms */
  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  raw = (gint16 *)map.data;
  data->c += data->d;
  data->d -= data->c / 1000;
  freq = 1100 + 1000 * data->d;
  for (i = 0; i < num_samples; i++) {
    data->a += data->b;
    data->b -= data->a / freq;
    raw[i] = (gint16)(500 * data->a);
  }
  gst_buffer_unmap (buffer, &map);
  data->num_samples += num_samples;

  /* Push the buffer into the appsrc */
  g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);

  /* Free the buffer now that we are done with it */
  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* We got some error, stop sending data */
    return FALSE;
  }

  return TRUE;
}

/* This signal callback triggers when appsrc needs data. Here, we add an idle handler
 * to the mainloop to start pushing data into the appsrc */
static void start_feed (GstElement *source, guint size, CustomData *data) {
  if (data->sourceid == 0) {
    g_print ("Start feeding\n");
    data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
  }
}

/* This callback triggers when appsrc has enough data and we can stop sending.
 * We remove the idle handler from the mainloop */
static void stop_feed (GstElement *source, CustomData *data) {
  if (data->sourceid != 0) {
    g_print ("Stop feeding\n");
    g_source_remove (data->sourceid);
    data->sourceid = 0;
  }
}

/* This function is called when an error message is posted on the bus */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
  GError *err;
  gchar *debug_info;

  /* Print error details on the screen */
  gst_message_parse_error (msg, &err, &debug_info);
  g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
  g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
  g_clear_error (&err);
  g_free (debug_info);

  g_main_loop_quit (data->main_loop);
}

/* This function is called when playbin has created the appsrc element, so we have
 * a chance to configure it. */
static void source_setup (GstElement *pipeline, GstElement *source, CustomData *data) {
  GstAudioInfo info;
  GstCaps *audio_caps;

  g_print ("Source has been created. Configuring.\n");
  data->app_source = source;

  /* Configure appsrc */
  gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, SAMPLE_RATE, 1, NULL);
  audio_caps = gst_audio_info_to_caps (&info);
  g_object_set (source, "caps", audio_caps, "format", GST_FORMAT_TIME, NULL);
  g_signal_connect (source, "need-data", G_CALLBACK (start_feed), data);
  g_signal_connect (source, "enough-data", G_CALLBACK (stop_feed), data);
  gst_caps_unref (audio_caps);
}

int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;

  /* Initialize custom data structure */
  memset (&data, 0, sizeof (data));
  data.b = 1; /* For waveform generation */
  data.d = 1;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the playbin element */
  data.pipeline = gst_parse_launch ("playbin uri=appsrc://", NULL);
  g_signal_connect (data.pipeline, "source-setup", G_CALLBACK (source_setup), &data);

  /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
  bus = gst_element_get_bus (data.pipeline);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
  gst_object_unref (bus);

  /* Start playing the pipeline */
  gst_element_set_state (data.pipeline, GST_STATE_PLAYING);

  /* Create a GLib Main Loop and set it to run */
  data.main_loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.main_loop);

  /* Free resources */
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}
172
tutorials/playback-tutorial-4.c
Normal file
@@ -0,0 +1,172 @@
#include <gst/gst.h>
#include <string.h>

#define GRAPH_LENGTH 78

/* playbin flags */
typedef enum {
  GST_PLAY_FLAG_DOWNLOAD = (1 << 7) /* Enable progressive download (on selected formats) */
} GstPlayFlags;

typedef struct _CustomData {
  gboolean is_live;
  GstElement *pipeline;
  GMainLoop *loop;
  gint buffering_level;
} CustomData;

static void got_location (GstObject *gstobject, GstObject *prop_object, GParamSpec *prop, gpointer data) {
  gchar *location;
  g_object_get (G_OBJECT (prop_object), "temp-location", &location, NULL);
  g_print ("Temporary file: %s\n", location);
  g_free (location);
  /* Uncomment this line to keep the temporary file after the program exits */
  /* g_object_set (G_OBJECT (prop_object), "temp-remove", FALSE, NULL); */
}

static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR: {
      GError *err;
      gchar *debug;

      gst_message_parse_error (msg, &err, &debug);
      g_print ("Error: %s\n", err->message);
      g_error_free (err);
      g_free (debug);

      gst_element_set_state (data->pipeline, GST_STATE_READY);
      g_main_loop_quit (data->loop);
      break;
    }
    case GST_MESSAGE_EOS:
      /* end-of-stream */
      gst_element_set_state (data->pipeline, GST_STATE_READY);
      g_main_loop_quit (data->loop);
      break;
    case GST_MESSAGE_BUFFERING:
      /* If the stream is live, we do not care about buffering. */
      if (data->is_live) break;

      gst_message_parse_buffering (msg, &data->buffering_level);

      /* Wait until buffering is complete before starting/resuming playback */
      if (data->buffering_level < 100)
        gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
      else
        gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
      break;
    case GST_MESSAGE_CLOCK_LOST:
      /* Get a new clock */
      gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
      gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
      break;
    default:
      /* Unhandled message */
      break;
  }
}

static gboolean refresh_ui (CustomData *data) {
  GstQuery *query;
  gboolean result;

  query = gst_query_new_buffering (GST_FORMAT_PERCENT);
  result = gst_element_query (data->pipeline, query);
  if (result) {
    gint n_ranges, range, i;
    gchar graph[GRAPH_LENGTH + 1];
    gint64 position = 0, duration = 0;

    memset (graph, ' ', GRAPH_LENGTH);
    graph[GRAPH_LENGTH] = '\0';

    n_ranges = gst_query_get_n_buffering_ranges (query);
    for (range = 0; range < n_ranges; range++) {
      gint64 start, stop;
      gst_query_parse_nth_buffering_range (query, range, &start, &stop);
      start = start * GRAPH_LENGTH / (stop - start);
      stop = stop * GRAPH_LENGTH / (stop - start);
      for (i = (gint)start; i < stop; i++)
        graph[i] = '-';
    }
    if (gst_element_query_position (data->pipeline, GST_FORMAT_TIME, &position) &&
        GST_CLOCK_TIME_IS_VALID (position) &&
        gst_element_query_duration (data->pipeline, GST_FORMAT_TIME, &duration) &&
        GST_CLOCK_TIME_IS_VALID (duration)) {
      i = (gint)(GRAPH_LENGTH * (double)position / (double)(duration + 1));
      graph[i] = data->buffering_level < 100 ? 'X' : '>';
    }
    g_print ("[%s]", graph);
    if (data->buffering_level < 100) {
      g_print (" Buffering: %3d%%", data->buffering_level);
    } else {
      g_print ("                ");
    }
    g_print ("\r");
  }

  return TRUE;
}

int main(int argc, char *argv[]) {
  GstElement *pipeline;
  GstBus *bus;
  GstStateChangeReturn ret;
  GMainLoop *main_loop;
  CustomData data;
  guint flags;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Initialize our data structure */
  memset (&data, 0, sizeof (data));
  data.buffering_level = 100;

  /* Build the pipeline */
  pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);
  bus = gst_element_get_bus (pipeline);

  /* Set the download flag */
  g_object_get (pipeline, "flags", &flags, NULL);
  flags |= GST_PLAY_FLAG_DOWNLOAD;
  g_object_set (pipeline, "flags", flags, NULL);

  /* Uncomment this line to limit the amount of downloaded data */
  /* g_object_set (pipeline, "ring-buffer-max-size", (guint64)4000000, NULL); */

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  } else if (ret == GST_STATE_CHANGE_NO_PREROLL) {
    data.is_live = TRUE;
  }

  main_loop = g_main_loop_new (NULL, FALSE);
  data.loop = main_loop;
  data.pipeline = pipeline;

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (cb_message), &data);
  g_signal_connect (pipeline, "deep-notify::temp-location", G_CALLBACK (got_location), NULL);

  /* Register a function that GLib will call every second */
  g_timeout_add_seconds (1, (GSourceFunc)refresh_ui, &data);

  g_main_loop_run (main_loop);

  /* Free resources */
  g_main_loop_unref (main_loop);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  g_print ("\n");
  return 0;
}
146
tutorials/playback-tutorial-5.c
Normal file
@@ -0,0 +1,146 @@
#include <string.h>
#include <stdio.h>
#include <gst/gst.h>
#include <gst/video/colorbalance.h>

typedef struct _CustomData {
  GstElement *pipeline;
  GMainLoop *loop;
} CustomData;

/* Process a color balance command */
static void update_color_channel (const gchar *channel_name, gboolean increase, GstColorBalance *cb) {
  gdouble step;
  gint value;
  GstColorBalanceChannel *channel = NULL;
  const GList *channels, *l;

  /* Retrieve the list of channels and locate the requested one */
  channels = gst_color_balance_list_channels (cb);
  for (l = channels; l != NULL; l = l->next) {
    GstColorBalanceChannel *tmp = (GstColorBalanceChannel *)l->data;

    if (g_strrstr (tmp->label, channel_name)) {
      channel = tmp;
      break;
    }
  }
  if (!channel)
    return;

  /* Change the channel's value */
  step = 0.1 * (channel->max_value - channel->min_value);
  value = gst_color_balance_get_value (cb, channel);
  if (increase) {
    value = (gint)(value + step);
    if (value > channel->max_value)
      value = channel->max_value;
  } else {
    value = (gint)(value - step);
    if (value < channel->min_value)
      value = channel->min_value;
  }
  gst_color_balance_set_value (cb, channel, value);
}

/* Output the current values of all Color Balance channels */
static void print_current_values (GstElement *pipeline) {
  const GList *channels, *l;

  /* Output Color Balance values */
  channels = gst_color_balance_list_channels (GST_COLOR_BALANCE (pipeline));
  for (l = channels; l != NULL; l = l->next) {
    GstColorBalanceChannel *channel = (GstColorBalanceChannel *)l->data;
    gint value = gst_color_balance_get_value (GST_COLOR_BALANCE (pipeline), channel);
    g_print ("%s: %3d%% ", channel->label,
        100 * (value - channel->min_value) / (channel->max_value - channel->min_value));
  }
  g_print ("\n");
}

/* Process keyboard input */
static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
  gchar *str = NULL;

  if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) != G_IO_STATUS_NORMAL) {
    return TRUE;
  }

  switch (g_ascii_tolower (str[0])) {
    case 'c':
      update_color_channel ("CONTRAST", g_ascii_isupper (str[0]), GST_COLOR_BALANCE (data->pipeline));
      break;
    case 'b':
      update_color_channel ("BRIGHTNESS", g_ascii_isupper (str[0]), GST_COLOR_BALANCE (data->pipeline));
      break;
    case 'h':
      update_color_channel ("HUE", g_ascii_isupper (str[0]), GST_COLOR_BALANCE (data->pipeline));
      break;
    case 's':
      update_color_channel ("SATURATION", g_ascii_isupper (str[0]), GST_COLOR_BALANCE (data->pipeline));
      break;
    case 'q':
      g_main_loop_quit (data->loop);
      break;
    default:
      break;
  }

  g_free (str);

  print_current_values (data->pipeline);

  return TRUE;
}

int main(int argc, char *argv[]) {
  CustomData data;
  GstStateChangeReturn ret;
  GIOChannel *io_stdin;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Initialize our data structure */
  memset (&data, 0, sizeof (data));

  /* Print usage map */
  g_print (
    "USAGE: Choose one of the following options, then press enter:\n"
    " 'C' to increase contrast, 'c' to decrease contrast\n"
    " 'B' to increase brightness, 'b' to decrease brightness\n"
    " 'H' to increase hue, 'h' to decrease hue\n"
    " 'S' to increase saturation, 's' to decrease saturation\n"
    " 'Q' to quit\n");

  /* Build the pipeline */
  data.pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

  /* Add a keyboard watch so we get notified of keystrokes */
#ifdef G_OS_WIN32
  io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
#else
  io_stdin = g_io_channel_unix_new (fileno (stdin));
#endif
  g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);

  /* Start playing */
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }
  print_current_values (data.pipeline);

  /* Create a GLib Main Loop and set it to run */
  data.loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (data.loop);

  /* Free resources */
  g_main_loop_unref (data.loop);
  g_io_channel_unref (io_stdin);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}
89
tutorials/playback-tutorial-6.c
Normal file
@@ -0,0 +1,89 @@
#include <gst/gst.h>

/* playbin flags */
typedef enum {
  GST_PLAY_FLAG_VIS = (1 << 3) /* Enable rendering of visualizations when there is no video stream. */
} GstPlayFlags;

/* Return TRUE if this is a Visualization element */
static gboolean filter_vis_features (GstPluginFeature *feature, gpointer data) {
  GstElementFactory *factory;

  if (!GST_IS_ELEMENT_FACTORY (feature))
    return FALSE;
  factory = GST_ELEMENT_FACTORY (feature);
  if (!g_strrstr (gst_element_factory_get_klass (factory), "Visualization"))
    return FALSE;

  return TRUE;
}

int main(int argc, char *argv[]) {
  GstElement *pipeline, *vis_plugin;
  GstBus *bus;
  GstMessage *msg;
  GList *list, *walk;
  GstElementFactory *selected_factory = NULL;
  guint flags;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Get a list of all visualization plugins */
  list = gst_registry_feature_filter (gst_registry_get (), filter_vis_features, FALSE, NULL);

  /* Print their names */
  g_print("Available visualization plugins:\n");
  for (walk = list; walk != NULL; walk = g_list_next (walk)) {
    const gchar *name;
    GstElementFactory *factory;

    factory = GST_ELEMENT_FACTORY (walk->data);
    name = gst_element_factory_get_longname (factory);
    g_print(" %s\n", name);

    if (selected_factory == NULL || g_str_has_prefix (name, "GOOM")) {
      selected_factory = factory;
    }
  }

  /* Don't use the factory if it's still empty */
  /* e.g. no visualization plugins found */
  if (!selected_factory) {
    g_print ("No visualization plugins found!\n");
    return -1;
  }

  /* We have now selected a factory for the visualization element */
  g_print ("Selected '%s'\n", gst_element_factory_get_longname (selected_factory));
  vis_plugin = gst_element_factory_create (selected_factory, NULL);
  if (!vis_plugin)
    return -1;

  /* Build the pipeline */
  pipeline = gst_parse_launch ("playbin uri=http://radio.hbr1.com:19800/ambient.ogg", NULL);

  /* Set the visualization flag */
  g_object_get (pipeline, "flags", &flags, NULL);
  flags |= GST_PLAY_FLAG_VIS;
  g_object_set (pipeline, "flags", flags, NULL);

  /* Set the vis plugin for playbin */
  g_object_set (pipeline, "vis-plugin", vis_plugin, NULL);

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_plugin_feature_list_free (list);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
55
tutorials/playback-tutorial-7.c
Normal file
@@ -0,0 +1,55 @@
#include <gst/gst.h>

int main(int argc, char *argv[]) {
  GstElement *pipeline, *bin, *equalizer, *convert, *sink;
  GstPad *pad, *ghost_pad;
  GstBus *bus;
  GstMessage *msg;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Build the pipeline */
  pipeline = gst_parse_launch ("playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm", NULL);

  /* Create the elements inside the sink bin */
  equalizer = gst_element_factory_make ("equalizer-3bands", "equalizer");
  convert = gst_element_factory_make ("audioconvert", "convert");
  sink = gst_element_factory_make ("autoaudiosink", "audio_sink");
  if (!equalizer || !convert || !sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Create the sink bin, add the elements and link them */
  bin = gst_bin_new ("audio_sink_bin");
  gst_bin_add_many (GST_BIN (bin), equalizer, convert, sink, NULL);
  gst_element_link_many (equalizer, convert, sink, NULL);
  pad = gst_element_get_static_pad (equalizer, "sink");
  ghost_pad = gst_ghost_pad_new ("sink", pad);
  gst_pad_set_active (ghost_pad, TRUE);
  gst_element_add_pad (bin, ghost_pad);
  gst_object_unref (pad);

  /* Configure the equalizer */
  g_object_set (G_OBJECT (equalizer), "band1", (gdouble)-24.0, NULL);
  g_object_set (G_OBJECT (equalizer), "band2", (gdouble)-24.0, NULL);

  /* Set playbin's audio sink to be our sink bin */
  g_object_set (GST_OBJECT (pipeline), "audio-sink", bin, NULL);

  /* Start playing */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  /* Free resources */
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
95
tutorials/vs2010/basic-tutorial-1/basic-tutorial-1.vcxproj
Normal file
@@ -0,0 +1,95 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup Label="ProjectConfigurations">
    <ProjectConfiguration Include="Debug|Win32">
      <Configuration>Debug</Configuration>
      <Platform>Win32</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Debug|x64">
      <Configuration>Debug</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|Win32">
      <Configuration>Release</Configuration>
      <Platform>Win32</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|x64">
      <Configuration>Release</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
  </ItemGroup>
  <ItemGroup>
    <ClCompile Include="..\..\basic-tutorial-1.c" />
  </ItemGroup>
  <PropertyGroup Label="Globals">
    <Keyword>Win32Proj</Keyword>
    <ProjectGuid>{63AEFB51-5FB8-409B-BDF3-893A23D28BF3}</ProjectGuid>
    <TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
  </PropertyGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
  <PropertyGroup Condition="'$(Configuration)'=='Debug'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <UseDebugLibraries>true</UseDebugLibraries>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)'=='Release'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <UseDebugLibraries>false</UseDebugLibraries>
    <WholeProgramOptimization>true</WholeProgramOptimization>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
  <ImportGroup Label="ExtensionSettings">
  </ImportGroup>
  <ImportGroup Label="PropertySheets" Condition="'$(Platform)'=='Win32'">
    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
    <Import Project="$(GSTREAMER_1_0_ROOT_X86)\share\vs\2010\libs\gstreamer-1.0.props" Condition="exists('$(GSTREAMER_1_0_ROOT_X86)\share\vs\2010\libs\gstreamer-1.0.props')" />
    <Import Project="$(GSTREAMER_1_0_ROOT_X86)\share\vs\2010\msvc\x86.props" Condition="exists('$(GSTREAMER_1_0_ROOT_X86)\share\vs\2010\msvc\x86.props')" />
  </ImportGroup>
  <ImportGroup Label="PropertySheets" Condition="'$(Platform)'=='x64'">
    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
    <Import Project="$(GSTREAMER_1_0_ROOT_X86_64)\share\vs\2010\libs\gstreamer-1.0.props" Condition="exists('$(GSTREAMER_1_0_ROOT_X86_64)\share\vs\2010\libs\gstreamer-1.0.props')" />
    <Import Project="$(GSTREAMER_1_0_ROOT_X86_64)\share\vs\2010\msvc\x86_64.props" Condition="exists('$(GSTREAMER_1_0_ROOT_X86_64)\share\vs\2010\msvc\x86_64.props')" />
  </ImportGroup>
  <PropertyGroup Label="UserMacros" />
  <PropertyGroup Condition="'$(Configuration)'=='Debug'">
    <LinkIncremental>true</LinkIncremental>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)'=='Release'">
    <LinkIncremental>false</LinkIncremental>
  </PropertyGroup>
  <ItemDefinitionGroup Condition="'$(Configuration)'=='Debug'">
    <ClCompile>
      <PrecompiledHeader>
      </PrecompiledHeader>
      <WarningLevel>Level3</WarningLevel>
      <Optimization>Disabled</Optimization>
      <PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
    </ClCompile>
    <Link>
      <SubSystem>Console</SubSystem>
      <GenerateDebugInformation>true</GenerateDebugInformation>
      <AdditionalDependencies>%(AdditionalDependencies)</AdditionalDependencies>
      <AdditionalLibraryDirectories>%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
    </Link>
  </ItemDefinitionGroup>
  <ItemDefinitionGroup Condition="'$(Configuration)'=='Release'">
    <ClCompile>
      <WarningLevel>Level3</WarningLevel>
      <PrecompiledHeader>
      </PrecompiledHeader>
      <Optimization>MaxSpeed</Optimization>
      <FunctionLevelLinking>true</FunctionLevelLinking>
      <IntrinsicFunctions>true</IntrinsicFunctions>
      <PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
    </ClCompile>
    <Link>
      <SubSystem>Console</SubSystem>
      <GenerateDebugInformation>false</GenerateDebugInformation>
      <EnableCOMDATFolding>true</EnableCOMDATFolding>
    </Link>
  </ItemDefinitionGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
  <ImportGroup Label="ExtensionTargets">
  </ImportGroup>
</Project>
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup>
    <ClCompile Include="..\..\basic-tutorial-1.c" />
  </ItemGroup>
</Project>
95
tutorials/vs2010/basic-tutorial-12/basic-tutorial-12.vcxproj
Normal file
@@ -0,0 +1,95 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup Label="ProjectConfigurations">
    <ProjectConfiguration Include="Debug|Win32">
      <Configuration>Debug</Configuration>
      <Platform>Win32</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Debug|x64">
      <Configuration>Debug</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|Win32">
      <Configuration>Release</Configuration>
      <Platform>Win32</Platform>
    </ProjectConfiguration>
    <ProjectConfiguration Include="Release|x64">
      <Configuration>Release</Configuration>
      <Platform>x64</Platform>
    </ProjectConfiguration>
  </ItemGroup>
  <ItemGroup>
    <ClCompile Include="..\..\basic-tutorial-12.c" />
  </ItemGroup>
  <PropertyGroup Label="Globals">
    <Keyword>Win32Proj</Keyword>
    <ProjectGuid>{A2E63C29-3375-4930-B7D3-2F23EC824EAF}</ProjectGuid>
    <TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
  </PropertyGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
  <PropertyGroup Condition="'$(Configuration)'=='Debug'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <UseDebugLibraries>true</UseDebugLibraries>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)'=='Release'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <UseDebugLibraries>false</UseDebugLibraries>
    <WholeProgramOptimization>true</WholeProgramOptimization>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
  <ImportGroup Label="ExtensionSettings">
  </ImportGroup>
  <ImportGroup Label="PropertySheets" Condition="'$(Platform)'=='Win32'">
    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
    <Import Project="$(GSTREAMER_1_0_ROOT_X86)\share\vs\2010\libs\gstreamer-1.0.props" Condition="exists('$(GSTREAMER_1_0_ROOT_X86)\share\vs\2010\libs\gstreamer-1.0.props')" />
    <Import Project="$(GSTREAMER_1_0_ROOT_X86)\share\vs\2010\msvc\x86.props" Condition="exists('$(GSTREAMER_1_0_ROOT_X86)\share\vs\2010\msvc\x86.props')" />
  </ImportGroup>
  <ImportGroup Label="PropertySheets" Condition="'$(Platform)'=='x64'">
    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
    <Import Project="$(GSTREAMER_1_0_ROOT_X86_64)\share\vs\2010\libs\gstreamer-1.0.props" Condition="exists('$(GSTREAMER_1_0_ROOT_X86_64)\share\vs\2010\libs\gstreamer-1.0.props')" />
    <Import Project="$(GSTREAMER_1_0_ROOT_X86_64)\share\vs\2010\msvc\x86_64.props" Condition="exists('$(GSTREAMER_1_0_ROOT_X86_64)\share\vs\2010\msvc\x86_64.props')" />
  </ImportGroup>
  <PropertyGroup Label="UserMacros" />
  <PropertyGroup Condition="'$(Configuration)'=='Debug'">
    <LinkIncremental>true</LinkIncremental>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)'=='Release'">
    <LinkIncremental>false</LinkIncremental>
  </PropertyGroup>
  <ItemDefinitionGroup Condition="'$(Configuration)'=='Debug'">
    <ClCompile>
      <PrecompiledHeader>
      </PrecompiledHeader>
      <WarningLevel>Level3</WarningLevel>
      <Optimization>Disabled</Optimization>
      <PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
    </ClCompile>
    <Link>
      <SubSystem>Console</SubSystem>
      <GenerateDebugInformation>true</GenerateDebugInformation>
      <AdditionalDependencies>%(AdditionalDependencies)</AdditionalDependencies>
      <AdditionalLibraryDirectories>%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
    </Link>
  </ItemDefinitionGroup>
  <ItemDefinitionGroup Condition="'$(Configuration)'=='Release'">
    <ClCompile>
      <WarningLevel>Level3</WarningLevel>
      <PrecompiledHeader>
      </PrecompiledHeader>
      <Optimization>MaxSpeed</Optimization>
      <FunctionLevelLinking>true</FunctionLevelLinking>
      <IntrinsicFunctions>true</IntrinsicFunctions>
      <PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
    </ClCompile>
    <Link>
      <SubSystem>Console</SubSystem>
      <GenerateDebugInformation>false</GenerateDebugInformation>
      <EnableCOMDATFolding>true</EnableCOMDATFolding>
    </Link>
  </ItemDefinitionGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
  <ImportGroup Label="ExtensionTargets">
  </ImportGroup>
</Project>