Revamp tutorial 5 code to continue what we have been showing in all previous tutorials. Drop unused code and add goodies like media size reporting and seek throttling.

This commit is contained in:
Xavi Artigas 2012-11-05 10:36:01 +01:00
parent ac07b88df9
commit 2635e59fb4
4 changed files with 401 additions and 195 deletions

View file

@@ -4,8 +4,9 @@
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <gst/gst.h>
#include <pthread.h>
#include <gst/interfaces/xoverlay.h>
#include <gst/video/video.h>
#include <pthread.h>
GST_DEBUG_CATEGORY_STATIC (debug_category);
#define GST_CAT_DEFAULT debug_category
@@ -22,33 +23,46 @@ GST_DEBUG_CATEGORY_STATIC (debug_category);
# define SET_CUSTOM_DATA(env, thiz, fieldID, data) (*env)->SetLongField (env, thiz, fieldID, (jlong)(jint)data)
#endif
/* Do not allow seeks to be performed closer than this distance. It is visually useless, and will probably
* confuse some demuxers. */
#define SEEK_MIN_DELAY (500 * GST_MSECOND)
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
jobject app;
GstElement *pipeline;
GMainContext *context;
GMainLoop *main_loop;
ANativeWindow *native_window;
GstState state, target_state;
gint64 position;
gint64 duration;
gint64 desired_position;
GstClockTime last_seek_time;
gboolean initialized;
gboolean is_live;
jobject app; /* Application instance, used to call its methods. A global reference is kept. */
GstElement *pipeline; /* The running pipeline */
GMainContext *context; /* GLib context used to run the main loop */
GMainLoop *main_loop; /* GLib main loop */
gboolean initialized; /* To avoid informing the UI multiple times about the initialization */
ANativeWindow *native_window; /* The Android native window where video will be rendered */
GstState state; /* Current pipeline state */
GstState target_state; /* Desired pipeline state, to be set once buffering is complete */
gint64 duration; /* Cached clip duration */
gint64 desired_position; /* Position to seek to, once the pipeline is running */
GstClockTime last_seek_time; /* For seeking overflow prevention (throttling) */
gboolean is_live; /* Live streams do not use buffering */
} CustomData;
/* playbin2 flags */
typedef enum {
GST_PLAY_FLAG_TEXT = (1 << 2) /* We want subtitle output */
} GstPlayFlags;
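For context: the only use of GST_PLAY_FLAG_TEXT in this file is in app_function(), which clears it to disable subtitle rendering. The neighbouring playbin2 flag values are reproduced here for reference only (they are the well-known playbin2 constants, not part of this commit):
/* Reference only, not part of this commit: the first playbin2 flags
 *   GST_PLAY_FLAG_VIDEO = (1 << 0)   render the video stream
 *   GST_PLAY_FLAG_AUDIO = (1 << 1)   render the audio stream
 *   GST_PLAY_FLAG_TEXT  = (1 << 2)   render subtitle streams */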
/* These global variables cache values which are not changing during execution */
static pthread_t gst_app_thread;
static pthread_key_t current_jni_env;
static JavaVM *java_vm;
static jfieldID custom_data_field_id;
static jmethodID set_message_method_id;
static jmethodID set_current_position_method_id;
static jmethodID set_current_state_method_id;
static jmethodID on_gstreamer_initialized_method_id;
static jmethodID on_media_size_changed_method_id;
/*
* Private methods
*/
/* Register this thread with the VM */
static JNIEnv *attach_current_thread (void) {
JNIEnv *env;
JavaVMAttachArgs args;
@@ -66,11 +80,13 @@ static JNIEnv *attach_current_thread (void) {
return env;
}
/* Unregister this thread from the VM */
static void detach_current_thread (void *env) {
GST_DEBUG ("Detaching thread %p", g_thread_self ());
(*java_vm)->DetachCurrentThread (java_vm);
}
/* Retrieve the JNI environment for this thread */
static JNIEnv *get_jni_env (void) {
JNIEnv *env;
@@ -82,6 +98,7 @@ static JNIEnv *get_jni_env (void) {
return env;
}
/* Change the content of the UI's TextView */
static void set_ui_message (const gchar *message, CustomData *data) {
JNIEnv *env = get_jni_env ();
GST_DEBUG ("Setting message to: %s", message);
@@ -94,9 +111,9 @@ static void set_ui_message (const gchar *message, CustomData *data) {
(*env)->DeleteLocalRef (env, jmessage);
}
/* Tell the application the current position and clip duration */
static void set_current_ui_position (gint position, gint duration, CustomData *data) {
JNIEnv *env = get_jni_env ();
// GST_DEBUG ("Setting current position/duration to: %d / %d (ms)", position, duration);
(*env)->CallVoidMethod (env, data->app, set_current_position_method_id, position, duration);
if ((*env)->ExceptionCheck (env)) {
GST_ERROR ("Failed to call Java method");
@@ -104,9 +121,12 @@ static void set_current_ui_position (gint position, gint duration, CustomData *d
}
}
/* If we have a pipeline and it is running, query the current position and clip duration and inform
* the application */
static gboolean refresh_ui (CustomData *data) {
GstFormat fmt = GST_FORMAT_TIME;
gint64 current = -1;
gint64 position;
/* We do not want to update anything unless we have a working pipeline in the PAUSED or PLAYING state */
if (!data || !data->pipeline || data->state < GST_STATE_PAUSED)
@@ -119,25 +139,19 @@ static gboolean refresh_ui (CustomData *data) {
}
}
if (gst_element_query_position (data->pipeline, &fmt, &data->position)) {
/* Java expects these values in milliseconds, and Gst provides nanoseconds */
set_current_ui_position (data->position / GST_MSECOND, data->duration / GST_MSECOND, data);
if (gst_element_query_position (data->pipeline, &fmt, &position)) {
/* Java expects these values in milliseconds, and GStreamer provides nanoseconds */
set_current_ui_position (position / GST_MSECOND, data->duration / GST_MSECOND, data);
}
return TRUE;
}
static void execute_seek (gint64 desired_position, CustomData *data);
static gboolean
delayed_seek_cb (CustomData *data)
{
GST_DEBUG ("Doing delayed seek %" GST_TIME_FORMAT, GST_TIME_ARGS (data->desired_position));
data->last_seek_time = GST_CLOCK_TIME_NONE;
execute_seek (data->desired_position, data);
return FALSE;
}
/* Forward declaration for the delayed seek callback */
static gboolean delayed_seek_cb (CustomData *data);
/* Perform a seek if we are not too close to the previous seek. Otherwise, schedule the seek for
* some time in the future. */
static void execute_seek (gint64 desired_position, CustomData *data) {
gboolean res;
gint64 diff;
if (desired_position == GST_CLOCK_TIME_NONE)
@@ -145,27 +159,39 @@ static void execute_seek (gint64 desired_position, CustomData *data) {
diff = gst_util_get_timestamp () - data->last_seek_time;
if (GST_CLOCK_TIME_IS_VALID (data->last_seek_time) && diff < 500 * GST_MSECOND) {
if (GST_CLOCK_TIME_IS_VALID (data->last_seek_time) && diff < SEEK_MIN_DELAY) {
/* The previous seek was too close, delay this one */
GSource *timeout_source;
if (!GST_CLOCK_TIME_IS_VALID (data->desired_position)) {
timeout_source = g_timeout_source_new (diff / GST_MSECOND);
if (data->desired_position == GST_CLOCK_TIME_NONE) {
/* There was no previous seek scheduled. Set up a timer for some time in the future */
timeout_source = g_timeout_source_new ((SEEK_MIN_DELAY - diff) / GST_MSECOND);
g_source_set_callback (timeout_source, (GSourceFunc)delayed_seek_cb, data, NULL);
g_source_attach (timeout_source, data->context);
g_source_unref (timeout_source);
}
/* Update the desired seek position. If multiple requests are received before it is time
* to perform a seek, only the last one is remembered. */
data->desired_position = desired_position;
GST_DEBUG ("Throttling seek to %" GST_TIME_FORMAT ", will be in %" GST_TIME_FORMAT,
GST_TIME_ARGS (desired_position), GST_TIME_ARGS (500 * GST_MSECOND - diff));
GST_TIME_ARGS (desired_position), GST_TIME_ARGS (SEEK_MIN_DELAY - diff));
} else {
GST_DEBUG ("Setting position to %lld milliseconds", desired_position / GST_MSECOND);
res = gst_element_seek_simple (data->pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, desired_position);
/* Perform the seek now */
GST_DEBUG ("Seeking to %" GST_TIME_FORMAT, GST_TIME_ARGS (desired_position));
data->last_seek_time = gst_util_get_timestamp ();
gst_element_seek_simple (data->pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, desired_position);
data->desired_position = GST_CLOCK_TIME_NONE;
GST_DEBUG ("Seek returned %d", res);
}
}
/* Delayed seek callback. This gets called by the timer set up in the function above. */
static gboolean delayed_seek_cb (CustomData *data) {
GST_DEBUG ("Doing delayed seek to %" GST_TIME_FORMAT, GST_TIME_ARGS (data->desired_position));
execute_seek (data->desired_position, data);
return FALSE;
}
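To make the throttling behaviour above concrete, here is a worked timeline, assuming the SEEK_MIN_DELAY of 500 ms defined earlier (times are illustrative only):
/* Illustrative timeline for the throttling logic above:
 *   t =   0 ms: seek requested; no previous seek, so it executes immediately and last_seek_time = 0.
 *   t = 200 ms: seek requested; only 200 ms since the last seek and no seek is pending, so a timer
 *               is armed for SEEK_MIN_DELAY - 200 = 300 ms from now and desired_position is stored.
 *   t = 350 ms: seek requested; a delayed seek is already pending, so only desired_position is updated.
 *   t = 500 ms: the timer fires, delayed_seek_cb() runs, and execute_seek() performs a single seek
 *               to the last requested position. */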
/* Retrieve errors from the bus and show them on the UI */
static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
GError *err;
gchar *debug_info;
@@ -181,18 +207,20 @@ static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
gst_element_set_state (data->pipeline, GST_STATE_NULL);
}
/* Called when the End Of the Stream is reached. Just move to the beginning of the media and pause. */
static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
set_ui_message (GST_MESSAGE_TYPE_NAME (msg), data);
refresh_ui (data);
data->target_state = GST_STATE_PAUSED;
data->is_live = (gst_element_set_state (data->pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_NO_PREROLL);
execute_seek (0, data);
}
/* Called when the duration of the media changes. Just mark it as unknown, so we re-query it in the next UI refresh. */
static void duration_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
data->duration = GST_CLOCK_TIME_NONE;
}
/* Called when buffering messages are received. We inform the UI about the current buffering level and
* keep the pipeline paused until 100% buffering is reached. At that point, set the desired state. */
static void buffering_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
gint percent;
@@ -201,18 +229,18 @@ static void buffering_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
gst_message_parse_buffering (msg, &percent);
if (percent < 100 && data->target_state >= GST_STATE_PAUSED) {
gchar * message_string = g_strdup_printf ("Buffering %d %%", percent);
gchar * message_string = g_strdup_printf ("Buffering %d%%", percent);
gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
set_ui_message (message_string, data);
g_free (message_string);
} else if (data->target_state >= GST_STATE_PLAYING) {
gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
set_ui_message ("PLAYING", data);
} else if (data->target_state >= GST_STATE_PAUSED) {
set_ui_message ("PAUSED", data);
set_ui_message ("Buffering complete", data);
}
}
/* Called when the clock is lost */
static void clock_lost_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
if (data->target_state >= GST_STATE_PLAYING) {
gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
@@ -220,30 +248,73 @@ static void clock_lost_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
}
}
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
/* Retrieve the video sink's Caps and tell the application about the media size */
static void check_media_size (CustomData *data) {
JNIEnv *env = get_jni_env ();
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
/* Only pay attention to messages coming from the pipeline, not its children */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->pipeline)) {
data->state = new_state;
if (data->state >= GST_STATE_PAUSED && GST_CLOCK_TIME_IS_VALID (data->desired_position))
execute_seek (data->desired_position, data);
GST_DEBUG ("State changed to %s, notifying application", gst_element_state_get_name(new_state));
(*env)->CallVoidMethod (env, data->app, set_current_state_method_id, new_state);
GstElement *video_sink;
GstPad *video_sink_pad;
GstCaps *caps;
GstVideoFormat fmt;
int width;
int height;
/* Retrieve the Caps at the entrance of the video sink */
g_object_get (data->pipeline, "video-sink", &video_sink, NULL);
video_sink_pad = gst_element_get_static_pad (video_sink, "sink");
caps = gst_pad_get_negotiated_caps (video_sink_pad);
if (gst_video_format_parse_caps(caps, &fmt, &width, &height)) {
int par_n, par_d;
if (gst_video_parse_caps_pixel_aspect_ratio (caps, &par_n, &par_d)) {
width = width * par_n / par_d;
}
GST_DEBUG ("Media size is %dx%d, notifying application", width, height);
(*env)->CallVoidMethod (env, data->app, on_media_size_changed_method_id, (jint)width, (jint)height);
if ((*env)->ExceptionCheck (env)) {
GST_ERROR ("Failed to call Java method");
(*env)->ExceptionClear (env);
}
}
gst_caps_unref(caps);
gst_object_unref (video_sink_pad);
gst_object_unref(video_sink);
}
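As a purely illustrative example of what check_media_size() computes (the caps values below are hypothetical, not taken from this commit):
/* Hypothetical negotiated caps on the video sink pad (GStreamer 0.10 syntax):
 *   video/x-raw-yuv, format=(fourcc)I420, width=(int)512, height=(int)288,
 *                    pixel-aspect-ratio=(fraction)3/2, framerate=(fraction)25/1
 * gst_video_format_parse_caps() would report 512x288, and the pixel-aspect-ratio
 * correction gives width = 512 * 3 / 2 = 768, so the Java side would receive
 * onMediaSizeChanged(768, 288). */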
/* Notify UI about pipeline state changes */
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
/* Only pay attention to messages coming from the pipeline, not its children */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->pipeline)) {
data->state = new_state;
gchar *message = g_strdup_printf("State changed to %s", gst_element_state_get_name(new_state));
set_ui_message(message, data);
g_free (message);
/* The Ready to Paused state change is particularly interesting: */
if (old_state == GST_STATE_READY && new_state == GST_STATE_PAUSED) {
/* By now the sink already knows the media size */
check_media_size(data);
/* If there was a scheduled seek, perform it now that we have moved to the Paused state */
if (GST_CLOCK_TIME_IS_VALID (data->desired_position))
execute_seek (data->desired_position, data);
}
}
}
/* Check if all conditions are met to report GStreamer as initialized.
* These conditions will change depending on the application */
static void check_initialization_complete (CustomData *data) {
JNIEnv *env = get_jni_env ();
/* Check if all conditions are met to report GStreamer as initialized.
* These conditions will change depending on the application */
if (!data->initialized && data->native_window && data->main_loop) {
GST_DEBUG ("Initialization complete, notifying application. native_window:%p main_loop:%p", data->native_window,data->main_loop);
GST_DEBUG ("Initialization complete, notifying application. native_window:%p main_loop:%p", data->native_window, data->main_loop);
/* The main loop is running and we received a native window, inform the sink about it */
gst_x_overlay_set_window_handle (GST_X_OVERLAY (data->pipeline), (guintptr)data->native_window);
(*env)->CallVoidMethod (env, data->app, on_gstreamer_initialized_method_id);
if ((*env)->ExceptionCheck (env)) {
GST_ERROR ("Failed to call Java method");
@@ -253,30 +324,40 @@ static void check_initialization_complete (CustomData *data) {
}
}
/* Main method for the native code. This is executed on its own thread. */
static void *app_function (void *userdata) {
JavaVMAttachArgs args;
GstBus *bus;
GstMessage *msg;
CustomData *data = (CustomData *)userdata;
GSource *timeout_source;
GSource *bus_source;
GError *error = NULL;
guint flags;
GST_DEBUG ("Creating pipeline in CustomData at %p", data);
/* create our own GLib Main Context, so we do not interfere with other libraries using GLib */
/* Create our own GLib Main Context and make it the default one */
data->context = g_main_context_new ();
g_main_context_push_thread_default(data->context);
data->pipeline = gst_element_factory_make ("playbin2", NULL);
/* Build pipeline */
data->pipeline = gst_parse_launch("playbin2", &error);
if (error) {
gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message);
g_clear_error (&error);
set_ui_message(message, data);
g_free (message);
return NULL;
}
/* Disable subtitles */
g_object_get (data->pipeline, "flags", &flags, NULL);
/* Disable subtitles for now */
flags &= ~0x00000004;
flags &= ~GST_PLAY_FLAG_TEXT;
g_object_set (data->pipeline, "flags", flags, NULL);
if (data->native_window) {
GST_DEBUG ("Native window already received, notifying the pipeline about it.");
gst_x_overlay_set_window_handle (GST_X_OVERLAY (data->pipeline), (guintptr)data->native_window);
}
/* Set the pipeline to READY, so it can already accept a window handle, if we have one */
data->target_state = GST_STATE_READY;
gst_element_set_state(data->pipeline, GST_STATE_READY);
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
bus = gst_element_get_bus (data->pipeline);
@@ -308,6 +389,7 @@ static void *app_function (void *userdata) {
data->main_loop = NULL;
/* Free resources */
g_main_context_pop_thread_default(data->context);
g_main_context_unref (data->context);
data->target_state = GST_STATE_NULL;
gst_element_set_state (data->pipeline, GST_STATE_NULL);
@@ -319,9 +401,10 @@ static void *app_function (void *userdata) {
/*
* Java Bindings
*/
void gst_native_init (JNIEnv* env, jobject thiz) {
/* Instruct the native code to create its internal data structure, pipeline and thread */
static void gst_native_init (JNIEnv* env, jobject thiz) {
CustomData *data = g_new0 (CustomData, 1);
data->duration = GST_CLOCK_TIME_NONE;
data->desired_position = GST_CLOCK_TIME_NONE;
data->last_seek_time = GST_CLOCK_TIME_NONE;
SET_CUSTOM_DATA (env, thiz, custom_data_field_id, data);
@@ -333,7 +416,8 @@ void gst_native_init (JNIEnv* env, jobject thiz) {
pthread_create (&gst_app_thread, NULL, &app_function, data);
}
void gst_native_finalize (JNIEnv* env, jobject thiz) {
/* Quit the main loop, remove the native thread and free resources */
static void gst_native_finalize (JNIEnv* env, jobject thiz) {
CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
if (!data) return;
GST_DEBUG ("Quitting main loop...");
@@ -348,6 +432,7 @@ void gst_native_finalize (JNIEnv* env, jobject thiz) {
GST_DEBUG ("Done finalizing");
}
/* Set playbin2's URI */
void gst_native_set_uri (JNIEnv* env, jobject thiz, jstring uri) {
CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
if (!data || !data->pipeline) return;
@@ -361,7 +446,8 @@ void gst_native_set_uri (JNIEnv* env, jobject thiz, jstring uri) {
data->is_live = (gst_element_set_state (data->pipeline, data->target_state) == GST_STATE_CHANGE_NO_PREROLL);
}
void gst_native_play (JNIEnv* env, jobject thiz) {
/* Set pipeline to PLAYING state */
static void gst_native_play (JNIEnv* env, jobject thiz) {
CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
if (!data) return;
GST_DEBUG ("Setting state to PLAYING");
@@ -369,7 +455,8 @@ void gst_native_play (JNIEnv* env, jobject thiz) {
data->is_live = (gst_element_set_state (data->pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_NO_PREROLL);
}
void gst_native_pause (JNIEnv* env, jobject thiz) {
/* Set pipeline to PAUSED state */
static void gst_native_pause (JNIEnv* env, jobject thiz) {
CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
if (!data) return;
GST_DEBUG ("Setting state to PAUSED");
@@ -377,76 +464,79 @@ void gst_native_pause (JNIEnv* env, jobject thiz) {
data->is_live = (gst_element_set_state (data->pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_NO_PREROLL);
}
/* Instruct the pipeline to seek to a different position */
void gst_native_set_position (JNIEnv* env, jobject thiz, int milliseconds) {
CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
if (!data) return;
gint64 desired_position = (gint64)(milliseconds * GST_MSECOND);
if (data->state == GST_STATE_PLAYING || data->state == GST_STATE_PAUSED) {
if (data->state >= GST_STATE_PAUSED) {
execute_seek(desired_position, data);
} else {
GST_DEBUG ("Scheduling seek to %d milliseconds for later", milliseconds);
GST_DEBUG ("Scheduling seek to %" GST_TIME_FORMAT " for later", GST_TIME_ARGS (desired_position));
data->desired_position = desired_position;
}
}
jboolean gst_class_init (JNIEnv* env, jclass klass) {
/* Static class initializer: retrieve method and field IDs */
static jboolean gst_native_class_init (JNIEnv* env, jclass klass) {
custom_data_field_id = (*env)->GetFieldID (env, klass, "native_custom_data", "J");
GST_DEBUG ("The FieldID for the native_custom_data field is %p", custom_data_field_id);
set_message_method_id = (*env)->GetMethodID (env, klass, "setMessage", "(Ljava/lang/String;)V");
GST_DEBUG ("The MethodID for the setMessage method is %p", set_message_method_id);
set_current_position_method_id = (*env)->GetMethodID (env, klass, "setCurrentPosition", "(II)V");
GST_DEBUG ("The MethodID for the setCurrentPosition method is %p", set_current_position_method_id);
on_gstreamer_initialized_method_id = (*env)->GetMethodID (env, klass, "onGStreamerInitialized", "()V");
GST_DEBUG ("The MethodID for the onGStreamerInitialized method is %p", on_gstreamer_initialized_method_id);
set_current_state_method_id = (*env)->GetMethodID (env, klass, "setCurrentState", "(I)V");
GST_DEBUG ("The MethodID for the setCurrentState method is %p", set_current_state_method_id);
on_media_size_changed_method_id = (*env)->GetMethodID (env, klass, "onMediaSizeChanged", "(II)V");
if (!custom_data_field_id || !set_message_method_id || !set_current_position_method_id ||
!on_gstreamer_initialized_method_id || !set_current_state_method_id) {
GST_ERROR ("The calling class does not implement all necessary interface methods");
if (!custom_data_field_id || !set_message_method_id || !on_gstreamer_initialized_method_id ||
!on_media_size_changed_method_id || !set_current_position_method_id) {
/* We emit this message through the Android log instead of the GStreamer log because the latter
* has not been initialized yet.
*/
__android_log_print (ANDROID_LOG_ERROR, "tutorial-4", "The calling class does not implement all necessary interface methods");
return JNI_FALSE;
}
return JNI_TRUE;
}
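For readers unfamiliar with JNI signature strings, the lookups above decode as follows (standard JNI notation, noted here only as a cheat-sheet):
/* JNI signature notation used above:
 *   "J"                      a field of type long
 *   "(Ljava/lang/String;)V"  a method taking one String and returning void
 *   "(II)V"                  a method taking two ints and returning void
 *   "(I)V"                   a method taking one int and returning void
 *   "()V"                    a method taking no arguments and returning void */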
void gst_native_surface_init (JNIEnv *env, jobject thiz, jobject surface) {
static void gst_native_surface_init (JNIEnv *env, jobject thiz, jobject surface) {
CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
if (!data) return;
GST_DEBUG ("Received surface %p", surface);
if (data->native_window) {
GST_DEBUG ("Releasing previous native window %p", data->native_window);
ANativeWindow_release (data->native_window);
}
data->native_window = ANativeWindow_fromSurface(env, surface);
GST_DEBUG ("Got Native Window %p", data->native_window);
ANativeWindow *new_native_window = ANativeWindow_fromSurface(env, surface);
GST_DEBUG ("Received surface %p (native window %p)", surface, new_native_window);
if (data->native_window) {
ANativeWindow_release (data->native_window);
if (data->native_window == new_native_window) {
GST_DEBUG ("New native window is the same as the previous one", data->native_window);
if (data->pipeline) {
GST_DEBUG ("Pipeline already created, notifying the it about the native window.");
gst_x_overlay_set_window_handle (GST_X_OVERLAY (data->pipeline), (guintptr)data->native_window);
} else {
GST_DEBUG ("Pipeline not created yet, it will later be notified about the native window.");
gst_x_overlay_expose(GST_X_OVERLAY (data->pipeline));
gst_x_overlay_expose(GST_X_OVERLAY (data->pipeline));
}
return;
} else {
GST_DEBUG ("Released previous native window %p", data->native_window);
data->initialized = FALSE;
}
}
data->native_window = new_native_window;
check_initialization_complete (data);
}
void gst_native_surface_finalize (JNIEnv *env, jobject thiz) {
static void gst_native_surface_finalize (JNIEnv *env, jobject thiz) {
CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
if (!data) {
GST_WARNING ("Received surface finalize but there is no CustomData. Ignoring.");
return;
}
if (!data) return;
GST_DEBUG ("Releasing Native Window %p", data->native_window);
if (data->pipeline) {
gst_x_overlay_set_window_handle (GST_X_OVERLAY (data->pipeline), (guintptr)NULL);
gst_element_set_state (data->pipeline, GST_STATE_NULL);
gst_element_set_state (data->pipeline, GST_STATE_READY);
}
ANativeWindow_release (data->native_window);
data->native_window = NULL;
data->initialized = FALSE;
}
/* List of implemented native methods */
static JNINativeMethod native_methods[] = {
{ "nativeInit", "()V", (void *) gst_native_init},
{ "nativeFinalize", "()V", (void *) gst_native_finalize},
@@ -454,11 +544,12 @@ static JNINativeMethod native_methods[] = {
{ "nativePlay", "()V", (void *) gst_native_play},
{ "nativePause", "()V", (void *) gst_native_pause},
{ "nativeSetPosition", "(I)V", (void*) gst_native_set_position},
{ "classInit", "()Z", (void *) gst_class_init},
{ "nativeSurfaceInit", "(Ljava/lang/Object;)V", (void *) gst_native_surface_init},
{ "nativeSurfaceFinalize", "()V", (void *) gst_native_surface_finalize}
{ "nativeSurfaceFinalize", "()V", (void *) gst_native_surface_finalize},
{ "nativeClassInit", "()Z", (void *) gst_native_class_init}
};
/* Library initializer */
jint JNI_OnLoad(JavaVM *vm, void *reserved) {
JNIEnv *env = NULL;
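The hunk is cut off here. For orientation only, JNI_OnLoad in these Android tutorials typically continues along the following lines; this is a sketch under that assumption, not the literal committed code (the class path is taken from the package declared in Tutorial5.java below):
/* Sketch of the usual continuation (assumed, not shown in this diff): */
java_vm = vm;
if ((*vm)->GetEnv (vm, (void **)&env, JNI_VERSION_1_4) != JNI_OK) {
  __android_log_print (ANDROID_LOG_ERROR, "tutorial-5", "Could not retrieve JNIEnv");
  return 0;
}
jclass klass = (*env)->FindClass (env, "com/gst_sdk_tutorials/tutorial_5/Tutorial5");
(*env)->RegisterNatives (env, klass, native_methods, G_N_ELEMENTS (native_methods));
pthread_key_create (&current_jni_env, detach_current_thread);
return JNI_VERSION_1_4;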

View file

@@ -2,16 +2,21 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:gravity="center_vertical"
android:orientation="vertical" >
<TextView
android:id="@+id/textview_message"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
android:layout_height="wrap_content"
android:layout_marginBottom="16dip"
android:gravity="center_horizontal" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginBottom="16dip"
android:gravity="center_horizontal"
android:orientation="horizontal" >
<ImageButton
@@ -37,6 +42,14 @@
android:contentDescription="@string/button_select"
android:src="@android:drawable/ic_media_next"
android:text="@string/button_select" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginBottom="16dip"
android:gravity="center_horizontal"
android:orientation="horizontal" >
<TextView
android:id="@+id/textview_time"
@@ -55,9 +68,10 @@
android:indeterminate="false" />
</LinearLayout>
<SurfaceView
<com.gst_sdk_tutorials.tutorial_5.GStreamerSurfaceView
android:id="@+id/surface_video"
android:layout_width="wrap_content"
android:layout_height="wrap_content" />
android:layout_height="wrap_content"
android:layout_gravity="center_vertical|center_horizontal" />
</LinearLayout>

View file

@@ -0,0 +1,85 @@
package com.gst_sdk_tutorials.tutorial_5;
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
// A simple SurfaceView whose width and height can be set from the outside
public class GStreamerSurfaceView extends SurfaceView {
public int media_width = 320;
public int media_height = 240;
// Mandatory constructors, they do not do much
public GStreamerSurfaceView(Context context, AttributeSet attrs,
int defStyle) {
super(context, attrs, defStyle);
}
public GStreamerSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public GStreamerSurfaceView (Context context) {
super(context);
}
// Called by the layout manager to find out our size and give us some rules.
// We will try to maximize our size, and preserve the media's aspect ratio if
// we are given the freedom to do so.
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
int width = 0, height = 0;
int wmode = View.MeasureSpec.getMode(widthMeasureSpec);
int hmode = View.MeasureSpec.getMode(heightMeasureSpec);
int wsize = View.MeasureSpec.getSize(widthMeasureSpec);
int hsize = View.MeasureSpec.getSize(heightMeasureSpec);
Log.i ("GStreamer", "onMeasure called with " + media_width + "x" + media_height);
// Obey width rules
switch (wmode) {
case View.MeasureSpec.AT_MOST:
if (hmode == View.MeasureSpec.EXACTLY) {
width = Math.min(hsize * media_width / media_height, wsize);
break;
}
case View.MeasureSpec.EXACTLY:
width = wsize;
break;
case View.MeasureSpec.UNSPECIFIED:
width = media_width;
}
// Obey height rules
switch (hmode) {
case View.MeasureSpec.AT_MOST:
if (wmode == View.MeasureSpec.EXACTLY) {
height = Math.min(wsize * media_height / media_width, hsize);
break;
}
case View.MeasureSpec.EXACTLY:
height = hsize;
break;
case View.MeasureSpec.UNSPECIFIED:
height = media_height;
}
// Finally, calculate the best size when both axes are free
if (hmode == View.MeasureSpec.AT_MOST && wmode == View.MeasureSpec.AT_MOST) {
int correct_height = width * media_height / media_width;
int correct_width = height * media_width / media_height;
if (correct_height < height)
height = correct_height;
else
width = correct_width;
}
// Obey minimum size
width = Math.max (getSuggestedMinimumWidth(), width);
height = Math.max (getSuggestedMinimumHeight(), height);
setMeasuredDimension(width, height);
}
}
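A quick worked example of the measurement logic above (numbers chosen only for illustration): with media_width x media_height = 320x240 and both MeasureSpecs AT_MOST with wsize = 1080 and hsize = 600, the two switches first set width = 1080 and height = 600; the final aspect-ratio pass then computes correct_height = 1080 * 240 / 320 = 810 and correct_width = 600 * 320 / 240 = 800, and since 810 is not smaller than 600 the width is reduced to 800 instead, yielding an 800x600 view that fills the available height while preserving the 4:3 media aspect ratio.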

View file

@@ -4,7 +4,6 @@ import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
import com.gstreamer.GStreamer;
import com.lamerman.FileDialog;
import com.lamerman.SelectionMode;
@@ -25,37 +24,40 @@ import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.TextView;
import android.widget.Toast;
import com.gstreamer.GStreamer;
public class Tutorial5 extends Activity implements SurfaceHolder.Callback, OnSeekBarChangeListener {
private native void nativeInit();
private native void nativeFinalize();
private native void nativeSetUri(String uri);
private native void nativePlay();
private native void nativePause();
private native void nativeSetPosition(int milliseconds);
private static native boolean classInit();
private native void nativeSurfaceInit(Object surface);
private native void nativeSurfaceFinalize();
private long native_custom_data;
private native void nativeInit(); // Initialize native code, build pipeline, etc
private native void nativeFinalize(); // Destroy pipeline and shutdown native code
private native void nativeSetUri(String uri); // Set the URI of the media to play
private native void nativePlay(); // Set pipeline to PLAYING
private native void nativeSetPosition(int milliseconds); // Seek to the indicated position, in milliseconds
private native void nativePause(); // Set pipeline to PAUSED
private static native boolean nativeClassInit(); // Initialize native class: cache Method IDs for callbacks
private native void nativeSurfaceInit(Object surface); // A new surface is available
private native void nativeSurfaceFinalize(); // Surface about to be destroyed
private long native_custom_data; // Native code will use this to keep private data
private boolean is_playing_desired;
private int position;
private int duration;
private boolean is_local_media;
private int desired_position;
private boolean is_playing_desired; // Whether the user asked to go to PLAYING
private int position; // Current position, reported by native code
private int duration; // Current clip duration, reported by native code
private boolean is_local_media; // Whether this clip is stored locally or is being streamed
private int desired_position; // Position where the user wants to seek to
private String mediaUri; // URI of the clip being played
private Bundle initialization_data;
private final String defaultMediaUri = "http://docs.gstreamer.com/media/sintel_trailer-368p.ogv";
static private final int PICK_FILE_CODE = 1;
private PowerManager.WakeLock wake_lock;
private String mediaUri = "http://docs.gstreamer.com/media/sintel_trailer-480p.ogv";
static private final int PICK_FILE_CODE = 1;
/* Called when the activity is first created. */
// Called when the activity is first created.
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
// Initialize GStreamer and warn if it fails
try {
GStreamer.init(this);
} catch (Exception e) {
@@ -106,16 +108,33 @@ public class Tutorial5 extends Activity implements SurfaceHolder.Callback, OnSee
SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);
sb.setOnSeekBarChangeListener(this);
initialization_data = savedInstanceState;
is_local_media = false;
// Retrieve our previous state, or initialize it to default values
if (savedInstanceState != null) {
is_playing_desired = savedInstanceState.getBoolean("playing");
position = savedInstanceState.getInt("position");
duration = savedInstanceState.getInt("duration");
mediaUri = savedInstanceState.getString("mediaUri");
Log.i ("GStreamer", "Activity created with saved state:");
} else {
is_playing_desired = false;
position = duration = 0;
mediaUri = defaultMediaUri;
Log.i ("GStreamer", "Activity created with no saved state:");
}
is_local_media = false;
Log.i ("GStreamer", " playing:" + is_playing_desired + " position:" + position +
" duration: " + duration + " uri: " + mediaUri);
// Start with disabled buttons, until native code is initialized
this.findViewById(R.id.button_play).setEnabled(false);
this.findViewById(R.id.button_stop).setEnabled(false);
nativeInit();
}
protected void onSaveInstanceState (Bundle outState) {
Log.d ("GStreamer", "Saving state, playing:" + is_playing_desired + " position:" + position + " uri: " + mediaUri);
Log.d ("GStreamer", "Saving state, playing:" + is_playing_desired + " position:" + position +
" duration: " + duration + " uri: " + mediaUri);
outState.putBoolean("playing", is_playing_desired);
outState.putInt("position", position);
outState.putInt("duration", duration);
@@ -129,7 +148,7 @@ public class Tutorial5 extends Activity implements SurfaceHolder.Callback, OnSee
super.onDestroy();
}
/* Called from native code */
// Called from native code. This sets the content of the TextView from the UI thread.
private void setMessage(final String message) {
final TextView tv = (TextView) this.findViewById(R.id.textview_message);
runOnUiThread (new Runnable() {
@@ -139,27 +158,21 @@ public class Tutorial5 extends Activity implements SurfaceHolder.Callback, OnSee
});
}
// Set the URI to play, and record whether it is a local or remote file
private void setMediaUri() {
nativeSetUri (mediaUri);
if (mediaUri.startsWith("file://")) is_local_media = true;
is_local_media = mediaUri.startsWith("file://");
}
/* Called from native code */
// Called from native code. Native code calls this once it has created its pipeline and
// the main loop is running, so it is ready to accept commands.
private void onGStreamerInitialized () {
if (initialization_data != null) {
is_playing_desired = initialization_data.getBoolean("playing");
int milliseconds = initialization_data.getInt("position");
Log.i ("GStreamer", "Restoring state, playing:" + is_playing_desired + " position:" + milliseconds + " ms.");
mediaUri = initialization_data.getString ("mediaUri");
/* Actually, move to one millisecond in the future. Otherwise, due to rounding errors between the
* milliseconds used here and the nanoseconds used by GStreamer, we would be jumping a bit behind
* where we were before. This, combined with seeking to keyframe positions, would skip one keyframe
* backwards on each iteration.
*/
nativeSetPosition(milliseconds + 1);
}
Log.i ("GStreamer", "GStreamer initialized:");
Log.i ("GStreamer", " playing:" + is_playing_desired + " position:" + position + " uri: " + mediaUri);
// Restore previous playing state
setMediaUri ();
nativeSetPosition (position);
if (is_playing_desired) {
nativePlay();
wake_lock.acquire();
@@ -167,11 +180,19 @@ public class Tutorial5 extends Activity implements SurfaceHolder.Callback, OnSee
nativePause();
wake_lock.release();
}
// Re-enable buttons, now that GStreamer is initialized
final Activity activity = this;
runOnUiThread(new Runnable() {
public void run() {
activity.findViewById(R.id.button_play).setEnabled(true);
activity.findViewById(R.id.button_stop).setEnabled(true);
}
});
}
/* The text widget acts as an slave for the seek bar, so it reflects what the seek bar shows, whether
* it is an actual pipeline position or the position the user is currently dragging to.
*/
// The text widget acts as a slave for the seek bar, so it reflects what the seek bar shows, whether
// it is an actual pipeline position or the position the user is currently dragging to.
private void updateTimeWidget () {
final TextView tv = (TextView) this.findViewById(R.id.textview_time);
final SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);
@@ -183,11 +204,11 @@ public class Tutorial5 extends Activity implements SurfaceHolder.Callback, OnSee
tv.setText(message);
}
/* Called from native code */
// Called from native code
private void setCurrentPosition(final int position, final int duration) {
final SeekBar sb = (SeekBar) this.findViewById(R.id.seek_bar);
/* Ignore position messages from the pipeline if the seek bar is being dragged */
// Ignore position messages from the pipeline if the seek bar is being dragged
if (sb.isPressed()) return;
runOnUiThread (new Runnable() {
@@ -201,29 +222,10 @@ public class Tutorial5 extends Activity implements SurfaceHolder.Callback, OnSee
this.duration = duration;
}
/* Called from native code */
private void setCurrentState (int state) {
Log.d ("GStreamer", "State has changed to " + state);
switch (state) {
case 1:
setMessage ("NULL");
break;
case 2:
setMessage ("READY");
break;
case 3:
setMessage ("PAUSED");
break;
case 4:
setMessage ("PLAYING");
break;
}
}
static {
System.loadLibrary("gstreamer_android");
System.loadLibrary("tutorial-5");
classInit();
nativeClassInit();
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
@@ -242,24 +244,38 @@ public class Tutorial5 extends Activity implements SurfaceHolder.Callback, OnSee
nativeSurfaceFinalize ();
}
// Called from native code when the size of the media changes or is first detected.
// Inform the video surface about the new size and recalculate the layout.
private void onMediaSizeChanged (int width, int height) {
Log.i ("GStreamer", "Media size changed to " + width + "x" + height);
final GStreamerSurfaceView gsv = (GStreamerSurfaceView) this.findViewById(R.id.surface_video);
gsv.media_width = width;
gsv.media_height = height;
runOnUiThread(new Runnable() {
public void run() {
gsv.requestLayout();
}
});
}
// The Seek Bar thumb has moved, either because the user dragged it or we have called setProgress()
public void onProgressChanged(SeekBar sb, int progress, boolean fromUser) {
if (fromUser == false) return;
desired_position = progress;
/* If this is a local file, allow scrub seeking, this is, seek soon as the slider
* is moved.
*/
// If this is a local file, allow scrub seeking; that is, seek as soon as the slider is moved.
if (is_local_media) nativeSetPosition(desired_position);
updateTimeWidget();
}
// The user started dragging the Seek Bar thumb
public void onStartTrackingTouch(SeekBar sb) {
nativePause();
}
// The user released the Seek Bar thumb
public void onStopTrackingTouch(SeekBar sb) {
/* If this is a remote file, scrub seeking is probably not going to work smoothly enough.
* Therefore, perform only the seek when the slider is released.
*/
// If this is a remote file, scrub seeking is probably not going to work smoothly enough.
// Therefore, perform only the seek when the slider is released.
if (!is_local_media) nativeSetPosition(desired_position);
if (is_playing_desired) nativePlay();
}