<chapter id="chapter-queryevents">
<title>Position tracking and seeking</title>
<para>
So far, we've looked at how to create a pipeline to do media processing
and how to make it run. Most application developers will be interested
in providing feedback to the user on media progress. Media players, for
example, will want to show a slider indicating the position in the song,
and usually also a label showing the stream length. Transcoding
applications will want to show a progress bar indicating what percentage
of the task has been completed. &GStreamer; has built-in support for
doing all this using a concept known as <emphasis>querying</emphasis>.
Since seeking is very similar, it will be discussed here as well. Seeking
is done using the concept of <emphasis>events</emphasis>.
</para>
<sect1 id="section-querying">
<title>Querying: getting the position or length of a stream</title>
<para>
Querying is defined as requesting a specific stream property related
to progress tracking. This includes getting the length of a stream (if
available) or getting the current position. Those stream properties
can be retrieved in various formats such as time, audio samples, video
frames or bytes. The function most commonly used for this is
<function>gst_element_query ()</function>, although some convenience
wrappers are provided as well (such as
<function>gst_element_query_position ()</function>). You can generally
query the pipeline directly, and it will figure out the internal
details for you, such as which element to query.
</para>
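<para>
As a minimal sketch, assuming the 0.10-style
<function>gst_element_query_duration ()</function> convenience wrapper
(check the API reference of your &GStreamer; version for the exact
signature), a one-shot query for the total stream length could look
like this:
</para>
<programlisting>
/* hypothetical helper: print the total length of the stream, if known */
static void
print_duration (GstElement *pipeline)
{
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 len;

  if (gst_element_query_duration (pipeline, &fmt, &len))
    g_print ("Length: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (len));
  else
    g_print ("Length not (yet) known\n");
}
</programlisting>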
<para>
Internally, queries are sent to the sinks and
<quote>dispatched</quote> backwards until one element can handle them;
that element's result is then returned to the caller. Usually, that
element is the demuxer, although with live sources (such as a webcam)
it is the source itself.
</para>
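<para>
The lower-level <function>gst_element_query ()</function> path takes a
<classname>GstQuery</classname> object instead. As a rough sketch,
again assuming the 0.10-style API, querying the current position that
way could look as follows; the full example below uses the convenience
wrapper in a periodic callback instead:
</para>
<programlisting>
/* hypothetical helper: query the current position with a GstQuery object */
static void
print_position_with_query (GstElement *pipeline)
{
  GstQuery *query;
  GstFormat fmt;
  gint64 pos;

  query = gst_query_new_position (GST_FORMAT_TIME);
  if (gst_element_query (pipeline, query)) {
    gst_query_parse_position (query, &fmt, &pos);
    g_print ("Position: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (pos));
  }
  gst_query_unref (query);
}
</programlisting>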
<programlisting><!-- example-begin query.c a -->
#include <gst/gst.h>
<!-- example-end query.c a -->
<!-- example-begin query.c b --><!--
static gboolean
my_bus_callback (GstBus     *bus,
                 GstMessage *message,
                 gpointer    data)
{
  GMainLoop *loop = data;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR: {
      GError *err;
      gchar *debug;

      gst_message_parse_error (message, &err, &debug);
      g_print ("Error: %s\n", err->message);
      g_error_free (err);
      g_free (debug);

      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_EOS:
      /* end-of-stream */
      g_main_loop_quit (loop);
      break;
    default:
      break;
  }

  /* returning TRUE keeps the bus watch installed */
  return TRUE;
}
--><!-- example-end query.c b -->
<!-- example-begin query.c c -->
/* periodically called from a GLib timeout; prints the current position
 * and the total length of the stream */
static gboolean
cb_print_position (GstElement *pipeline)
{
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 pos, len;

  if (gst_element_query_position (pipeline, &fmt, &pos) &&
      gst_element_query_duration (pipeline, &fmt, &len)) {
    g_print ("Time: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
             GST_TIME_ARGS (pos), GST_TIME_ARGS (len));
  }

  /* call me again */
  return TRUE;
}

gint
main (gint   argc,
      gchar *argv[])
{
  GstElement *pipeline;
<!-- example-end query.c c -->
[..]<!-- example-begin query.c d --><!--
  GMainLoop *loop;
  gchar *l;

  /* init */
  gst_init (&argc, &argv);

  /* args */
  if (argc != 2) {
    g_print ("Usage: %s <filename>\n", argv[0]);
    return -1;
  }

  /* build pipeline, the easy way */
  l = g_strdup_printf ("filesrc location=\"%s\" ! oggdemux ! vorbisdec ! "
                       "audioconvert ! audioresample ! alsasink",
                       argv[1]);
  pipeline = gst_parse_launch (l, NULL);
  g_free (l);

  /* the main loop is handed to the bus callback so that it can quit
   * the application on errors or end-of-stream */
  loop = g_main_loop_new (NULL, FALSE);
  gst_bus_add_watch (gst_pipeline_get_bus (GST_PIPELINE (pipeline)),
      my_bus_callback, loop);

  /* play */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
--><!-- example-end query.c d -->
<!-- example-begin query.c e -->
  /* run pipeline */
  g_timeout_add (200, (GSourceFunc) cb_print_position, pipeline);
  g_main_loop_run (loop);
<!-- example-end query.c e -->
[..]<!-- example-begin query.c f --><!--
  /* clean up */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
--><!-- example-end query.c f -->
<!-- example-begin query.c g -->
}
<!-- example-end query.c g --></programlisting>
</sect1>
<sect1 id="section-eventsseek">
<title>Events: seeking (and more)</title>
<para>
Events work in a very similar way to queries. Dispatching, for
example, works exactly the same for events (and also has the same
limitations), and events can similarly be sent to the toplevel
pipeline, which will figure out everything for you. Although there are
more ways in which applications and elements can interact using
events, we will only focus on seeking here. This is done using the
seek event. A seek event contains a seek offset, a seek method (which
indicates what the offset is relative to), a seek format (which is the
unit of the offset, e.g. time, audio samples, video frames or bytes)
and optionally a set of seeking-related flags (e.g. whether internal
buffers should be flushed). The behaviour of a seek is also wrapped in
the function <function>gst_element_seek ()</function>.
</para>
<programlisting>
static void
seek_to_time (GstElement *pipeline,
              gint64      time_nanoseconds)
{
  gst_element_seek (pipeline, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
                    GST_SEEK_TYPE_SET, time_nanoseconds,
                    GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
}
</programlisting>
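<para>
Under the hood, <function>gst_element_seek ()</function> constructs a
seek event and sends it to the pipeline. If you want more control (for
example over the playback rate or a stop position), you can build the
event yourself with <function>gst_event_new_seek ()</function> and send
it with <function>gst_element_send_event ()</function>. The following
is a rough sketch, again assuming the 0.10-style API; the helper name
is only illustrative:
</para>
<programlisting>
/* hypothetical helper: seek by constructing the seek event ourselves */
static gboolean
seek_to_time_full (GstElement *pipeline,
                   gint64      time_nanoseconds)
{
  GstEvent *event;

  /* normal playback rate, flush internal buffers, seek to an absolute
   * time, no stop position (play until the end) */
  event = gst_event_new_seek (1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
      GST_SEEK_TYPE_SET, time_nanoseconds,
      GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);

  /* gst_element_send_event () takes ownership of the event */
  return gst_element_send_event (pipeline, event);
}
</programlisting>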
<para>
It is possible to do multiple seeks at short intervals, for example as
a direct response to slider movement. After a seek, the pipeline will
internally be paused (if it was playing), the position will be reset,
and the demuxers and decoders will decode from the new position
onwards; this continues until all sinks have data again. If the
pipeline was originally playing, it will be set back to playing, too.
Since the new position is immediately available in a video output, you
will see the new frame even if your pipeline is not in the playing
state.
</para>
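<para>
As an example of responding to slider movement, a handler could
translate the slider position (a fraction between 0 and 1) into an
absolute time and reuse the <function>seek_to_time ()</function>
function from above. The sketch below assumes the 0.10-style
<function>gst_element_query_duration ()</function>; the handler name is
only illustrative:
</para>
<programlisting>
/* hypothetical slider handler: fraction is between 0.0 and 1.0 */
static void
on_slider_moved (GstElement *pipeline,
                 gdouble     fraction)
{
  GstFormat fmt = GST_FORMAT_TIME;
  gint64 len;

  if (gst_element_query_duration (pipeline, &fmt, &len))
    seek_to_time (pipeline, (gint64) (fraction * len));
}
</programlisting>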
</sect1>
</chapter>