forked from mirrors/gstreamer-rs
Add verbose documentation to the examples
Added verbose documentation to all of the repository's examples.
parent db6a6543b4
commit 466e02df3a
22 changed files with 879 additions and 43 deletions

examples/src/bin/appsink.rs

@@ -1,3 +1,15 @@
+// This example demonstrates the use of the appsink element.
+// It operates the following pipeline:
+
+// {audiotestsrc} - {appsink}
+
+// The application specifies what format it wants to handle. This format
+// is applied by calling set_caps on the appsink. Now it's the audiotestsrc's
+// task to provide this data format. If the element connected to the appsink's
+// sink-pad were not able to provide what we ask it to, this would fail.
+// This is the format we request:
+// Audio / Signed 16bit / 1 channel / arbitrary sample rate
+
 #[macro_use]
 extern crate gstreamer as gst;
 use gst::prelude::*;
@@ -54,6 +66,10 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
         .dynamic_cast::<gst_app::AppSink>()
         .expect("Sink element is expected to be an appsink!");

+    // Tell the appsink what format we want. It will then be the audiotestsrc's job to
+    // provide the format we request.
+    // This can be set after linking the two objects, because format negotiation between
+    // both elements will happen during pre-rolling of the pipeline.
     appsink.set_caps(&gst::Caps::new_simple(
         "audio/x-raw",
         &[
@@ -64,9 +80,13 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
         ],
     ));

+    // Getting data out of the appsink is done by setting callbacks on it.
+    // The appsink will then call those handlers as soon as data is available.
     appsink.set_callbacks(
         gst_app::AppSinkCallbacks::new()
+            // Add a handler to the "new-sample" signal.
             .new_sample(|appsink| {
+                // Pull the sample in question out of the appsink's buffer.
                 let sample = match appsink.pull_sample() {
                     None => return gst::FlowReturn::Eos,
                     Some(sample) => sample,
@@ -84,6 +104,13 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
                     return gst::FlowReturn::Error;
                 };

+                // At this point, buffer is only a reference to an existing memory region somewhere.
+                // When we want to access its content, we have to map it while requesting the required
+                // mode of access (read or read/write).
+                // This type of abstraction is necessary, because the buffer in question might not be
+                // in the machine's main memory itself, but rather in the GPU's memory.
+                // So mapping the buffer makes the underlying memory region accessible to us.
+                // See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
                 let map = if let Some(map) = buffer.map_readable() {
                     map
                 } else {
@@ -96,6 +123,9 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
                     return gst::FlowReturn::Error;
                 };

+                // We know what format the data in the memory region has, since we requested
+                // it by setting the appsink's caps. So what we do here is interpret the
+                // memory region we mapped as an array of signed 16 bit integers.
                 let samples = if let Ok(samples) = map.as_slice_of::<i16>() {
                     samples
                 } else {
@@ -108,6 +138,8 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
                     return gst::FlowReturn::Error;
                 };

+                // For each buffer (= chunk of samples), we calculate the root mean square:
+                // https://en.wikipedia.org/wiki/Root_mean_square
                 let sum: f64 = samples
                     .iter()
                     .map(|sample| {
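
For reference, the RMS computation this new-sample handler performs can be sketched self-contained in plain Rust, without any GStreamer types (the function name and sample values below are illustrative, not part of the commit):

// Minimal sketch: root mean square of signed 16 bit samples, normalized to [0, 1].
fn rms(samples: &[i16]) -> f64 {
    assert!(!samples.is_empty());
    let sum: f64 = samples
        .iter()
        .map(|&sample| {
            let f = f64::from(sample) / f64::from(i16::MAX);
            f * f
        })
        .sum();
    (sum / samples.len() as f64).sqrt()
}

fn main() {
    // A full-scale square wave has an RMS of 1.0.
    println!("rms = {}", rms(&[i16::MAX, -i16::MAX, i16::MAX, -i16::MAX]));
}
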
examples/src/bin/appsrc.rs

@@ -1,3 +1,15 @@
+// This example shows how to use the appsrc element.
+// It operates the following pipeline:
+
+// {appsrc} - {videoconvert} - {autovideosink}
+
+// The application itself provides the video data for the pipeline, by producing
+// it in the callback of the appsrc element. Videoconvert makes sure that the
+// format the application provides can be displayed by the autovideosink
+// at the end of the pipeline.
+// The application provides data of the following format:
+// Video / BGRx (4 bytes per pixel) / 2 fps
+
 extern crate gstreamer as gst;
 use gst::prelude::*;
 extern crate gstreamer_app as gst_app;
@@ -53,12 +65,15 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
         .dynamic_cast::<gst_app::AppSrc>()
         .expect("Source element is expected to be an appsrc!");

-    let info = gst_video::VideoInfo::new(gst_video::VideoFormat::Bgrx, WIDTH as u32, HEIGHT as u32)
+    // Specify the format we want to provide as application into the pipeline
+    // by creating a video info with the given format and creating caps from it for the appsrc element.
+    let video_info =
+        gst_video::VideoInfo::new(gst_video::VideoFormat::Bgrx, WIDTH as u32, HEIGHT as u32)
         .fps(gst::Fraction::new(2, 1))
         .build()
         .expect("Failed to create video info");

-    appsrc.set_caps(&info.to_caps().unwrap());
+    appsrc.set_caps(&video_info.to_caps().unwrap());
     appsrc.set_property_format(gst::Format::Time);

     // Our frame counter, that is stored in the mutable environment
@@ -70,8 +85,15 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
     // need-data callback.
     let mut i = 0;
     appsrc.set_callbacks(
+        // Since our appsrc element operates in pull mode (it asks us to provide data),
+        // we add a handler for the need-data callback and provide new data from there.
+        // In our case, we told gstreamer that we do 2 frames per second. While the
+        // buffers of all elements of the pipeline are still empty, this will be called
+        // a couple of times until all of them are filled. After this initial period,
+        // this handler will be called (on average) twice per second.
         gst_app::AppSrcCallbacks::new()
             .need_data(move |appsrc, _| {
+                // We only produce 100 frames.
                 if i == 100 {
                     let _ = appsrc.end_of_stream();
                     return;
@@ -83,11 +105,19 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
                 let g = if i % 3 == 0 { 0 } else { 255 };
                 let b = if i % 5 == 0 { 0 } else { 255 };

-                let mut buffer = gst::Buffer::with_size(WIDTH * HEIGHT * 4).unwrap();
+                // Create a buffer that can hold exactly one BGRx frame.
+                let mut buffer = gst::Buffer::with_size(video_info.size()).unwrap();
                 {
                     let buffer = buffer.get_mut().unwrap();
+                    // For each frame we produce, we set the timestamp when it should be displayed
+                    // (pts = presentation time stamp).
+                    // The autovideosink will use this information to display the frame at the right time.
                     buffer.set_pts(i * 500 * gst::MSECOND);

+                    // At this point, buffer is only a reference to an existing memory region somewhere.
+                    // When we want to access its content, we have to map it while requesting the required
+                    // mode of access (read or read/write).
+                    // See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
                     let mut data = buffer.map_writable().unwrap();

                     for p in data.as_mut_slice().chunks_mut(4) {
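
What the need-data handler fills into that buffer is an ordinary CPU-side BGRx frame. A self-contained sketch of just the pixel-filling step (the constants and helper name are illustrative, not from the commit):

const WIDTH: usize = 320;
const HEIGHT: usize = 240;

// Each BGRx pixel is 4 bytes: blue, green, red, plus one unused padding byte.
fn fill_bgrx(frame: &mut [u8], r: u8, g: u8, b: u8) {
    for pixel in frame.chunks_mut(4) {
        pixel[0] = b;
        pixel[1] = g;
        pixel[2] = r;
        pixel[3] = 0;
    }
}

fn main() {
    let mut frame = vec![0u8; WIDTH * HEIGHT * 4];
    fill_bgrx(&mut frame, 255, 0, 255);
    // At 2 fps, frame i would be timestamped at i * 500 milliseconds.
    println!("first pixel (B, G, R, x): {:?}", &frame[..4]);
}
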
examples/src/bin/decodebin.rs

@@ -1,3 +1,34 @@
+// This example demonstrates the use of the decodebin element.
+// The decodebin element tries to automatically detect the incoming
+// format and to autoplug the appropriate demuxers / decoders to handle it,
+// decoding it to raw audio, video or subtitles.
+// Before the pipeline has been prerolled, the decodebin can't possibly know what
+// format it gets as its input. So at first, the pipeline looks like this:
+
+// {filesrc} - {decodebin}
+
+// As soon as the decodebin has detected the stream format, it will try to decode every
+// contained stream to its raw format.
+// The application connects a signal-handler to decodebin's pad-added signal, which tells us
+// whenever the decodebin provided us with another contained (raw) stream from the input file.
+
+// This application supports audio and video streams. Video streams are
+// displayed using an autovideosink, and audio streams are played back using autoaudiosink.
+// So for a file that contains one audio and one video stream,
+// the pipeline looks like the following:
+
+//                        /-[audio]-{audioconvert}-{audioresample}-{autoaudiosink}
+// {filesrc}-{decodebin}-|
+//                        \-[video]-{videoconvert}-{videoscale}-{autovideosink}
+
+// Both auto-sinks at the end automatically select the best available (actual) sink. Since the
+// selection of available actual sinks is platform specific
+// (e.g. using pulseaudio for audio output on linux),
+// we need to add the audioconvert and audioresample elements before handing the stream to the
+// autoaudiosink, because we need to make sure that the stream is always supported by the actual sink.
+// Especially Windows APIs tend to be quite picky about samplerate and sample-format.
+// The same applies to video streams.
+
 #[macro_use]
 extern crate gstreamer as gst;
 use gst::prelude::*;
@@ -66,18 +97,36 @@ fn example_main() -> Result<(), Error> {
     let decodebin =
         gst::ElementFactory::make("decodebin", None).ok_or(MissingElement("decodebin"))?;

+    // Tell the filesrc what file to load.
     src.set_property("location", &uri)?;

     pipeline.add_many(&[&src, &decodebin])?;
     gst::Element::link_many(&[&src, &decodebin])?;

+    // Need to move a new reference into the closure.
+    // !!ATTENTION!!:
+    // It might seem appealing to use pipeline.clone() here, because that greatly
+    // simplifies the code within the callback. What this actually does, however, is create
+    // a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
+    // Storing this strong reference of the pipeline within the callback (we are moving it in!),
+    // which is in turn stored in another strong reference on the pipeline, creates a
+    // reference cycle.
+    // DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
     let pipeline_weak = pipeline.downgrade();
+    // Connect to decodebin's pad-added signal, which is emitted whenever
+    // it found another stream from the input file and found a way to decode it to its raw format.
+    // decodebin automatically adds a src-pad for this raw stream, which
+    // we can use to build the follow-up pipeline.
     decodebin.connect_pad_added(move |dbin, src_pad| {
+        // Here we temporarily retrieve a strong reference on the pipeline from the weak one
+        // we moved into this callback.
         let pipeline = match pipeline_weak.upgrade() {
             Some(pipeline) => pipeline,
             None => return,
         };

+        // Try to detect whether the raw stream decodebin provided us with
+        // just now is either audio or video (or neither, e.g. subtitles).
         let (is_audio, is_video) = {
             let media_type = src_pad.get_current_caps().and_then(|caps| {
                 caps.get_structure(0).map(|s| {
@@ -100,8 +149,14 @@ fn example_main() -> Result<(), Error> {
             }
         };

+        // We create a closure here, calling it directly below, because this greatly
+        // improves readability for error-handling. Like this, we can simply use the
+        // ?-operator within the closure, and handle the actual error down below where
+        // we call the insert_sink(..) closure.
         let insert_sink = |is_audio, is_video| -> Result<(), Error> {
             if is_audio {
+                // decodebin found a raw audiostream, so we build the follow-up pipeline to
+                // play it on the default audio playback device (using autoaudiosink).
                 let queue =
                     gst::ElementFactory::make("queue", None).ok_or(MissingElement("queue"))?;
                 let convert = gst::ElementFactory::make("audioconvert", None)
@@ -115,13 +170,21 @@ fn example_main() -> Result<(), Error> {
                 pipeline.add_many(elements)?;
                 gst::Element::link_many(elements)?;

+                // !!ATTENTION!!:
+                // This is quite important and people often forget it. Without making sure that
+                // the new elements have the same state as the pipeline, things will fail later.
+                // They would still be in Null state and could not process data.
                 for e in elements {
                     e.sync_state_with_parent()?;
                 }

+                // Get the queue element's sink pad and link the decodebin's newly created
+                // src pad for the audio stream to it.
                 let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
                 src_pad.link(&sink_pad).into_result()?;
             } else if is_video {
+                // decodebin found a raw videostream, so we build the follow-up pipeline to
+                // display it using the autovideosink.
                 let queue =
                     gst::ElementFactory::make("queue", None).ok_or(MissingElement("queue"))?;
                 let convert = gst::ElementFactory::make("videoconvert", None)
@@ -139,6 +202,8 @@ fn example_main() -> Result<(), Error> {
                     e.sync_state_with_parent()?
                 }

+                // Get the queue element's sink pad and link the decodebin's newly created
+                // src pad for the video stream to it.
                 let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
                 src_pad.link(&sink_pad).into_result()?;
             }
@@ -146,7 +211,17 @@ fn example_main() -> Result<(), Error> {
             Ok(())
         };

+        // When adding and linking new elements in a callback fails, error information is often sparse.
+        // GStreamer's built-in debugging can be hard to link back to the exact position within the code
+        // that failed. Since callbacks are called from random threads within the pipeline, it can get hard
+        // to get good error information. The macros used in the following can solve that. With the use
+        // of those, one can send arbitrary rust types (using the pipeline's bus) into the mainloop.
+        // What we send here is unpacked down below, in the iteration-code over sent bus-messages.
+        // Because we are using the failure crate for error details here, we even get a backtrace for
+        // where the error was constructed (if RUST_BACKTRACE=1 is set).
         if let Err(err) = insert_sink(is_audio, is_video) {
+            // The following sends a message of type Error on the bus, containing our detailed
+            // error information.
             #[cfg(feature = "v1_10")]
             gst_element_error!(
                 dbin,
@@ -174,6 +249,10 @@ fn example_main() -> Result<(), Error> {
         .get_bus()
         .expect("Pipeline without bus. Shouldn't happen!");

+    // This code iterates over all messages that are sent across our pipeline's bus.
+    // In the callback ("pad-added" on the decodebin), we sent better error information
+    // using a bus message. This is the position where we get those messages and log
+    // the contained information.
     while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
         use gst::MessageView;

@@ -185,6 +264,11 @@ fn example_main() -> Result<(), Error> {
                 #[cfg(feature = "v1_10")]
                 {
                     match err.get_details() {
+                        // This bus-message of type error contained our custom error-details struct
+                        // that we sent in the pad-added callback above. So we unpack it and log
+                        // the detailed error information here. details contains a glib::SendValue.
+                        // The unpacked error is then converted to a Result::Err, stopping the
+                        // application's execution.
                         Some(details) if details.get_name() == "error-details" => details
                             .get::<&ErrorValue>("error")
                             .and_then(|v| v.0.lock().unwrap().take())
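
The !!ATTENTION!! comments describe the same cycle problem that plain reference counting has; the downgrade/upgrade dance can be sketched with std's Rc/Weak alone (the Pipeline struct here is a stand-in, not the GStreamer type):

use std::rc::{Rc, Weak};

struct Pipeline;

fn main() {
    let pipeline = Rc::new(Pipeline);

    // Analogous to pipeline.downgrade(): the callback owns only a weak reference,
    // so it does not keep the pipeline alive and no reference cycle can form.
    let pipeline_weak: Weak<Pipeline> = Rc::downgrade(&pipeline);
    let callback = move || {
        // Analogous to pipeline_weak.upgrade(): take a temporary strong reference
        // for the duration of the call, or bail out if the pipeline is gone.
        let _pipeline = match pipeline_weak.upgrade() {
            Some(pipeline) => pipeline,
            None => return,
        };
        // ... use _pipeline here ...
    };
    callback();
}
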
examples/src/bin/discoverer.rs

@@ -1,3 +1,13 @@
+// This example uses gstreamer's discoverer api
+// https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/GstDiscoverer.html
+// to detect as much information about a given URI as possible.
+// The amount of time that the discoverer is allowed to use is limited by a timeout.
+// This allows handling e.g. network problems gracefully. When the timeout hits before
+// the discoverer was able to detect anything, the discoverer will report an error.
+// In this example, we catch this error and stop the application.
+// Discovered information could for example contain the stream's duration or whether it is
+// seekable (filesystem) or not (some http servers).
+
 extern crate gstreamer as gst;

 extern crate gstreamer_pbutils as pbutils;
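
The diff for this example only shows the new header comments; the usage they describe would look roughly like the following sketch. This is a hedged reconstruction, assuming the era's gstreamer_pbutils API (Discoverer::new taking a ClockTime timeout, discover_uri returning the discovered info):

extern crate gstreamer as gst;
extern crate gstreamer_pbutils as pbutils;

fn discover(uri: &str) {
    gst::init().unwrap();
    // Give the discoverer at most 5 seconds, e.g. to survive network problems;
    // if the timeout hits first, discover_uri returns an error instead.
    let timeout = gst::ClockTime::from_seconds(5);
    let discoverer = pbutils::Discoverer::new(timeout).unwrap();
    let info = discoverer.discover_uri(uri).unwrap();
    println!("duration: {}", info.get_duration());
    println!("seekable: {}", info.get_seekable());
}
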
examples/src/bin/encodebin.rs

@@ -1,3 +1,17 @@
+// This example demonstrates the use of the encodebin element.
+// The example takes an arbitrary URI as input, which it will try to decode
+// and finally re-encode using the encodebin element.
+// For more information about how the decodebin element works, have a look at
+// the decodebin example.
+// Since we tell the encodebin from the start what format we want to get out of it,
+// it provides the correct caps and we can link it before starting the pipeline.
+// After the decodebin has found all streams and we piped them into the encodebin,
+// the operated pipeline looks as follows:
+
+//                  /-{queue}-{audioconvert}-{audioresample}-\
+// {uridecodebin} -|                                          {encodebin}-{filesink}
+//                  \-{queue}-{videoconvert}-{videoscale}----/
+
 #[macro_use]
 extern crate gstreamer as gst;
 use gst::prelude::*;
@@ -54,16 +68,24 @@ impl glib::subclass::boxed::BoxedType for ErrorValue {
 glib_boxed_derive_traits!(ErrorValue);

 fn configure_encodebin(encodebin: &gst::Element) -> Result<(), Error> {
+    // To tell the encodebin what we want it to produce, we create an EncodingProfile
+    // https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/GstEncodingProfile.html
+    // This profile consists of information about the contained audio and video formats
+    // as well as the container format we want everything to be combined into.
+
+    // Every audiostream piped into the encodebin should be encoded using vorbis.
     let audio_profile = gst_pbutils::EncodingAudioProfileBuilder::new()
         .format(&gst::Caps::new_simple("audio/x-vorbis", &[]))
         .presence(0)
         .build()?;

+    // Every videostream piped into the encodebin should be encoded using theora.
     let video_profile = gst_pbutils::EncodingVideoProfileBuilder::new()
         .format(&gst::Caps::new_simple("video/x-theora", &[]))
         .presence(0)
         .build()?;

+    // All streams are then finally combined into a matroska container.
     let container_profile = gst_pbutils::EncodingContainerProfileBuilder::new()
         .name("container")
         .format(&gst::Caps::new_simple("video/x-matroska", &[]))
@@ -71,6 +93,7 @@ fn configure_encodebin(encodebin: &gst::Element) -> Result<(), Error> {
         .add_profile(&(audio_profile))
         .build()?;

+    // Finally, apply the EncodingProfile onto our encodebin element.
     encodebin
         .set_property("profile", &container_profile)
         .expect("set profile property failed");
@@ -105,17 +128,38 @@ fn example_main() -> Result<(), Error> {
     sink.set_property("location", &output_file)
         .expect("setting location property failed");

+    // Configure the encodebin.
+    // Here we tell the bin what format we expect it to create at its output.
     configure_encodebin(&encodebin)?;

     pipeline
         .add_many(&[&src, &encodebin, &sink])
         .expect("failed to add elements to pipeline");
+    // It is clear from the start that encodebin has only one src pad, so we can
+    // directly link it to our filesink without problems.
+    // The caps of encodebin's src-pad are set after we configured the encoding-profile.
+    // (But filesink doesn't really care about the caps at its input anyway.)
     gst::Element::link_many(&[&encodebin, &sink])?;

-    // Need to move a new reference into the closure
-    let pipeline_clone = pipeline.clone();
+    // Need to move a new reference into the closure.
+    // !!ATTENTION!!:
+    // It might seem appealing to use pipeline.clone() here, because that greatly
+    // simplifies the code within the callback. What this actually does, however, is create
+    // a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
+    // Storing this strong reference of the pipeline within the callback (we are moving it in!),
+    // which is in turn stored in another strong reference on the pipeline, creates a
+    // reference cycle.
+    // DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
+    let pipeline_weak = pipeline.downgrade();
+    // Much of the following is the same code as in the decodebin example,
+    // so if you want more information on that front, have a look there.
     src.connect_pad_added(move |dbin, dbin_src_pad| {
-        let pipeline = &pipeline_clone;
+        // Here we temporarily retrieve a strong reference on the pipeline from the weak one
+        // we moved into this callback.
+        let pipeline = match pipeline_weak.upgrade() {
+            Some(pipeline) => pipeline,
+            None => return,
+        };

         let (is_audio, is_video) = {
             let media_type = dbin_src_pad.get_current_caps().and_then(|caps| {
@@ -157,6 +201,9 @@ fn example_main() -> Result<(), Error> {
                 .expect("failed to add audio elements to pipeline");
             gst::Element::link_many(elements)?;

+            // Request a sink pad from our encodebin, that can handle a raw audiostream.
+            // The encodebin will then automatically create an internal pipeline, that encodes
+            // the audio stream in the format we specified in the EncodingProfile.
             let enc_sink_pad = encodebin
                 .get_request_pad("audio_%u")
                 .expect("Could not get audio pad from encodebin");
@@ -169,6 +216,8 @@ fn example_main() -> Result<(), Error> {
                 e.sync_state_with_parent()?;
             }

+            // Get the queue element's sink pad and link the decodebin's newly created
+            // src pad for the audio stream to it.
             let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
             dbin_src_pad.link(&sink_pad).into_result()?;
         } else if is_video {
@@ -185,6 +234,9 @@ fn example_main() -> Result<(), Error> {
                 .expect("failed to add video elements to pipeline");
             gst::Element::link_many(elements)?;

+            // Request a sink pad from our encodebin, that can handle a raw videostream.
+            // The encodebin will then automatically create an internal pipeline, that encodes
+            // the video stream in the format we specified in the EncodingProfile.
             let enc_sink_pad = encodebin
                 .get_request_pad("video_%u")
                 .expect("Could not get video pad from encodebin");
@@ -197,6 +249,8 @@ fn example_main() -> Result<(), Error> {
                 e.sync_state_with_parent()?
             }

+            // Get the queue element's sink pad and link the decodebin's newly created
+            // src pad for the video stream to it.
             let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
             dbin_src_pad.link(&sink_pad).into_result()?;
         }
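
Condensed from the hunks above, the whole profile setup reads as one unit. This sketch uses only calls that appear in the diff and assumes the example's existing gst/gst_pbutils imports; error handling is simplified to expect:

fn configure(encodebin: &gst::Element) {
    // vorbis audio + theora video, muxed into a matroska container.
    let audio_profile = gst_pbutils::EncodingAudioProfileBuilder::new()
        .format(&gst::Caps::new_simple("audio/x-vorbis", &[]))
        .presence(0)
        .build()
        .expect("failed to build audio profile");
    let video_profile = gst_pbutils::EncodingVideoProfileBuilder::new()
        .format(&gst::Caps::new_simple("video/x-theora", &[]))
        .presence(0)
        .build()
        .expect("failed to build video profile");
    let container_profile = gst_pbutils::EncodingContainerProfileBuilder::new()
        .name("container")
        .format(&gst::Caps::new_simple("video/x-matroska", &[]))
        .add_profile(&(video_profile))
        .add_profile(&(audio_profile))
        .build()
        .expect("failed to build container profile");
    // Applying the profile makes encodebin expose correctly-capped pads.
    encodebin
        .set_property("profile", &container_profile)
        .expect("set profile property failed");
}
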
examples/src/bin/events.rs

@@ -1,3 +1,24 @@
+// This example demonstrates how events can be created and sent to the pipeline.
+// What this example does is scheduling a timeout on the main loop, and
+// sending an EOS event to the pipeline from there - telling the pipeline
+// to shut down. Once that event is processed by everything, the EOS message
+// is going to be sent and we catch that one to shut down everything.
+
+// GStreamer's bus is an abstraction layer above an arbitrary main loop.
+// This makes sure that GStreamer can be used in conjunction with any other
+// existing framework (mostly GUI frameworks) that operates its own main loop.
+// The main idea behind the bus is the simplification between the application and
+// GStreamer, because GStreamer is heavily threaded underneath.
+
+// Any thread can post messages to the bus, which is essentially a thread-safe
+// queue of messages to process. When a new message is sent to the bus, it
+// will wake up the main loop implementation underneath it (which will then
+// process the pending messages from the main loop thread).
+
+// An application itself can post messages to the bus as well.
+// This makes it possible, e.g., to schedule an arbitrary piece of code
+// to run in the main loop thread - avoiding potential threading issues.
+
 extern crate gstreamer as gst;
 use gst::prelude::*;

@@ -11,14 +32,31 @@ fn example_main() {

     let main_loop = glib::MainLoop::new(None, false);

+    // This creates a pipeline by parsing the gst-launch pipeline syntax.
     let pipeline = gst::parse_launch("audiotestsrc ! fakesink").unwrap();
     let bus = pipeline.get_bus().unwrap();

     let ret = pipeline.set_state(gst::State::Playing);
     assert_ne!(ret, gst::StateChangeReturn::Failure);

+    // Need to move a new reference into the closure.
+    // !!ATTENTION!!:
+    // It might seem appealing to use pipeline.clone() here, because that greatly
+    // simplifies the code within the callback. What this actually does, however, is create
+    // a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
+    // Storing this strong reference of the pipeline within the callback (we are moving it in!),
+    // which is in turn stored in another strong reference on the pipeline, creates a
+    // reference cycle.
+    // DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
     let pipeline_weak = pipeline.downgrade();
+    // Add a timeout to the main loop. This closure will be executed
+    // in an interval of 5 seconds. The return value of the handler function
+    // determines whether the handler still wants to be called:
+    // - glib::Continue(false) - stop calling this handler, remove timeout
+    // - glib::Continue(true) - continue calling this handler
     glib::timeout_add_seconds(5, move || {
+        // Here we temporarily retrieve a strong reference on the pipeline from the weak one
+        // we moved into this callback.
         let pipeline = match pipeline_weak.upgrade() {
             Some(pipeline) => pipeline,
             None => return glib::Continue(false),
@@ -26,15 +64,32 @@ fn example_main() {

         println!("sending eos");

+        // We create an EndOfStream event here, that tells all elements to drain
+        // their internal buffers to their following elements, essentially draining the
+        // whole pipeline (front to back). This ensures that no data is left unhandled and that
+        // headers can be rewritten where needed (e.g. when using something like an MP4 or Matroska muxer).
+        // The EOS event is handled directly from this very thread until the first
+        // queue element is reached during pipeline-traversal, where it is then queued
+        // up and later handled from the queue's streaming thread for the elements
+        // following that queue.
+        // Once all sinks are done handling the EOS event (and all buffers that were before the
+        // EOS event in the pipeline already), the pipeline will post an EOS message on the bus,
+        // essentially telling the application that the pipeline is completely drained.
         let ev = gst::Event::new_eos().build();
         pipeline.send_event(ev);

+        // Remove this handler; the pipeline will shut down anyway, now that we
+        // sent the EOS event.
         glib::Continue(false)
     });

     //bus.add_signal_watch();
     //bus.connect_message(move |_, msg| {
     let main_loop_clone = main_loop.clone();
+    // This sets the bus's signal handler (don't be misled by the "add", there can only be one).
+    // Every message from the bus is passed through this function. Its return value determines
+    // whether the handler wants to be called again. If glib::Continue(false) is returned, the
+    // handler is removed and will never be called again. The mainloop still runs though.
     bus.add_watch(move |_, msg| {
         use gst::MessageView;

@@ -42,6 +97,8 @@ fn example_main() {
         match msg.view() {
             MessageView::Eos(..) => {
                 println!("received eos");
+                // An EndOfStream event was sent to the pipeline, so we tell our main loop
+                // to stop execution here.
                 main_loop.quit()
             }
             MessageView::Error(err) => {
@@ -56,14 +113,21 @@ fn example_main() {
             _ => (),
         };

+        // Tell the mainloop to continue executing this callback.
         glib::Continue(true)
     });

+    // Operate GStreamer's bus, facilitating GLib's mainloop here.
+    // This function call will block until you tell the mainloop to quit
+    // (see above for how to do this).
     main_loop.run();

     let ret = pipeline.set_state(gst::State::Null);
     assert_ne!(ret, gst::StateChangeReturn::Failure);

+    // Remove the watch function from the bus.
+    // Again: there can always only be one watch function.
+    // Thus we don't have to tell it which function to remove.
     bus.remove_watch();
 }
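
Condensed from the hunks above, the EOS shutdown pattern in one place (a sketch reusing only calls shown in this diff; it assumes the example's gst and glib imports, and takes the gst::Element that parse_launch returns):

fn schedule_eos(pipeline: &gst::Element) {
    let pipeline_weak = pipeline.downgrade();
    glib::timeout_add_seconds(5, move || {
        // Take a temporary strong reference, or stop if the pipeline is gone.
        let pipeline = match pipeline_weak.upgrade() {
            Some(pipeline) => pipeline,
            None => return glib::Continue(false),
        };
        // The EOS event drains the pipeline front to back; once every sink has
        // handled it, an EOS message is posted on the bus.
        pipeline.send_event(gst::Event::new_eos().build());
        // One shot is enough: remove this timeout handler again.
        glib::Continue(false)
    });
}
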
examples/src/bin/futures.rs

@@ -1,3 +1,8 @@
+// This example demonstrates how to use the gstreamer crate in conjunction
+// with the Future trait. The example waits for either an error to occur,
+// or for an EOS message. When a message notifying about either of the two
+// is received, the future is resolved.
+
 extern crate gstreamer as gst;
 use gst::prelude::*;

@@ -11,20 +16,28 @@ use std::env;
 mod examples_common;

 fn example_main() {
+    // Read the pipeline to launch from the commandline, using the launch syntax.
     let pipeline_str = env::args().collect::<Vec<String>>()[1..].join(" ");

     gst::init().unwrap();

+    // Create a pipeline from the launch-syntax given on the cli.
     let pipeline = gst::parse_launch(&pipeline_str).unwrap();
     let bus = pipeline.get_bus().unwrap();

     let ret = pipeline.set_state(gst::State::Playing);
     assert_ne!(ret, gst::StateChangeReturn::Failure);

+    // BusStream implements the Stream trait. Stream::for_each calls
+    // a closure for each item and returns a Future that resolves
+    // when the stream is done or an error has happened.
     let messages = gst::BusStream::new(&bus)
         .for_each(|msg| {
             use gst::MessageView;

+            // Determine whether we want to resolve the future, or we still have
+            // to wait. The future is resolved when either an error occurs, or the
+            // pipeline finished execution successfully (got an EOS event).
             let quit = match msg.view() {
                 MessageView::Eos(..) => true,
                 MessageView::Error(err) => {
@@ -40,13 +53,16 @@ fn example_main() {
             };

             if quit {
-                Err(())
+                Err(()) // This resolves the future that is returned by for_each.
+                // FIXME: At the moment, EOS messages also result in the future being
+                // resolved with an error. This should probably be changed in the future.
             } else {
-                Ok(())
+                Ok(()) // Continue - do not resolve the future yet.
             }
         })
         .and_then(|_| Ok(()));

+    // Synchronously wait on the future we created above.
     let _ = block_on(messages);

     let ret = pipeline.set_state(gst::State::Null);
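
Condensed from the hunks above: the future resolves on the first error or EOS message. This sketch mirrors the combinators the diff uses and assumes the example's existing futures imports (Stream/Future traits in scope, block_on from the futures executor of that era):

fn run_until_eos(bus: &gst::Bus) {
    let messages = gst::BusStream::new(bus)
        .for_each(|msg| {
            use gst::MessageView;
            match msg.view() {
                // Resolve the future on EOS or on an error...
                MessageView::Eos(..) | MessageView::Error(..) => Err(()),
                // ...and keep waiting on everything else.
                _ => Ok(()),
            }
        })
        .and_then(|_| Ok(()));
    // Block the current thread until the future resolves.
    let _ = block_on(messages);
}
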
examples/src/bin/ges.rs

@@ -1,3 +1,42 @@
+// HELP: New to GES. Is everything here correct?
+
+// This example demonstrates how to use the gstreamer editing services.
+// This is gstreamer's framework to implement non-linear editing.
+// It provides a timeline API that internally manages a dynamically changing
+// pipeline (e.g. alternating video streams in seconds 1, 2, and 3).
+// Timeline:
+//          _______________________________________________
+//         |     00:01     |     00:02     |     00:03     |
+//         ===============================================
+// Layer0: || ###CLIP### ||               || ###CLIP### ||
+//         || ####00#### ||               || ####01#### ||
+//         ===============================================
+// Layer1: ||            || ###CLIP### ||                ||
+//         ||            || ####00#### ||                ||
+//         ===============================================
+
+// - Assets are the base of most components in GES. One asset essentially represents
+//   one resource (e.g. a file). Different files and filetypes can contain different
+//   types of things. Thus, you can extract different high-level types from an
+//   asset. If you created an asset from a video file, you could for example "extract"
+//   a GESClip from it. The same goes for audio files.
+// - There even is the GESProject subclass of GESAsset, which can be used to load a whole
+//   previously saved project. And since GESProject essentially is a GESAsset itself, you
+//   can then extract the stored components (like the timeline, e.g.) from it.
+// - Clips are the high-level types (above assets), managing multimedia elements (such as
+//   videos or audio clips). Within the timeline, they are arranged in layers.
+//   Those layers essentially behave like in common photo editing software: they specify
+//   the order in which they are composited, and can therefore overlay each other.
+//   Clips are essentially wrappers around the underlying GStreamer elements needed
+//   to work with them. They also provide high-level APIs to add effects into the
+//   clip's internal pipeline.
+//   Multiple clips can also be grouped together (even across layers!) into one, making it
+//   possible to work with all of them as if they were one.
+// - As noted above, layers specify the order in which the different layers are composited.
+//   This is specified by their priority. Layers with higher priority (lower number) trump
+//   those with lower priority (higher number). Thus, layers with higher priority are "in the front".
+// - The timeline is the enclosing element, grouping all layers and providing a timeframe.
+
 extern crate gstreamer as gst;
 use gst::prelude::*;

@@ -19,14 +58,18 @@ extern crate glib;
 fn main_loop(uri: &str) -> Result<(), glib::BoolError> {
     ges::init()?;

+    // Begin by creating a timeline with audio and video tracks.
     let timeline = ges::Timeline::new_audio_video();
+    // Create a new layer that will contain our timed clips.
     let layer = timeline.append_layer();
     let pipeline = ges::Pipeline::new();
     pipeline.set_timeline(&timeline);

+    // Load a clip from the given uri and add it to the layer.
     let clip = ges::UriClip::new(uri);
     layer.add_clip(&clip);

+    // Add an effect to the clip's video stream.
     let effect = ges::Effect::new("agingtv");
     clip.add(&effect).unwrap();

@@ -38,6 +81,8 @@ fn main_loop(uri: &str) -> Result<(), glib::BoolError> {
             .unwrap()
     );

+    // Retrieve the asset that was automatically used behind the scenes, to
+    // extract the clip from.
     let asset = clip.get_asset().unwrap();
     let duration = asset
         .downcast::<ges::UriClipAsset>()
@@ -50,6 +95,10 @@ fn main_loop(uri: &str) -> Result<(), glib::BoolError> {
         duration / 4
     );

+    // The inpoint specifies where in the clip we start, the duration specifies
+    // how much we play from that point onwards. Setting the inpoint to something
+    // other than 0, or the duration to something smaller than the clip's actual
+    // duration, will cut the clip.
     clip.set_inpoint(duration / 2);
     clip.set_duration(duration / 4);

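
The timeline setup from the hunks above as one continuous unit (a sketch made of calls shown in this diff; it assumes the example's existing ges/glib imports and prelude, and a valid uri):

fn build_timeline(uri: &str) -> Result<(), glib::BoolError> {
    ges::init()?;
    // One timeline with an audio and a video track, plus one layer for our clips.
    let timeline = ges::Timeline::new_audio_video();
    let layer = timeline.append_layer();
    let pipeline = ges::Pipeline::new();
    pipeline.set_timeline(&timeline);
    // Put the whole clip from the given uri onto the layer...
    let clip = ges::UriClip::new(uri);
    layer.add_clip(&clip);
    // ...and run its video stream through an agingtv effect.
    let effect = ges::Effect::new("agingtv");
    clip.add(&effect).unwrap();
    Ok(())
}
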
examples/src/bin/gtksink.rs

@@ -1,3 +1,15 @@
+// This example demonstrates how to use gstreamer in conjunction with the gtk widget toolkit.
+// This example shows the video produced by a videotestsrc within a small gtk gui.
+// For this, the gtkglsink is used, which creates a gtk widget one can embed into the gtk gui.
+// There are multiple types of widgets for this: gtkglsink uses OpenGL to render frames, and
+// gtksink uses the CPU to render the frames (which is way slower).
+// So the example application first tries to use OpenGL, and when that fails, falls back.
+// The pipeline looks like the following:
+
+// gtk-gui:          {gtkglsink}-widget
+//                      (|)
+// {videotestsrc} - {glsinkbin}
+
 extern crate gstreamer as gst;
 use gst::prelude::*;

@@ -15,16 +27,32 @@ use std::env;
 fn create_ui(app: &gtk::Application) {
     let pipeline = gst::Pipeline::new(None);
     let src = gst::ElementFactory::make("videotestsrc", None).unwrap();
+    // Create the gtk sink and retrieve the widget from it. The sink element will be used
+    // in the pipeline, and the widget will be embedded in our gui.
+    // Gstreamer then displays frames in the gtk widget.
+    // First, we try to use the OpenGL version - and if that fails, we fall back to non-OpenGL.
     let (sink, widget) = if let Some(gtkglsink) = gst::ElementFactory::make("gtkglsink", None) {
+        // Using the OpenGL widget succeeded, so we are in for a nice playback experience with
+        // low cpu usage. :)
+        // The gtkglsink essentially allocates an OpenGL texture on the GPU, that it will display.
+        // Now we create the glsinkbin element, which is responsible for conversions and for uploading
+        // video frames to our texture (if they are not already in the GPU). Then we tell the OpenGL-sink
+        // about our gtkglsink element, from where it will retrieve the OpenGL texture to fill.
        let glsinkbin = gst::ElementFactory::make("glsinkbin", None).unwrap();
        glsinkbin
            .set_property("sink", &gtkglsink.to_value())
            .unwrap();
+        // The gtkglsink creates the gtk widget for us. This is accessible through a property.
+        // So we get it and use it later to add it to our gui.
        let widget = gtkglsink.get_property("widget").unwrap();
        (glsinkbin, widget.get::<gtk::Widget>().unwrap())
     } else {
+        // Unfortunately, using the OpenGL widget didn't work out, so we will have to render
+        // our frames manually, using the CPU. An example of why this may fail is when
+        // the PC doesn't have proper graphics drivers installed.
        let sink = gst::ElementFactory::make("gtksink", None).unwrap();
+        // The gtksink creates the gtk widget for us. This is accessible through a property.
+        // So we get it and use it later to add it to our gui.
        let widget = sink.get_property("widget").unwrap();
        (sink, widget.get::<gtk::Widget>().unwrap())
     };
@@ -32,9 +60,11 @@ fn create_ui(app: &gtk::Application) {
     pipeline.add_many(&[&src, &sink]).unwrap();
     src.link(&sink).unwrap();

+    // Create a simple gtk gui window to place our widget into.
     let window = gtk::Window::new(gtk::WindowType::Toplevel);
     window.set_default_size(320, 240);
     let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
+    // Add our widget to the gui.
     vbox.pack_start(&widget, true, true, 0);
     let label = gtk::Label::new("Position: 00:00:00");
     vbox.pack_start(&label, true, true, 5);
@@ -43,32 +73,39 @@ fn create_ui(app: &gtk::Application) {

     app.add_window(&window);

+    // Need to move a new reference into the closure.
+    // !!ATTENTION!!:
+    // It might seem appealing to use pipeline.clone() here, because that greatly
+    // simplifies the code within the callback. What this actually does, however, is create
+    // a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
+    // Storing this strong reference of the pipeline within the callback (we are moving it in!),
+    // which is in turn stored in another strong reference on the pipeline, creates a
+    // reference cycle.
+    // DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
     let pipeline_weak = pipeline.downgrade();
+    // Add a timeout to the main loop that will periodically (every 500ms) be
+    // executed. This will query the current position within the stream from
+    // the underlying pipeline, and display it in our gui.
+    // Since this closure is called by the mainloop thread, we are allowed
+    // to modify the gui widgets here.
     let timeout_id = gtk::timeout_add(500, move || {
+        // Here we temporarily retrieve a strong reference on the pipeline from the weak one
+        // we moved into this callback.
        let pipeline = match pipeline_weak.upgrade() {
            Some(pipeline) => pipeline,
            None => return glib::Continue(true),
        };

+        // Query the current playing position from the underlying pipeline.
        let position = pipeline
            .query_position::<gst::ClockTime>()
            .unwrap_or_else(|| 0.into());
+        // Display the playing position in the gui.
        label.set_text(&format!("Position: {:.0}", position));
+        // Tell the callback to continue calling this closure.
        glib::Continue(true)
    });

-    let app_weak = app.downgrade();
-    window.connect_delete_event(move |_, _| {
-        let app = match app_weak.upgrade() {
-            Some(app) => app,
-            None => return Inhibit(false),
-        };
-
-        app.quit();
-        Inhibit(false)
-    });
-
     let bus = pipeline.get_bus().unwrap();

     let ret = pipeline.set_state(gst::State::Playing);
@@ -115,6 +152,7 @@ fn create_ui(app: &gtk::Application) {
 }

 fn main() {
+    // Initialize gstreamer and the gtk widget toolkit libraries.
     gst::init().unwrap();
     gtk::init().unwrap();

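
The sink selection from the hunks above in one place (a sketch made of calls shown in this diff; it assumes the example's existing gst/gtk imports):

fn create_sink() -> (gst::Element, gtk::Widget) {
    if let Some(gtkglsink) = gst::ElementFactory::make("gtkglsink", None) {
        // Preferred path: render into an OpenGL texture, wrapped in glsinkbin.
        let glsinkbin = gst::ElementFactory::make("glsinkbin", None).unwrap();
        glsinkbin
            .set_property("sink", &gtkglsink.to_value())
            .unwrap();
        let widget = gtkglsink.get_property("widget").unwrap();
        (glsinkbin, widget.get::<gtk::Widget>().unwrap())
    } else {
        // Fallback path: gtksink renders the frames on the CPU.
        let sink = gst::ElementFactory::make("gtksink", None).unwrap();
        let widget = sink.get_property("widget").unwrap();
        (sink, widget.get::<gtk::Widget>().unwrap())
    }
}
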
@ -1,3 +1,22 @@
|
||||||
|
// This example demonstrates another type of combination of gtk and gstreamer,
|
||||||
|
// in comparision to the gtksink example.
|
||||||
|
// This example uses regions that are managed by the window system, and uses
|
||||||
|
// the window system's api to insert a videostream into these regions.
|
||||||
|
// So essentially, the window system of the system overlays our gui with
|
||||||
|
// the video frames - within the region that we tell it to use.
|
||||||
|
// Disadvantage of this method is, that it's highly platform specific, since
|
||||||
|
// the big platforms all have their own window system. Thus, this example
|
||||||
|
// has special code to handle differences between platforms.
|
||||||
|
// Windows could theoretically be supported by this example, but is not yet implemented.
|
||||||
|
// One of the very few (if not the single one) platform, that can not provide the API
|
||||||
|
// needed for this are Linux desktops using Wayland.
|
||||||
|
// TODO: Add Windows support
|
||||||
|
// In this case, a testvideo is displayed within our gui, using the
|
||||||
|
// following pipeline:
|
||||||
|
|
||||||
|
// {videotestsrc} - {xvimagesink(on linux)}
|
||||||
|
// {videotestsrc} - {glimagesink(on mac)}
|
||||||
|
|
||||||
extern crate gstreamer as gst;
|
extern crate gstreamer as gst;
|
||||||
use gst::prelude::*;
|
use gst::prelude::*;
|
||||||
|
|
||||||
|
@@ -28,9 +47,17 @@ fn create_ui(app: &gtk::Application) {
    let pipeline = gst::Pipeline::new(None);
    let src = gst::ElementFactory::make("videotestsrc", None).unwrap();

    // Since using the window system to overlay our gui window means making
    // direct contact with the windowing system, this is highly platform-
    // specific. This example supports Linux and Mac (using X11 and Quartz).
    let sink = if cfg!(feature = "gtkvideooverlay-x11") {
        // When we are on linux with the Xorg display server, we use the
        // X11 protocol's XV extension, which allows us to overlay regions
        // with video streams. For this, we use the xvimagesink element.
        gst::ElementFactory::make("xvimagesink", None).unwrap()
    } else if cfg!(feature = "gtkvideooverlay-quartz") {
        // On Mac, this is done by overlaying a window region with an
        // OpenGL texture, using the glimagesink element.
        gst::ElementFactory::make("glimagesink", None).unwrap()
    } else {
        unreachable!()
@@ -39,26 +66,49 @@ fn create_ui(app: &gtk::Application) {
    pipeline.add_many(&[&src, &sink]).unwrap();
    src.link(&sink).unwrap();

    // First, we create our gtk window - which will contain a region where
    // our overlayed video will be displayed.
    let window = gtk::Window::new(gtk::WindowType::Toplevel);
    window.set_default_size(320, 240);

    let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);

    // This creates the widget we will display our overlay in.
    // Later, we will try to tell our window system about this region, so
    // it can overlay it with our video stream.
    let video_window = gtk::DrawingArea::new();
    video_window.set_size_request(320, 240);

    // Use the platform-specific sink to create our overlay.
    // Since we only use the video_overlay in the closure below, we need a weak reference.
    // !!ATTENTION!!:
    // It might seem appealing to use .clone() here, because that greatly
    // simplifies the code within the callback. What this actually does, however, is create
    // a memory leak.
    let video_overlay = sink
        .clone()
        .dynamic_cast::<gst_video::VideoOverlay>()
        .unwrap()
        .downgrade();
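    // (Not part of this commit: a sketch of the leak the warning above describes,
    // with hypothetical names. If we had moved a strong reference into the closure:
    //
    //     let overlay = sink.clone().dynamic_cast::<gst_video::VideoOverlay>().unwrap();
    //     video_window.connect_realize(move |_| {
    //         let _ = &overlay; // the widget owns the closure, and the closure owns
    //     });                   // the overlay, so neither can ever be dropped
    //
    // Upgrading a weak reference inside the closure, as done below, breaks that cycle.)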
    // Connect to this widget's realize signal, which will be emitted
    // after its display has been initialized. This is necessary, because
    // the window system doesn't know about our region until it was initialized.
    video_window.connect_realize(move |video_window| {
        // Here we temporarily retrieve a strong reference on the video-overlay from the
        // weak reference that we moved into the closure.
        let video_overlay = match video_overlay.upgrade() {
            Some(video_overlay) => video_overlay,
            None => return,
        };

        // Gtk uses gdk under the hood, to handle its drawing. Drawing regions are
        // called gdk windows. We request this underlying drawing region from the
        // widget we will overlay with our video.
        let gdk_window = video_window.get_window().unwrap();

        // This is where we tell our window system about the drawing-region we
        // want it to overlay. Most often, the window system would only know
        // about our outermost region (or: our window).
        if !gdk_window.ensure_native() {
            println!("Can't create native window for widget");
            process::exit(-1);
@@ -75,7 +125,14 @@ fn create_ui(app: &gtk::Application) {
            ) -> *mut c_void;
        }

        // This is unsafe because the "window handle" we pass here is basically like a raw pointer.
        // If a wrong value were to be passed here (and you can pass any integer), then the window
        // system will most likely cause the application to crash.
        unsafe {
            // Here we ask gdk what native window handle we got assigned for
            // our video region from the window system, and then we will
            // pass this unique identifier to the overlay provided by our
            // sink - so the sink can then arrange the overlay.
            let xid = gdk_x11_window_get_xid(gdk_window.to_glib_none().0);
            video_overlay.set_window_handle(xid as usize);
        }
@@ -91,7 +148,14 @@ fn create_ui(app: &gtk::Application) {
            ) -> *mut c_void;
        }

        // This is unsafe because the "window handle" we pass here is basically like a raw pointer.
        // If a wrong value were to be passed here (and you can pass any integer), then the window
        // system will most likely cause the application to crash.
        unsafe {
            // Here we ask gdk what native window handle we got assigned for
            // our video region from the windowing system, and then we will
            // pass this unique identifier to the overlay provided by our
            // sink - so the sink can then arrange the overlay.
            let window = gdk_quartz_window_get_nsview(gdk_window.to_glib_none().0);
            video_overlay.set_window_handle(window as usize);
        }
@@ -112,32 +176,39 @@ fn create_ui(app: &gtk::Application) {

    app.add_window(&window);

    // Need to move a new reference into the closure.
    // !!ATTENTION!!:
    // It might seem appealing to use pipeline.clone() here, because that greatly
    // simplifies the code within the callback. What this actually does, however, is create
    // a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
    // Storing this strong reference of the pipeline within the callback (we are moving it in!),
    // which is in turn stored in another strong reference on the pipeline, creates a
    // reference cycle.
    // DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
    let pipeline_weak = pipeline.downgrade();
    // Add a timeout to the main loop that will periodically (every 500ms) be
    // executed. This will query the current position within the stream from
    // the underlying pipeline, and display it in our gui.
    // Since this closure is called by the mainloop thread, we are allowed
    // to modify the gui widgets here.
    let timeout_id = gtk::timeout_add(500, move || {
        // Here we temporarily retrieve a strong reference on the pipeline from the weak one
        // we moved into this callback.
        let pipeline = match pipeline_weak.upgrade() {
            Some(pipeline) => pipeline,
            None => return glib::Continue(false),
        };

        // Query the current playing position from the underlying pipeline.
        let position = pipeline
            .query_position::<gst::ClockTime>()
            .unwrap_or_else(|| 0.into());

        // Display the playing position in the gui.
        label.set_text(&format!("Position: {:.0}", position));

        // Tell the timeout to continue calling this callback.
        glib::Continue(true)
    });

    let app_weak = app.downgrade();
    window.connect_delete_event(move |_, _| {
        let app = match app_weak.upgrade() {
            Some(app) => app,
            None => return Inhibit(false),
        };

        app.quit();
        Inhibit(false)
    });

    let bus = pipeline.get_bus().unwrap();

    let ret = pipeline.set_state(gst::State::Playing);
@@ -190,6 +261,7 @@ fn main() {
        process::exit(-1);
    }

    // Initialize gstreamer and the gtk widget toolkit libraries.
    gst::init().unwrap();
    gtk::init().unwrap();
@@ -1,3 +1,7 @@
// This example demonstrates how to use GStreamer's iteration APIs.
// These are used on multiple occasions - for example to iterate an
// element's pads.

extern crate gstreamer as gst;
use gst::prelude::*;
@@ -7,15 +11,28 @@ mod examples_common;
fn example_main() {
    gst::init().unwrap();

    // Create and use an identity element here.
    // This element does nothing, really. We also never add it to a pipeline.
    // We just want to iterate the identity element's pads.
    let identity = gst::ElementFactory::make("identity", None).unwrap();
    // Get an iterator over all pads of the identity-element.
    let mut iter = identity.iterate_pads();
    loop {
        // In an endless-loop, we use the iterator until we either reach the end
        // or we hit an error.
        match iter.next() {
            Ok(Some(pad)) => println!("Pad: {}", pad.get_name()),
            Ok(None) => {
                // We reached the end of the iterator, there are no more pads
                println!("Done");
                break;
            }
            // It is very important to handle this resync error by calling resync
            // on the iterator. This error happens when the container that is iterated
            // changed during iteration (e.g. a pad was added while we used the
            // iterator to iterate over all of an element's pads).
            // After calling resync on the iterator, iteration will start from the beginning
            // again. So the application should be able to handle that.
            Err(gst::IteratorError::Resync) => {
                println!("Iterator resync");
                iter.resync();
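            }
            // (The hunk shown here ends early; as a sketch, the remaining arm of this
            // match would handle the generic iterator error and leave the loop:)
            Err(gst::IteratorError::Error) => {
                println!("Error");
                break;
            }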
@@ -1,3 +1,8 @@
// This is a simplified rust-reimplementation of the gst-launch-<version>
// cli tool. It has no parameters of its own and simply parses the cli arguments
// as launch syntax.
// When the parsing succeeded, the pipeline is run until the stream ends or an error happens.

extern crate gstreamer as gst;
use gst::prelude::*;
@@ -8,10 +13,20 @@ use std::process;
mod examples_common;

fn example_main() {
    // Get a string containing the passed pipeline launch syntax
    let pipeline_str = env::args().collect::<Vec<String>>()[1..].join(" ");

    gst::init().unwrap();

    // Let GStreamer create a pipeline from the parsed launch syntax on the cli.
    // In comparison to the launch_glib_main example, this is using the advanced launch syntax
    // parsing API of GStreamer. The function returns a Result, handing us the pipeline if
    // parsing and creating succeeded, and hands us detailed error information if something
    // went wrong. The error is passed as gst::ParseError. In this example, we separately
    // handle the NoSuchElement error, which GStreamer uses to notify us about elements
    // used within the launch syntax that are not available (not installed).
    // Especially GUIs should probably handle this case, to tell users that they need to
    // install the corresponding gstreamer plugins.
    let mut context = gst::ParseContext::new();
    let pipeline =
        match gst::parse_launch_full(&pipeline_str, Some(&mut context), gst::ParseFlags::NONE) {
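            // (The diff hunk is cut off here; as a sketch of the handling the comment
            // above describes, assuming the ParseContext and use std::process from above:)
            Ok(pipeline) => pipeline,
            Err(err) => {
                if let Some(gst::ParseError::NoSuchElement) = err.kind::<gst::ParseError>() {
                    println!("Missing element(s): {:?}", context.get_missing_elements());
                } else {
                    println!("Failed to parse pipeline: {}", err);
                }
                process::exit(-1)
            }
        };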
@@ -1,3 +1,12 @@
// This is a simplified rust-reimplementation of the gst-launch-<version>
// cli tool. It has no parameters of its own and simply parses the cli arguments
// as launch syntax.
// When the parsing succeeded, the pipeline is run until it exits.
// The main difference between this example and the launch example is the use of
// GLib's main loop to operate GStreamer's bus. This allows us to also do other
// things from the main loop (timeouts, UI events, socket events, ...) instead
// of just handling messages from GStreamer's bus.

extern crate gstreamer as gst;
use gst::prelude::*;
@@ -9,12 +18,15 @@ use std::env;
mod examples_common;

fn example_main() {
    // Get a string containing the passed pipeline launch syntax
    let pipeline_str = env::args().collect::<Vec<String>>()[1..].join(" ");

    gst::init().unwrap();

    // As teased above, we use GLib's main loop to operate GStreamer's bus.
    let main_loop = glib::MainLoop::new(None, false);

    // Let GStreamer create a pipeline from the parsed launch syntax on the cli.
    let pipeline = gst::parse_launch(&pipeline_str).unwrap();
    let bus = pipeline.get_bus().unwrap();
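    // (The diff omits the watch itself; a sketch of what it plausibly looks like,
    // moving a clone of the main_loop from above into the closure and quitting
    // on end-of-stream or error:)
    let main_loop_clone = main_loop.clone();
    bus.add_watch(move |_, msg| {
        use gst::MessageView;

        let main_loop = &main_loop_clone;
        match msg.view() {
            MessageView::Eos(..) => main_loop.quit(),
            MessageView::Error(err) => {
                println!("Error: {}", err.get_error());
                main_loop.quit();
            }
            _ => (),
        };

        glib::Continue(true)
    });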
@@ -51,6 +63,9 @@ fn example_main() {
    let ret = pipeline.set_state(gst::State::Null);
    assert_ne!(ret, gst::StateChangeReturn::Failure);

    // Here we remove the bus watch we added above. This avoids a memory leak that might
    // otherwise happen because we moved a strong reference (a clone of main_loop) into the
    // callback closure above.
    bus.remove_watch();
}
@@ -1,3 +1,12 @@
// This example demonstrates the use of GStreamer's pad probe APIs.
// Probes are callbacks that can be installed by the application and will notify
// the application about the state of the dataflow. Those are mostly used for
// changing pipelines dynamically at runtime or for inspecting/modifying buffers or events.

//                   |-[probe]
//                  /
// {audiotestsrc} - {fakesink}

extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_audio as gst_audio;
@@ -13,6 +22,9 @@ mod examples_common;
fn example_main() {
    gst::init().unwrap();

    // Parse the pipeline we want to probe from a static in-line string.
    // Here we give our audiotestsrc a name, so we can retrieve that element
    // from the resulting pipeline.
    let pipeline = gst::parse_launch(&format!(
        "audiotestsrc name=src ! audio/x-raw,format={},channels=1 ! fakesink",
        gst_audio::AUDIO_FORMAT_S16.to_string()
@@ -20,18 +32,35 @@ fn example_main() {
    .unwrap();
    let pipeline = pipeline.dynamic_cast::<gst::Pipeline>().unwrap();

    // Get the audiotestsrc element from the pipeline that GStreamer
    // created for us while parsing the launch syntax above.
    let src = pipeline.get_by_name("src").unwrap();
    // Get the audiotestsrc's src-pad.
    let src_pad = src.get_static_pad("src").unwrap();
    // Add a probe handler on the audiotestsrc's src-pad.
    // This handler gets called for every buffer that passes the pad we probe.
    src_pad.add_probe(gst::PadProbeType::BUFFER, |_, probe_info| {
        // Interpret the data sent over the pad as one buffer
        if let Some(gst::PadProbeData::Buffer(ref buffer)) = probe_info.data {
            // At this point, buffer is only a reference to an existing memory region somewhere.
            // When we want to access its content, we have to map it while requesting the required
            // mode of access (read, read/write).
            // This type of abstraction is necessary, because the buffer in question might not be
            // on the machine's main memory itself, but rather in the GPU's memory.
            // So mapping the buffer makes the underlying memory region accessible to us.
            // See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
            let map = buffer.map_readable().unwrap();

            // We know what format the data in the memory region has, since we requested
            // it with the caps in the launch syntax above. So what we do here is interpret the
            // memory region we mapped as an array of signed 16 bit integers.
            let samples = if let Ok(samples) = map.as_slice_of::<i16>() {
                samples
            } else {
                return gst::PadProbeReturn::Ok;
            };

            // For each buffer (= chunk of samples), we calculate the root mean square:
            let sum: f64 = samples
                .iter()
                .map(|sample| {
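                    // (The hunk ends mid-expression; a sketch of how the root mean
                    // square computation plausibly continues, assuming samples: &[i16]:)
                    let f = f64::from(*sample) / f64::from(std::i16::MAX);
                    f * f
                })
                .sum();
            let rms = (sum / (samples.len() as f64)).sqrt();
            println!("rms: {}", rms);
        }

        gst::PadProbeReturn::Ok
    });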
@@ -1,3 +1,16 @@
// This example demonstrates how to overlay a video using the cairo
// library. For this, the cairooverlay element is used on a video stream.
// Additionally, this example uses functionality of the pango library, which handles
// text layouting. The pangocairo crate is a nice wrapper combining both libraries
// into a single interface.
// The drawing surface which the cairooverlay element creates internally can then
// normally be drawn on using the cairo library.
// The operated pipeline looks like this:

// {videotestsrc} - {cairooverlay} - {capsfilter} - {videoconvert} - {autovideosink}

// The capsfilter element allows us to dictate the video resolution we want for the
// videotestsrc and the cairooverlay element.

extern crate glib;

extern crate gstreamer as gst;
@@ -79,28 +92,54 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
    pipeline.add_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
    gst::Element::link_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;

    // Plug in a capsfilter element that will force the videotestsrc and the cairooverlay to work
    // with images of the size 800x800.
    let caps = gst::Caps::builder("video/x-raw")
        .field("width", &800i32)
        .field("height", &800i32)
        .build();
    capsfilter.set_property("caps", &caps).unwrap();

    // The videotestsrc supports multiple test patterns. In this example, we will use the
    // pattern with a white ball moving around the video's center point.
    src.set_property_from_str("pattern", "ball");

    // The PangoFontMap represents the set of fonts available for a particular rendering system.
    let fontmap = pangocairo::FontMap::new().unwrap();
    // Create a new pango layouting context for the fontmap.
    let context = fontmap.create_context().unwrap();
    // Create a pango layout object. This object is a string of text we want to layout.
    // It is wrapped in a LayoutWrapper (defined above) to be able to send it across threads.
    let layout = LayoutWrapper(pango::Layout::new(&context));

    // Select the text content and the font we want to use for the piece of text.
    let font_desc = pango::FontDescription::from_string("Sans Bold 26");
    layout.set_font_description(&font_desc);
    layout.set_text("GStreamer");

    // The following is a context struct (containing the pango layout and the configured video info).
    // We have to wrap it in an Arc (or Rc) to get reference counting, that is: to be able to have
    // shared ownership of it in multiple different places (the two signal handlers here).
    // We have to wrap it in a Mutex because Rust's type-system can't know that both signals are
    // only ever called from a single thread (the streaming thread). It would be enough to have
    // something that is Send in theory, but that's not how signal handlers are generated unfortunately.
    // The Mutex (or, if we didn't need the Sync bound, a RefCell) is there to implement
    // interior mutability (see Rust docs). Via this we can get a mutable reference to the contained
    // data, which is checked at runtime for uniqueness (blocking in case of a mutex, panicking in
    // case of a refcell) instead of at compile-time (like with normal references).
    let drawer = Arc::new(Mutex::new(DrawingContext {
        layout: glib::SendUniqueCell::new(layout).unwrap(),
        info: None,
    }));

    let drawer_clone = drawer.clone();
    // Connect to the cairooverlay element's "draw" signal, which is emitted for
    // each videoframe piped through the element. Here we have the possibility to
    // draw on top of the frame (overlay it), using the cairo render api.
    // Signals connected with the connect(<name>, ...) API get their arguments
    // passed as an array of glib::Value. For documentation about the actual arguments,
    // it is always a good idea to check the element's signals using either
    // gst-inspect or the online documentation.
    overlay
        .connect("draw", false, move |args| {
            use std::f64::consts::PI;
@@ -108,7 +147,10 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
            let drawer = &drawer_clone;
            let drawer = drawer.lock().unwrap();

            // Get the signal's arguments
            let _overlay = args[0].get::<gst::Element>().unwrap();
            // This is the cairo context. This is the root of all of cairo's
            // drawing functionality.
            let cr = args[1].get::<cairo::Context>().unwrap();
            let timestamp = args[2].get::<gst::ClockTime>().unwrap();
            let _duration = args[3].get::<gst::ClockTime>().unwrap();
@@ -121,10 +163,22 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
                * ((timestamp % (10 * gst::SECOND)).unwrap() as f64
                    / (10.0 * gst::SECOND_VAL as f64));

            // The image we draw (the text) will be static, but we will change the
            // transformation on the drawing context, which rotates and shifts everything
            // that we draw afterwards. Like this, we have no complicated calculations
            // in the actual drawing below.
            // Calling multiple transformation methods after each other will apply the
            // new transformation on top. If you repeat the cr.rotate(angle) line below
            // a second time, everything in the canvas will rotate twice as fast.
            cr.translate(info.width() as f64 / 2.0, info.height() as f64 / 2.0);
            cr.rotate(angle);

            // This loop will render the string "GStreamer" 10 times in a circle
            for i in 0..10 {
                // Cairo, like most rendering frameworks, uses a stack for transformations.
                // With this, we push our current transformation onto this stack - allowing us
                // to make temporary changes / render something / and then return to the
                // previous transformations.
                cr.save();

                let angle = (360. * i as f64) / 10.0;
@@ -132,14 +186,23 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
                cr.set_source_rgb(red, 0.0, 1.0 - red);
                cr.rotate(angle * PI / 180.0);

                // Update the text layout. This function is only updating pango's internal state.
                // So e.g. after a 90 degree rotation it knows that what was previously going
                // to end up as a 200x100 rectangle would now be 100x200.
                pangocairo::functions::update_layout(&cr, &layout);
                let (width, _height) = layout.get_size();
                // Using the width and height of the text, we can properly position it within
                // our canvas.
                cr.move_to(
                    -(width as f64 / pango::SCALE as f64) / 2.0,
                    -(info.height() as f64) / 2.0,
                );
                // After telling the layout object where to draw itself, we actually tell
                // it to draw itself into our cairo context.
                pangocairo::functions::show_layout(&cr, &layout);

                // Here we go one step up in our stack of transformations, removing any
                // changes we did to them since the last call to cr.save();
                cr.restore();
            }
@@ -148,6 +211,13 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
        .unwrap();

    let drawer_clone = drawer.clone();
    // Add a signal handler to the overlay's "caps-changed" signal. This could e.g.
    // be called when the sink that we render to does not support resizing the image
    // itself - but the user just changed the window-size. The element after the overlay
    // will then change its caps and we use the notification about this change to
    // resize our canvas's size.
    // Another possibility for when this might happen is when our video is a network
    // stream that dynamically changes resolution when enough bandwidth is available.
    overlay
        .connect("caps-changed", false, move |args| {
            let _overlay = args[0].get::<gst::Element>().unwrap();
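            // (Cut off in this hunk; a sketch of the rest of this handler, assuming
            // the DrawingContext from above stores a gst_video::VideoInfo in `info`:)
            let caps = args[1].get::<gst::Caps>().unwrap();

            let mut drawer = drawer_clone.lock().unwrap();
            drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());

            None
        })
        .unwrap();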
@@ -1,3 +1,14 @@
// This example demonstrates GStreamer's playbin element.
// This element takes an arbitrary URI as parameter, and if there is a source
// element within gstreamer that supports this uri, the playbin will try
// to automatically create a pipeline that properly plays this media source.
// For this, the playbin internally relies on more bin elements, like the
// autovideosink and the decodebin.
// Essentially, this element is a single-element pipeline able to play
// any format from any uri-addressable source that gstreamer supports.
// Much of the playbin's behavior can be controlled by so-called flags, as well
// as the playbin's properties and signals.

extern crate gstreamer as gst;
use gst::prelude::*;
@@ -19,12 +30,15 @@ fn example_main() {
        std::process::exit(-1)
    };

    // Create a new playbin element, and tell it what uri to play back.
    let playbin = gst::ElementFactory::make("playbin", None).unwrap();
    playbin
        .set_property("uri", &glib::Value::from(uri))
        .unwrap();

    // For flags handling
    // With flags, one can configure playbin's behavior such as whether it
    // should play back contained video streams, or if it should render subtitles.
    // let flags = playbin.get_property("flags").unwrap();
    // let flags_class = FlagsClass::new(flags.type_()).unwrap();
    // let flags = flags_class.builder_with_value(flags).unwrap()
@@ -34,13 +48,35 @@ fn example_main() {
    // .unwrap();
    // playbin.set_property("flags", &flags).unwrap();

    // The playbin also provides any kind of metadata that it found in the played stream.
    // For this, the playbin provides signals notifying about changes in the metadata.
    // Doing this with a signal makes sense for multiple reasons:
    // - The metadata is only found after the pipeline has been started
    // - Live streams (such as internet radios) update this metadata during the stream
    // Note that this signal will usually be emitted from the streaming threads,
    // not the application's threads!
    playbin
        .connect("audio-tags-changed", false, |values| {
            // The metadata of one of the contained audio streams changed.
            // In the case of a live-stream from an internet radio, this could for example
            // mark the beginning of a new track, or a new DJ.
            let playbin = values[0].get::<glib::Object>().unwrap();
            // This gets the index of the stream that changed. This is necessary, since
            // there could e.g. be multiple audio streams (english, spanish, ...).
            let idx = values[1].get::<i32>().unwrap();

            println!("audio tags of audio stream {} changed:", idx);

            // HELP: is this correct?
            // We were only notified about the change of metadata. If we want to do
            // something with it, we first need to actually query the metadata from the playbin.
            // We do this by using the get-audio-tags action-signal on playbin.
            // Sending an action-signal to an element essentially is a function call on the element.
            // It is done that way, because elements do not have their own function API; they
            // rely on GStreamer and GLib's API. The only way an element can communicate with an
            // application is via properties, signals or action signals (or custom messages, events, queries).
            // So what the following code does is essentially ask playbin to tell us its already
            // internally stored tag list for this stream index.
            let tags = playbin
                .emit("get-audio-tags", &[&idx.to_value()])
                .unwrap()
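                // (The hunk ends inside this expression; as a sketch, the remaining part
                // plausibly extracts the gst::TagList from the returned glib::Value:)
                .unwrap()
                .get::<gst::TagList>()
                .unwrap();

            // ... after which individual tags can be read, e.g.:
            if let Some(title) = tags.get::<gst::tags::Title>() {
                println!("  title: {}", title.get().unwrap());
            }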
@@ -63,6 +99,8 @@ fn example_main() {
        })
        .unwrap();

    // The playbin element itself is a pipeline, so it can be used as one, despite being
    // created from an element factory.
    let bus = playbin.get_bus().unwrap();

    let ret = playbin.set_state(gst::State::Playing);
@@ -1,3 +1,10 @@
// This example shows how to use the GstPlayer API.
// The GstPlayer API is a convenience API that allows implementing playback applications
// without having to write too much code.
// Most of the tasks a player needs to support (such as seeking and switching
// audio / subtitle streams or changing the volume) are all supported by simple
// one-line function calls on the GstPlayer.

extern crate gstreamer as gst;
use gst::prelude::*;
@@ -28,11 +35,14 @@ fn main_loop(uri: &str) -> Result<(), Error> {
        Some(&dispatcher.upcast::<gst_player::PlayerSignalDispatcher>()),
    );

    // Tell the player what uri to play.
    player.set_uri(uri);

    let error = Arc::new(Mutex::new(Ok(())));

    let main_loop_clone = main_loop.clone();
    // Connect to the player's "end-of-stream" signal, which will tell us when the
    // currently played media stream reached its end.
    player.connect_end_of_stream(move |player| {
        let main_loop = &main_loop_clone;
        player.stop();
@@ -41,6 +51,8 @@ fn main_loop(uri: &str) -> Result<(), Error> {

    let main_loop_clone = main_loop.clone();
    let error_clone = Arc::clone(&error);
    // Connect to the player's "error" signal, which will inform us about eventual
    // errors (such as failing to retrieve an http stream).
    player.connect_error(move |player, err| {
        let main_loop = &main_loop_clone;
        let error = &error_clone;
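        // (Cut off by the hunk; a sketch of how this handler plausibly continues:
        // remember the error for the caller, stop playback and leave the main loop.
        // The conversion of the glib::Error into the stored error type is an assumption.)
        *error.lock().unwrap() = Err(err.clone().into());

        player.stop();
        main_loop.quit();
    });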
@@ -1,3 +1,17 @@
// This example demonstrates how to use GStreamer's query functionality.
// Queries are a way to get information from either elements or pads.
// Such information could for example be the current position within
// the stream (i.e. the playing time). Queries can traverse the pipeline
// (both up and downstream). This functionality is essential, since most
// queries can only be answered by specific elements in a pipeline (such as the
// stream's duration, which often can only be answered by the demuxer).
// Since gstreamer has many elements that themselves contain other elements that
// we don't know of, we can simply send a query for the duration into the
// pipeline and the query is passed along until an element feels capable
// of answering.
// For convenience, the API has a set of pre-defined queries, but also
// allows custom queries (which can be defined and used by your own elements).

extern crate gstreamer as gst;
use gst::prelude::*;
@@ -9,22 +23,35 @@ use std::env;
mod examples_common;

fn example_main() {
    // Get a string containing the passed pipeline launch syntax
    let pipeline_str = env::args().collect::<Vec<String>>()[1..].join(" ");

    gst::init().unwrap();

    let main_loop = glib::MainLoop::new(None, false);

    // Let GStreamer create a pipeline from the parsed launch syntax on the cli.
    let pipeline = gst::parse_launch(&pipeline_str).unwrap();
    let bus = pipeline.get_bus().unwrap();

    let ret = pipeline.set_state(gst::State::Playing);
    assert_ne!(ret, gst::StateChangeReturn::Failure);

    // Need to move a new reference into the closure.
    // !!ATTENTION!!:
    // It might seem appealing to use pipeline.clone() here, because that greatly
    // simplifies the code within the callback. What this actually does, however, is create
    // a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
    // Storing this strong reference of the pipeline within the callback (we are moving it in!),
    // which is in turn stored in another strong reference on the pipeline, creates a
    // reference cycle.
    // DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
    let pipeline_weak = pipeline.downgrade();
    // Add a timeout to the main loop. This closure will be executed
    // in an interval of 1 second.
    let timeout_id = glib::timeout_add_seconds(1, move || {
        // Here we temporarily retrieve a strong reference on the pipeline from the weak one
        // we moved into this callback.
        let pipeline = match pipeline_weak.upgrade() {
            Some(pipeline) => pipeline,
            None => return glib::Continue(true),
        };
@@ -33,6 +60,9 @@ fn example_main() {
        //let pos = pipeline.query_position(gst::Format::Time).unwrap_or(-1);
        //let dur = pipeline.query_duration(gst::Format::Time).unwrap_or(-1);
        let pos = {
            // Create a new position query and send it to the pipeline.
            // This will traverse all elements in the pipeline, until one feels
            // capable of answering the query.
            let mut q = gst::Query::new_position(gst::Format::Time);
            if pipeline.query(&mut q) {
                Some(q.get_result())
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let dur = {
|
let dur = {
|
||||||
|
// Create a new duration query and send it to the pipeline.
|
||||||
|
// This will traverse all elements in the pipeline, until one feels
|
||||||
|
// capable of answering the query.
|
||||||
let mut q = gst::Query::new_duration(gst::Format::Time);
|
let mut q = gst::Query::new_duration(gst::Format::Time);
|
||||||
if pipeline.query(&mut q) {
|
if pipeline.query(&mut q) {
|
||||||
Some(q.get_result())
|
Some(q.get_result())
|
||||||
|
@@ -59,6 +92,8 @@ fn example_main() {
        glib::Continue(true)
    });

    // Need to move a new reference into the closure.
    let main_loop_clone = main_loop.clone();
    //bus.add_signal_watch();
    //bus.connect_message(move |_, msg| {
    bus.add_watch(move |_, msg| {
@@ -1,3 +1,9 @@
// This example demonstrates how to set up a rtsp server using GStreamer.
// While the "rtsp-server" example is about streaming media to connecting
// clients, this example is mainly about recording media that clients
// send to the server. For this, the launch syntax pipeline that is passed
// to this example's cli is spawned and the client's media is streamed into it.

extern crate failure;
extern crate gio;
extern crate glib;
@@ -37,13 +43,26 @@ fn main_loop() -> Result<(), Error> {
        return Err(Error::from(UsageError(args[0].clone())));
    }

    // Mostly analogous to the rtsp-server example, the server is created
    // and the factory for our test mount is configured.
    let main_loop = glib::MainLoop::new(None, false);
    let server = RTSPServer::new();
    // Much like HTTP servers, RTSP servers have multiple endpoints that
    // provide or take different streams. Here, we ask our server to give
    // us a reference to its list of endpoints, so we can add our
    // test endpoint.
    let mounts = server.get_mount_points().ok_or(NoMountPoints)?;
    // Next, we create a factory for the endpoint we want to create.
    // The job of the factory is to create a new pipeline for each client that
    // connects, or (if configured to do so) to reuse an existing pipeline.
    let factory = RTSPMediaFactory::new();
    // Here we configure a method of authentication that we want the
    // server to require from clients.
    let auth = RTSPAuth::new();
    let token = RTSPToken::new(&[(*RTSP_TOKEN_MEDIA_FACTORY_ROLE, &"user")]);
    let basic = RTSPAuth::make_basic("user", "password");
    // For proper authentication, we want to use encryption. And there's no
    // encryption without a certificate!
    let cert = gio::TlsCertificate::new_from_pem(
        "-----BEGIN CERTIFICATE-----\
        MIICJjCCAY+gAwIBAgIBBzANBgkqhkiG9w0BAQUFADCBhjETMBEGCgmSJomT8ixk\
@@ -71,6 +90,8 @@ fn main_loop() -> Result<(), Error> {
    )?;

    // Bindable versions were added in b1f515178a363df0322d7adbd5754e1f6e2083c9
    // This declares that the user "user" (once authenticated) has a role that
    // allows them to access and construct media factories.
    unsafe {
        ffi::gst_rtsp_media_factory_add_role(
            factory.to_glib_none().0,
@@ -87,13 +108,35 @@ fn main_loop() -> Result<(), Error> {

    auth.set_tls_certificate(&cert);
    auth.add_basic(basic.as_str(), &token);
    // Here, we tell the RTSP server about the authentication method we
    // configured above.
    server.set_auth(&auth);

    factory.set_launch(args[1].as_str());
    // Tell the RTSP server that we want to work in RECORD mode (clients send
    // data to us).
    factory.set_transport_mode(RTSPTransportMode::RECORD);
    // The RTSP protocol allows a couple of different profiles for the actual
    // protocol of data-transmission. With this, we can limit the selection
    // from which connecting clients have to choose.
    // SAVP/SAVPF are via SRTP (encrypted) - that's what the S is for.
    // The F in the end is for feedback (an extension that allows more bidirectional
    // feedback between sender and receiver). AV is just Audio/Video, P is Profile :)
    // The default, old RTP profile is AVP.
    factory.set_profiles(RTSPProfile::SAVP | RTSPProfile::SAVPF);

    // Now we add a new mount-point and tell the RTSP server to use the factory
    // we configured beforehand. This factory will take on the job of creating
    // a pipeline, which will take on the incoming data of connected clients.
    mounts.add_factory("/test", &factory);

    // Attach the server to our main context.
    // A main context is the thing where other stuff is registering itself for its
    // events (e.g. sockets, GStreamer bus, ...) and the main loop is something that
    // polls the main context for its events and dispatches them to whoever is
    // interested in them. In this example, we only have one, so we can
    // leave the context parameter empty and it will automatically select
    // the default one.
    let id = server.attach(None);

    println!(
@@ -101,6 +144,8 @@ fn main_loop() -> Result<(), Error> {
        server.get_bound_port()
    );

    // Start the mainloop. From this point on, the server will start to take
    // incoming connections from clients.
    main_loop.run();

    glib::source_remove(id);
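// (Not part of this commit: as a usage sketch, assuming the launch pipeline accepts the
// client's stream and the rtspclientsink element is installed, a client could now record
// into this server with a hypothetical invocation along the lines of:
//
//     gst-launch-1.0 audiotestsrc ! rtspclientsink location=rtsp://127.0.0.1:8554/test
//
// The exact URL scheme, port and credentials depend on the TLS/auth setup above.)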
@@ -1,3 +1,8 @@
// This example demonstrates how to set up a rtsp server using GStreamer.
// For this, the example parses an arbitrary pipeline in launch syntax
// from the cli and provides this pipeline's output as a stream, served
// using GStreamer's rtsp server.

use std::env;

extern crate gstreamer as gst;
|
||||||
|
|
||||||
let main_loop = glib::MainLoop::new(None, false);
|
let main_loop = glib::MainLoop::new(None, false);
|
||||||
let server = gst_rtsp_server::RTSPServer::new();
|
let server = gst_rtsp_server::RTSPServer::new();
|
||||||
let factory = gst_rtsp_server::RTSPMediaFactory::new();
|
// Much like HTTP servers, RTSP servers have multiple endpoints that
|
||||||
|
// provide different streams. Here, we ask our server to give
|
||||||
|
// us a reference to his list of endpoints, so we can add our
|
||||||
|
// test endpoint, providing the pipeline from the cli.
|
||||||
let mounts = server.get_mount_points().ok_or(NoMountPoints)?;
|
let mounts = server.get_mount_points().ok_or(NoMountPoints)?;
|
||||||
|
|
||||||
|
// Next, we create a factory for the endpoint we want to create.
|
||||||
|
// The job of the factory is to create a new pipeline for each client that
|
||||||
|
// connects, or (if configured to do so) to reuse an existing pipeline.
|
||||||
|
let factory = gst_rtsp_server::RTSPMediaFactory::new();
|
||||||
|
// Here we tell the media factory the media we want to serve.
|
||||||
|
// This is done in the launch syntax. When the first client connects,
|
||||||
|
// the factory will use this syntax to create a new pipeline instance.
|
||||||
factory.set_launch(args[1].as_str());
|
factory.set_launch(args[1].as_str());
|
||||||
|
// This setting specifies whether each connecting client gets the output
|
||||||
|
// of a new instance of the pipeline, or whether all connected clients share
|
||||||
|
// the output of the same pipeline.
|
||||||
|
// If you want to stream a fixed video you have stored on the server to any
|
||||||
|
// client, you would not set this to shared here (since every client wants
|
||||||
|
// to start at the beginning of the video). But if you want to distribute
|
||||||
|
// a live source, you will probably want to set this to shared, to save
|
||||||
|
// computing and memory capacity on the server.
|
||||||
factory.set_shared(true);
|
factory.set_shared(true);
|
||||||
|
|
||||||
|
// Now we add a new mount-point and tell the RTSP server to serve the content
|
||||||
|
// provided by the factory we configured above, when a client connects to
|
||||||
|
// this specific path.
|
||||||
mounts.add_factory("/test", &factory);
|
mounts.add_factory("/test", &factory);
|
||||||
|
|
||||||
|
// Attach the server to our main context.
|
||||||
|
// A main context is the thing where other stuff is registering itself for its
|
||||||
|
// events (e.g. sockets, GStreamer bus, ...) and the main loop is something that
|
||||||
|
// polls the main context for its events and dispatches them to whoever is
|
||||||
|
// interested in them. In this example, we only do have one, so we can
|
||||||
|
// leave the context parameter empty, it will automatically select
|
||||||
|
// the default one.
|
||||||
let id = server.attach(None);
|
let id = server.attach(None);
|
||||||
|
|
||||||
println!(
|
println!(
|
||||||
|
@@ -48,6 +81,8 @@ fn main_loop() -> Result<(), Error> {
        server.get_bound_port()
    );

    // Start the mainloop. From this point on, the server will start to serve
    // our quality content to connecting clients.
    main_loop.run();

    glib::source_remove(id);
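// (Not part of this commit: as a usage sketch, once the server is running, any
// RTSP-capable client should be able to play the mount configured above, e.g. with
// a hypothetical invocation like:
//
//     gst-play-1.0 rtsp://127.0.0.1:8554/test
//
// assuming 8554 is the port printed by the get_bound_port() call above.)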
@@ -1,3 +1,23 @@
// This example demonstrates how to set and store metadata using
// GStreamer. Some elements support setting tags on a media stream.
// An example would be id3v2mux. The element signals this by implementing
// the GstTagsetter interface. You can query any element implementing this
// interface from the pipeline, and then tell the returned implementation
// of GstTagsetter what tags to apply to the media stream.
// This example's pipeline creates a new flac file from the audiotestsrc
// that the example application will add tags to using GstTagsetter.
// The operated pipeline looks like this:

// {audiotestsrc} - {flacenc} - {filesink}

// For example, for pipelines that transcode a multimedia file, the input
// already has tags. For cases like this, the GstTagsetter has the merge
// setting, which the application can configure to tell the element
// implementing the interface whether to merge newly applied tags into the
// already existing ones, whether all existing ones should be replaced, etc.
// (More modes of operation are possible, see: gst::TagMergeMode)
// This merge-mode can also be supplied to any method that adds new tags.

extern crate gstreamer as gst;
use gst::prelude::*;
@ -34,6 +54,7 @@ struct ErrorMessage {
fn example_main() -> Result<(), Error> {
gst::init()?;

// Parse the pipeline we want to probe from a static in-line string.
let mut context = gst::ParseContext::new();
let pipeline = match gst::parse_launch_full(
"audiotestsrc wave=white-noise num-buffers=100 ! flacenc ! filesink location=test.flac",
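// The same pipeline can be tried out on the command line first; a hedged
// equivalent (assuming the gst-launch-1.0 tool is installed):
// gst-launch-1.0 audiotestsrc wave=white-noise num-buffers=100 ! flacenc ! filesink location=test.flac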
@ -54,6 +75,8 @@ fn example_main() -> Result<(), Error> {
.downcast::<gst::Pipeline>()
.map_err(|_| failure::err_msg("Generated pipeline is no pipeline"))?;

// Query the pipeline for elements implementing the GstTagSetter interface.
// In our case, this will return the flacenc element.
let tagsetter = pipeline
.get_by_interface(gst::TagSetter::static_type())
.ok_or_else(|| failure::err_msg("No TagSetter found"))?;
@ -61,7 +84,12 @@ fn example_main() -> Result<(), Error> {
.dynamic_cast::<gst::TagSetter>()
.map_err(|_| failure::err_msg("No TagSetter found"))?;

// Tell the element implementing the GstTagSetter interface how to handle already existing
// metadata.
tagsetter.set_tag_merge_mode(gst::TagMergeMode::KeepAll);

// Set the "title" tag to "Special randomized white-noise".
// The second parameter gst::TagMergeMode::Append tells the tagsetter to append this title
// if there already is one.
tagsetter.add::<gst::tags::Title>(&"Special randomized white-noise", gst::TagMergeMode::Append);

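// More tags could be added in the same way; a hedged sketch (these tags are
// illustrative and not part of this example):
// tagsetter.add::<gst::tags::Artist>(&"audiotestsrc", gst::TagMergeMode::Append);
// tagsetter.add::<gst::tags::Comment>(&"Generated white noise", gst::TagMergeMode::Append);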
let bus = pipeline.get_bus().unwrap();
@ -1,3 +1,14 @@
// This example demonstrates the use of GStreamer's ToC API. This API is used
// to manage a table of contents contained in the handled media stream.
// Chapters within a matroska file would be an example of a scenario for using
// this API. Elements that can parse ToCs from a stream (such as matroskademux)
// notify all elements in the pipeline when they encounter a ToC.
// For this, the example operates the following pipeline:

//                         /- {queue} - {fakesink}
// {filesrc} - {decodebin} - {queue} - {fakesink}
//                         \- ...

extern crate gstreamer as gst;
use gst::prelude::*;

@ -29,13 +40,29 @@ fn example_main() {
pipeline.add_many(&[&src, &decodebin]).unwrap();
gst::Element::link_many(&[&src, &decodebin]).unwrap();

// Need to move a new reference into the closure.
// !!ATTENTION!!:
// It might seem appealing to use pipeline.clone() here, because that greatly
// simplifies the code within the callback. What this actually does, however, is create
// a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
// Storing this strong reference of the pipeline within the callback (we are moving it in!),
// which is in turn stored in another strong reference on the pipeline, creates a
// reference cycle.
// DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
let pipeline_weak = pipeline.downgrade();
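// For contrast, a hedged sketch of the leaking variant warned about above
// (do not do this):
// let pipeline_strong = pipeline.clone();
// decodebin.connect_pad_added(move |_, _| {
//     // The strong reference now lives inside decodebin, which the pipeline
//     // owns: pipeline -> decodebin -> closure -> pipeline, a reference cycle.
//     let _ = &pipeline_strong;
// });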

// Connect to decodebin's pad-added signal, which is emitted whenever it has found
// another stream from the input file and a way to decode it to its raw format.
decodebin.connect_pad_added(move |_, src_pad| {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return,
};

// In this example, we are only interested in parsing the ToC, so
// we simply pipe every encountered stream into a fakesink, essentially
// throwing away the data.
let queue = gst::ElementFactory::make("queue", None).unwrap();
let sink = gst::ElementFactory::make("fakesink", None).unwrap();

@ -58,6 +85,11 @@ fn example_main() {
let bus = pipeline.get_bus().unwrap();

// Instead of using a main loop (like GLib's), we manually iterate over
// GStreamer's bus messages in this example. We don't need any special
// functionality like timeouts or GLib socket notifications, so this is sufficient.
// The bus is manually operated by repeatedly calling timed_pop on the bus with
// the desired timeout for when to stop waiting for new messages. (None = wait forever)
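// A hedged sketch of a variant with a finite timeout: timed_pop would return
// None after one second without messages, ending the loop early:
// while let Some(msg) = bus.timed_pop(gst::SECOND) {
//     /* handle msg as below */
// }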
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;

@ -73,27 +105,43 @@ fn example_main() {
break;
}
MessageView::Toc(msg_toc) => {
// Some element found a ToC in the current media stream and told
// us by posting a message to GStreamer's bus.
let (toc, updated) = msg_toc.get_toc();
println!(
"\nReceived toc: {:?} - updated: {}",
toc.get_scope(),
updated
);
// Get a list of tags that are ToC specific.
if let Some(tags) = toc.get_tags() {
println!("- tags: {}", tags.to_string());
}
// ToCs do not have a fixed structure. Depending on the format that
// they were parsed from, they might have different tree-like structures,
// so applications that want to support ToCs (for example in the form
// of jumping between chapters in a video) have to try parsing and
// interpreting the ToC manually.
// In this example, we simply want to print the ToC structure, so
// we iterate over everything and don't try to interpret anything.
for toc_entry in toc.get_entries() {
// Every entry in a ToC has its own type. One type could, for
// example, be Chapter.
println!(
"\t{:?} - {}",
toc_entry.get_entry_type(),
toc_entry.get_uid()
);
// Every ToC entry can have a set of timestamps (start, stop).
if let Some((start, stop)) = toc_entry.get_start_stop_times() {
println!("\t- start: {}, stop: {}", start, stop);
}
// Every ToC entry can have tags attached to it.
if let Some(tags) = toc_entry.get_tags() {
println!("\t- tags: {}", tags.to_string());
}
// Every ToC entry can have a set of child entries.
// With this structure, you can create trees of arbitrary depth.
// (A sketch of a recursive traversal follows below.)
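// A hedged sketch of how arbitrary depth could be handled with a recursive
// helper (this example itself only prints one level of children):
// fn print_toc_entry(entry: &gst::TocEntry, depth: usize) {
//     println!("{}{:?} - {}", "\t".repeat(depth), entry.get_entry_type(), entry.get_uid());
//     for child in entry.get_sub_entries() {
//         print_toc_entry(&child, depth + 1);
//     }
// }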
for toc_sub_entry in toc_entry.get_sub_entries() {
println!(
"\n\t\t{:?} - {}",