examples/tutorials: Add missing playback tutorials

This commit is contained in:
Piotrek Brzeziński 2021-11-06 15:43:15 +01:00
parent 14dadf4c62
commit 31b78f483e
6 changed files with 754 additions and 0 deletions

View file

@ -34,3 +34,11 @@ required-features = ["termion"]
[[bin]]
name = "playback-tutorial-1"
required-features = ["termion"]
[[bin]]
name = "playback-tutorial-2"
required-features = ["termion"]
[[bin]]
name = "playback-tutorial-5"
required-features = ["termion", "gst-video"]

View file

@ -0,0 +1,201 @@
use glib::FlagsClass;
use gst::prelude::*;
use anyhow::Error;
use termion::event::Key;
use termion::input::TermRead;
use std::{thread, time};
#[path = "../tutorials-common.rs"]
mod tutorials_common;
/// Queries `playbin` for its stream counts and per-stream tag lists (via the
/// `get-*-tags` action signals) and prints a human-readable summary, followed
/// by the indices of the currently selected video/audio/subtitle streams.
fn analyze_streams(playbin: &gst::Element) {
    // Number of streams of each kind currently exposed by playbin
    let n_video = playbin.property::<i32>("n-video");
    let n_audio = playbin.property::<i32>("n-audio");
    let n_text = playbin.property::<i32>("n-text");
    println!(
        "{} video stream(s), {} audio stream(s), {} subtitle stream(s)",
        n_video, n_audio, n_text
    );

    for i in 0..n_video {
        // Per-stream tags are retrieved through playbin's action signals
        let tags = playbin.emit_by_name::<Option<gst::TagList>>("get-video-tags", &[&i]);
        if let Some(tags) = tags {
            println!("video stream {}:", i);
            if let Some(codec) = tags.get::<gst::tags::VideoCodec>() {
                println!("  codec: {}", codec.get());
            }
        }
    }

    for i in 0..n_audio {
        let tags = playbin.emit_by_name::<Option<gst::TagList>>("get-audio-tags", &[&i]);
        if let Some(tags) = tags {
            println!("audio stream {}:", i);
            if let Some(codec) = tags.get::<gst::tags::AudioCodec>() {
                println!("  codec: {}", codec.get());
            }
            // Fix: these bindings were previously both (misleadingly) named `codec`
            if let Some(language) = tags.get::<gst::tags::LanguageCode>() {
                println!("  language: {}", language.get());
            }
            if let Some(bitrate) = tags.get::<gst::tags::Bitrate>() {
                println!("  bitrate: {}", bitrate.get());
            }
        }
    }

    for i in 0..n_text {
        let tags = playbin.emit_by_name::<Option<gst::TagList>>("get-text-tags", &[&i]);
        if let Some(tags) = tags {
            println!("subtitle stream {}:", i);
            if let Some(language) = tags.get::<gst::tags::LanguageCode>() {
                println!("  language: {}", language.get());
            }
        } else {
            println!("no tags found for sub track");
        }
    }

    // Indices of the streams playbin is currently rendering
    let current_video = playbin.property::<i32>("current-video");
    let current_audio = playbin.property::<i32>("current-audio");
    let current_text = playbin.property::<i32>("current-text");
    println!(
        "Currently playing video stream {}, audio stream {}, subtitle stream {}",
        current_video, current_audio, current_text
    );
    println!("Type any number and hit ENTER to select a different subtitle stream");
}
/// Polls stdin asynchronously for keystrokes: a digit 0-9 selects the
/// corresponding subtitle stream on `playbin` (if in range), Ctrl+C quits
/// `main_loop` and ends this loop. Intended to run on its own thread.
fn handle_keyboard(playbin: &gst::Element, main_loop: &glib::MainLoop) {
    let mut stdin = termion::async_stdin().keys();
    loop {
        if let Some(Ok(input)) = stdin.next() {
            match input {
                Key::Char(index) => {
                    if let Some(index) = index.to_digit(10) {
                        // Here index can only be 0-9
                        let index = index as i32;
                        // Fix: this is the subtitle-stream count ("n-text"),
                        // so name it n_text rather than n_audio
                        let n_text = playbin.property::<i32>("n-text");
                        if index < n_text {
                            println!("Setting current subtitle stream to {}", index);
                            playbin.set_property("current-text", index);
                        } else {
                            eprintln!("Index out of bounds");
                        }
                    }
                }
                Key::Ctrl('c') => {
                    main_loop.quit();
                    break;
                }
                _ => continue,
            };
        }
        // Avoid busy-waiting on the async stdin reader
        thread::sleep(time::Duration::from_millis(50));
    }
}
// Builds a playbin that plays the Sintel trailer with Greek subtitles, wires
// up a keyboard thread (subtitle selection) and a bus watch, then runs a GLib
// main loop until quit. Returns Err if any GStreamer setup step fails.
fn tutorial_main() -> Result<(), Error> {
// Create the main loop
let main_loop = glib::MainLoop::new(None, false);
// Initialize GStreamer
gst::init()?;
// Create PlayBin element
let playbin = gst::ElementFactory::make("playbin", Some("playbin"))?;
// Set URI to play
let uri =
"https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.ogv";
playbin.set_property("uri", uri);
// Set the subtitle URI and font description
let subtitle_uri =
"https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer_gr.srt";
playbin.set_property("suburi", subtitle_uri);
playbin.set_property("subtitle-font-desc", "Sans, 18");
// Set flags to show Audio, Video and Subtitles:
// read the current "flags" value and rebuild it with the three nicks set
let flags = playbin.property_value("flags");
let flags_class = FlagsClass::new(flags.type_()).unwrap();
let flags = flags_class
.builder_with_value(flags)
.unwrap()
.set_by_nick("audio")
.set_by_nick("video")
.set_by_nick("text")
.build()
.unwrap();
playbin.set_property_from_value("flags", &flags);
// Add a keyboard watch so we get notified of keystrokes
// (spawned on its own thread because termion's async stdin is polled)
let playbin_clone = playbin.clone();
let main_loop_clone = main_loop.clone();
thread::spawn(move || handle_keyboard(&playbin_clone, &main_loop_clone));
// Add a bus watch, so we get notified when a message arrives
let playbin_clone = playbin.clone();
let main_loop_clone = main_loop.clone();
let bus = playbin.bus().unwrap();
bus.add_watch(move |_bus, message| {
use gst::MessageView;
match message.view() {
// Fatal error: report it, stop the main loop, and remove this watch
MessageView::Error(err) => {
eprintln!(
"Error received from element {:?} {}",
err.src().map(|s| s.path_string()),
err.error()
);
eprintln!("Debugging information: {:?}", err.debug());
main_loop_clone.quit();
Continue(false)
}
// Once playbin itself reaches Playing, the streams are known and
// their tags can be inspected
MessageView::StateChanged(state_changed) => {
if state_changed
.src()
.map(|s| s == playbin_clone)
.unwrap_or(false)
&& state_changed.current() == gst::State::Playing
{
analyze_streams(&playbin_clone);
}
Continue(true)
}
MessageView::Eos(..) => {
println!("Reached end of stream");
main_loop_clone.quit();
Continue(false)
}
_ => Continue(true),
}
})?;
// Start playing
playbin.set_state(gst::State::Playing)?;
// Set GLib mainloop to run
main_loop.run();
// Clean up: always bring the pipeline back to Null before dropping it
playbin.set_state(gst::State::Null)?;
Ok(())
}
fn main() {
    // tutorials_common::run is only required to set up the application environment on macOS
    // (but not necessary in normal Cocoa applications where this is set up automatically)
    if let Err(err) = tutorials_common::run(tutorial_main) {
        eprintln!("Failed: {}", err);
    }
}

View file

@ -0,0 +1,190 @@
use std::sync::{Arc, Mutex};
use anyhow::Error;
use byte_slice_cast::*;
use glib::source::SourceId;
use gst::prelude::*;
use gst_app::AppSrc;
use gst_audio::AudioInfo;
#[path = "../tutorials-common.rs"]
mod tutorials_common;
const CHUNK_SIZE: usize = 1024; // Amount of bytes we are sending in each buffer
const SAMPLE_RATE: u32 = 44_100; // Samples per second we are sending
// Shared state between the appsrc need-data/enough-data callbacks and the
// idle handler that generates audio buffers.
#[derive(Debug)]
struct CustomData {
// Idle-source id while we are actively feeding the appsrc (None when idle)
source_id: Option<SourceId>,
num_samples: u64, // Number of samples generated so far (for timestamp generation)
// For waveform generation
a: f64,
b: f64,
c: f64,
d: f64,
// The appsrc element we push the generated buffers into
appsrc: AppSrc,
}
impl CustomData {
    /// Builds a fresh state object: no idle source yet, a zeroed sample
    /// counter, the initial waveform coefficients, and a clone of `appsrc`.
    fn new(appsrc: &AppSrc) -> CustomData {
        let appsrc = appsrc.clone();
        CustomData {
            appsrc,
            source_id: None,
            num_samples: 0,
            a: 0.0,
            b: 1.0,
            c: 0.0,
            d: 1.0,
        }
    }
}
// Builds a `playbin uri=appsrc://` pipeline, configures the appsrc that
// playbin creates (via the "source-setup" signal) to produce 16-bit mono
// audio at 44.1 kHz, and runs a GLib main loop until an error occurs.
fn tutorial_main() -> Result<(), Error> {
// Initialize GStreamer
gst::init().unwrap();
// Create the playbin element
let pipeline = gst::parse_launch("playbin uri=appsrc://").unwrap();
// This part is called when playbin has created the appsrc element,
// so we have a chance to configure it.
pipeline.connect("source-setup", false, |args| {
println!("Source has been created. Configuring.");
let _pipeline = args[0].get::<gst::Element>().unwrap();
let source = args[1]
.get::<gst_app::AppSrc>()
.expect("Source element is expected to be an appsrc!");
// Advertise the raw format we will push: signed 16-bit LE, 1 channel, 44.1 kHz
let audio_info = AudioInfo::builder(gst_audio::AudioFormat::S16le, SAMPLE_RATE, 1)
.build()
.unwrap();
let audio_caps = audio_info.to_caps().unwrap();
source.set_caps(Some(&audio_caps));
source.set_format(gst::Format::Time);
// Shared mutable state between the need-data and enough-data callbacks
let data: Arc<Mutex<CustomData>> = Arc::new(Mutex::new(CustomData::new(&source)));
let data_clone = data.clone();
source.set_callbacks(
gst_app::AppSrcCallbacks::builder()
// This signal callback is triggered when appsrc needs data.
// Here, we add an idle handler to the mainloop to start pushing data into the appsrc.
.need_data(move |_, _size| {
let data = &data_clone;
let mut d = data.lock().unwrap();
// Only install one idle handler at a time
if d.source_id.is_none() {
println!("Start feeding");
// Downgrade so the idle handler does not keep the state alive forever
let data_weak = Arc::downgrade(data);
d.source_id = Some(glib::source::idle_add(move || {
let data = match data_weak.upgrade() {
Some(data) => data,
None => return glib::Continue(false),
};
// Build one CHUNK_SIZE buffer while holding the lock, then
// release the lock before pushing into the appsrc
let (appsrc, buffer) = {
let mut data = data.lock().unwrap();
// Create a new empty buffer
let mut buffer = gst::Buffer::with_size(CHUNK_SIZE).unwrap();
// Each sample is 16 bits
let num_samples = CHUNK_SIZE / 2;
// Calculate timestamp and duration
let pts = gst::ClockTime::SECOND
.mul_div_floor(data.num_samples, u64::from(SAMPLE_RATE))
.expect("u64 overflow");
let duration = gst::ClockTime::SECOND
.mul_div_floor(num_samples as u64, u64::from(SAMPLE_RATE))
.expect("u64 overflow");
{
let buffer = buffer.get_mut().unwrap();
{
let mut samples = buffer.map_writable().unwrap();
let samples = samples.as_mut_slice_of::<i16>().unwrap();
// Generate some psychedelic waveforms
data.c += data.d;
data.d -= data.c / 1000.0;
let freq = 1100.0 + 1000.0 * data.d;
for sample in samples.iter_mut() {
data.a += data.b;
data.b -= data.a / freq;
*sample = 500 * (data.a as i16);
}
}
data.num_samples += num_samples as u64;
buffer.set_pts(pts);
buffer.set_duration(duration);
}
(data.appsrc.clone(), buffer)
};
// Push the buffer into the appsrc; stop the idle handler on failure
glib::Continue(appsrc.push_buffer(buffer).is_ok())
}));
}
})
// This callback is triggered when appsrc has enough data and we can stop sending.
.enough_data(move |_| {
let mut d = data.lock().unwrap();
if let Some(source) = d.source_id.take() {
println!("Stop feeding");
source.remove();
}
})
.build(),
);
None
});
// Create a GLib main loop
let main_loop = glib::MainLoop::new(None, false);
let main_loop_clone = main_loop.clone();
let bus = pipeline.bus().unwrap();
// Instruct the bus to emit signals for each received message, and connect to the interesting signals
#[allow(clippy::single_match)]
bus.connect_message(Some("error"), move |_, msg| match msg.view() {
gst::MessageView::Error(err) => {
eprintln!(
"Error received from element {:?}: {}",
err.src().map(|s| s.path_string()),
err.error()
);
eprintln!("Debugging information: {:?}", err.debug());
main_loop_clone.quit();
}
// We only subscribed to "error" messages, so nothing else can arrive here
_ => unreachable!(),
});
bus.add_signal_watch();
// Start playing
pipeline.set_state(gst::State::Playing)?;
// Run the GLib main loop
main_loop.run();
// Cleanup
pipeline.set_state(gst::State::Null)?;
bus.remove_signal_watch();
Ok(())
}
fn main() {
    // tutorials_common::run is only required to set up the application environment on macOS
    // (but not necessary in normal Cocoa applications where this is set up automatically)
    if let Err(err) = tutorials_common::run(tutorial_main) {
        eprintln!("Failed: {}", err);
    }
}

View file

@ -0,0 +1,200 @@
use gst::prelude::*;
use gst_video::prelude::ColorBalanceExt;
use anyhow::Error;
use termion::event::Key;
use termion::input::TermRead;
use std::{cmp, thread, time};
#[path = "../tutorials-common.rs"]
mod tutorials_common;
// Commands that we get from the terminal and we send to the main thread.
#[derive(Clone, PartialEq)]
enum Command {
// Adjust the color-balance channel with the given label (e.g. "CONTRAST");
// the bool is `true` to increase the value, `false` to decrease it.
UpdateChannel(String, bool),
// Stop the main loop and exit.
Quit,
}
/// Polls the terminal for keystrokes and forwards the matching `Command`
/// over `ready_tx` to the main thread; exits once `Quit` has been sent.
/// Intended to run on a dedicated thread.
fn handle_keyboard(ready_tx: glib::Sender<Command>) {
    let mut keys = termion::async_stdin().keys();
    loop {
        if let Some(Ok(key)) = keys.next() {
            let command = match key {
                Key::Char(c) => {
                    // Uppercase letters increase the channel, lowercase decrease it
                    let increase = c.is_uppercase();
                    match c {
                        'c' | 'C' => Command::UpdateChannel(String::from("CONTRAST"), increase),
                        'b' | 'B' => Command::UpdateChannel(String::from("BRIGHTNESS"), increase),
                        'h' | 'H' => Command::UpdateChannel(String::from("HUE"), increase),
                        's' | 'S' => Command::UpdateChannel(String::from("SATURATION"), increase),
                        'q' | 'Q' => Command::Quit,
                        _ => continue,
                    }
                }
                Key::Ctrl('c' | 'C') => Command::Quit,
                _ => continue,
            };

            // Decide whether to stop before the command is moved into send()
            let done = command == Command::Quit;
            ready_tx
                .send(command)
                .expect("Failed to send command to the main thread.");
            if done {
                break;
            }
        }

        // Avoid busy-waiting on the async stdin reader
        thread::sleep(time::Duration::from_millis(50));
    }
}
/// Nudges the color-balance channel labeled `channel_name` up or down by one
/// tenth of its range, clamping the result to the channel's valid interval.
/// Does nothing if no channel with that label exists.
fn update_color_channel(
    channel_name: &str,
    increase: bool,
    color_balance: &gst_video::ColorBalance,
) {
    // Locate the requested channel among all channels the balance exposes
    let channels = color_balance.list_channels();
    if let Some(channel) = channels.iter().find(|c| c.label() == channel_name) {
        // One step is a tenth of the channel's full range
        let step = (channel.max_value() - channel.min_value()) / 10;
        let current = color_balance.value(channel);
        // Move in the requested direction without leaving the valid range
        let new_value = if increase {
            cmp::min(current + step, channel.max_value())
        } else {
            cmp::max(current - step, channel.min_value())
        };
        color_balance.set_value(channel, new_value);
    }
}
/// Prints the value of every color-balance channel of `pipeline` as a
/// percentage of that channel's range, all on one line.
fn print_current_values(pipeline: &gst::Element) {
    // playbin implements the ColorBalance interface, so the cast must succeed
    let balance = pipeline
        .dynamic_cast_ref::<gst_video::ColorBalance>()
        .unwrap();
    for channel in balance.list_channels().iter() {
        let range = channel.max_value() - channel.min_value();
        let percentage = 100 * (balance.value(channel) - channel.min_value()) / range;
        print!("{}: {: >3}% ", channel.label(), percentage);
    }
    println!();
}
// Plays the Sintel trailer with playbin and lets the user adjust the video's
// color-balance channels from the terminal; commands arrive over a glib
// channel from the keyboard thread and are applied on the main loop.
fn tutorial_main() -> Result<(), Error> {
// Initialize GStreamer
gst::init()?;
println!(
"USAGE: Choose one of the following options, then press enter:
'C' to increase contrast, 'c' to decrease contrast
'B' to increase brightness, 'b' to decrease brightness
'H' to increase hue, 'h' to decrease hue
'S' to increase saturation, 's' to decrease saturation
'Q' to quit"
);
// Get a main context...
let main_context = glib::MainContext::default();
// ... and make it the main context by default so that we can then have a channel to send the
// commands we received from the terminal.
let _guard = main_context.acquire().unwrap();
// Build the channel to get the terminal inputs from a different thread.
let (ready_tx, ready_rx) = glib::MainContext::channel(glib::PRIORITY_DEFAULT);
// Start the keyboard handling thread
thread::spawn(move || handle_keyboard(ready_tx));
// Build the pipeline
let pipeline = gst::parse_launch(
"playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm")?;
let main_loop = glib::MainLoop::new(None, false);
let main_loop_clone = main_loop.clone();
// Weak reference so the command handler does not keep the pipeline alive
let pipeline_weak = pipeline.downgrade();
// Start playing
pipeline.set_state(gst::State::Playing)?;
// Apply each terminal command on the main loop's context
ready_rx.attach(Some(&main_loop.context()), move |command: Command| {
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return glib::Continue(true),
};
match command {
Command::UpdateChannel(ref name, increase) => {
// playbin implements the ColorBalance interface directly
let balance = pipeline
.dynamic_cast_ref::<gst_video::ColorBalance>()
.unwrap();
update_color_channel(name, increase, balance);
print_current_values(&pipeline);
}
Command::Quit => {
main_loop_clone.quit();
}
}
glib::Continue(true)
});
// Handle bus errors / EOS correctly
let main_loop_clone = main_loop.clone();
let bus = pipeline.bus().unwrap();
let pipeline_weak = pipeline.downgrade();
bus.add_watch(move |_bus, message| {
use gst::MessageView;
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return glib::Continue(true),
};
match message.view() {
// Fatal error: report it, stop the main loop, and remove this watch
MessageView::Error(err) => {
eprintln!(
"Error received from element {:?} {}",
err.src().map(|s| s.path_string()),
err.error()
);
eprintln!("Debugging information: {:?}", err.debug());
main_loop_clone.quit();
Continue(false)
}
MessageView::Eos(..) => {
println!("Reached end of stream");
pipeline
.set_state(gst::State::Ready)
.expect("Unable to set the pipeline to the `Ready` state");
main_loop_clone.quit();
Continue(false)
}
_ => Continue(true),
}
})?;
// Print initial values for all channels
print_current_values(&pipeline);
// Run the GLib main loop
main_loop.run();
pipeline.set_state(gst::State::Null)?;
Ok(())
}
fn main() {
    // tutorials_common::run is only required to set up the application environment on macOS
    // (but not necessary in normal Cocoa applications where this is set up automatically)
    if let Err(err) = tutorials_common::run(tutorial_main) {
        eprintln!("Failed: {}", err);
    }
}

View file

@ -0,0 +1,90 @@
use glib::FlagsClass;
use gst::prelude::*;
use anyhow::Error;
#[path = "../tutorials-common.rs"]
mod tutorials_common;
/// Registry filter predicate: keeps only element factories whose "klass"
/// metadata marks them as a Visualization element; any other plugin feature
/// is rejected.
fn filter_vis_features(feature: &gst::PluginFeature) -> bool {
    feature
        .downcast_ref::<gst::ElementFactory>()
        .map_or(false, |factory| {
            factory
                .metadata(&gst::ELEMENT_METADATA_KLASS)
                .unwrap()
                .contains("Visualization")
        })
}
/// Lists the available visualization plugins, selects the first GOOM one,
/// and plays an internet radio stream with that visualization enabled on
/// playbin. Returns Err if GStreamer setup or pipeline parsing fails.
fn tutorial_main() -> Result<(), Error> {
    // Initialize GStreamer
    gst::init()?;

    // Collect every visualization plugin from the registry
    let registry = gst::Registry::get();
    let features = registry.features_filtered(&filter_vis_features, false);

    // Print each factory's long name, remembering the first GOOM one we see
    println!("Available visualization plugins:");
    let mut chosen: Option<gst::ElementFactory> = None;
    for feature in features {
        let factory = feature.downcast::<gst::ElementFactory>().unwrap();
        let name = factory.metadata(&gst::ELEMENT_METADATA_LONGNAME).unwrap();
        println!("  {}", name);
        if chosen.is_none() && name.starts_with("GOOM") {
            chosen = Some(factory);
        }
    }

    // Don't proceed if no visualization plugins were found
    let vis_factory = chosen.expect("No visualization plugins found.");

    // Announce the selection and instantiate the visualization element
    let name = vis_factory
        .metadata(&gst::ELEMENT_METADATA_LONGNAME)
        .unwrap();
    println!("Selected {}", name);
    let vis_plugin = vis_factory.create(None).unwrap();

    // Build the playbin pipeline for the audio stream
    let pipeline = gst::parse_launch("playbin uri=http://radio.hbr1.com:19800/ambient.ogg")?;

    // Enable playbin's "vis" flag so the visualization is rendered
    let flags = pipeline.property_value("flags");
    let flags_class = FlagsClass::new(flags.type_()).unwrap();
    let flags = flags_class
        .builder_with_value(flags)
        .unwrap()
        .set_by_nick("vis")
        .build()
        .unwrap();
    pipeline.set_property_from_value("flags", &flags);

    // Hand playbin the visualization element to use
    pipeline.set_property("vis-plugin", &vis_plugin);

    // Start playing
    pipeline.set_state(gst::State::Playing)?;

    // Block until an EOS or error message appears on the bus
    let bus = pipeline.bus().unwrap();
    let _msg = bus.timed_pop_filtered(
        gst::ClockTime::NONE,
        &[gst::MessageType::Error, gst::MessageType::Eos],
    );

    // Clean up
    pipeline.set_state(gst::State::Null)?;
    Ok(())
}
fn main() {
    // tutorials_common::run is only required to set up the application environment on macOS
    // (but not necessary in normal Cocoa applications where this is set up automatically)
    if let Err(err) = tutorials_common::run(tutorial_main) {
        eprintln!("Failed: {}", err);
    }
}

View file

@ -0,0 +1,65 @@
use gst::prelude::*;
use anyhow::Error;
#[path = "../tutorials-common.rs"]
mod tutorials_common;
/// Builds a playbin whose audio sink is replaced by a custom bin containing
/// a 3-band equalizer (with the two highest bands attenuated), then plays
/// the stream until EOS or an error appears on the bus.
fn tutorial_main() -> Result<(), Error> {
    // Initialize GStreamer
    gst::init()?;

    // Build the playbin pipeline pointing at the remote trailer
    let pipeline = gst::parse_launch(
        "playbin uri=https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_trailer-480p.webm")?;

    // Elements that make up the custom audio-sink bin
    let equalizer = gst::ElementFactory::make("equalizer-3bands", Some("equalizer"))
        .expect("Could not create equalizer element.");
    let convert = gst::ElementFactory::make("audioconvert", Some("convert"))
        .expect("Could not create audioconvert element.");
    let sink = gst::ElementFactory::make("autoaudiosink", Some("audio_sink"))
        .expect("Could not create autoaudiosink element.");

    // Assemble the bin and wire the elements together
    let bin = gst::Bin::new(Some("audio_sink_bin"));
    bin.add_many(&[&equalizer, &convert, &sink]).unwrap();
    gst::Element::link_many(&[&equalizer, &convert, &sink]).expect("Failed to link elements.");

    // Expose the equalizer's sink pad on the bin via an active ghost pad
    let sink_pad = equalizer
        .static_pad("sink")
        .expect("Failed to get a static pad from equalizer.");
    let ghost = gst::GhostPad::with_target(Some("sink"), &sink_pad).unwrap();
    ghost.set_active(true)?;
    bin.add_pad(&ghost)?;

    // Attenuate the two highest frequency bands
    equalizer.set_property("band1", -24.0);
    equalizer.set_property("band2", -24.0);

    // Install the bin as playbin's audio sink
    pipeline.set_property("audio-sink", &bin);

    // Start playback
    pipeline.set_state(gst::State::Playing)?;

    // Block until an EOS or error message appears on the bus
    let bus = pipeline.bus().unwrap();
    let _msg = bus.timed_pop_filtered(
        gst::ClockTime::NONE,
        &[gst::MessageType::Error, gst::MessageType::Eos],
    );

    // Clean up
    pipeline.set_state(gst::State::Null)?;
    Ok(())
}
fn main() {
    // tutorials_common::run is only required to set up the application environment on macOS
    // (but not necessary in normal Cocoa applications where this is set up automatically)
    if let Err(err) = tutorials_common::run(tutorial_main) {
        eprintln!("Failed: {}", err);
    }
}