// gst-plugins-rs/net/webrtc/examples/whipserver.rs
//
// Rudimentary example to test multiple WHIP client connections against the
// `whipserversrc` element. Originally added by Taruntej Kanakamalla
// (commit ac9ef0a8d2, 2024-04-17).
use std::process::exit;
use anyhow::Error;
use clap::Parser;
use gst::prelude::*;
// Command-line arguments for the WHIP server example.
// NOTE: plain `//` comments are used deliberately — `///` doc comments on a
// clap derive would change the generated `--help` text.
#[derive(Parser, Debug)]
struct Args {
    // Address the WHIP signaller listens on; forwarded verbatim to the
    // `signaller::host-addr` child property of `whipserversrc`.
    host_addr: String,
}
/// Builds and links a playback branch (`queue -> autovideosink`) for a newly
/// added video pad.
///
/// The elements are named after the pad (`queue_<pad>`, `vsink_<pad>`) so the
/// matching teardown in `unlink_video` can find them again by name.
fn link_video(pad: &gst::Pad, pipeline: &gst::Pipeline) {
    let q = gst::ElementFactory::make("queue")
        .name(format!("queue_{}", pad.name()))
        .build()
        .unwrap();
    let vsink = gst::ElementFactory::make("autovideosink")
        .name(format!("vsink_{}", pad.name()))
        .build()
        .unwrap();

    pipeline.add_many([&q, &vsink]).unwrap();
    gst::Element::link_many([&q, &vsink]).unwrap();

    // Hook the new branch up to the source's pad.
    let qsinkpad = q.static_pad("sink").unwrap();
    pad.link(&qsinkpad).expect("linking should work");

    // The pipeline is already running; bring the new elements up to its state.
    q.sync_state_with_parent().unwrap();
    vsink.sync_state_with_parent().unwrap();
}
/// Tears down the playback branch created by `link_video` for a pad that was
/// removed: looks the elements up by their pad-derived names, shuts them
/// down, and removes them from the pipeline.
fn unlink_video(pad: &gst::Pad, pipeline: &gst::Pipeline) {
    // These lookups mirror the names chosen in `link_video`; a miss means the
    // add/remove callbacks got out of sync, which is a bug in this example.
    let q = pipeline
        .by_name(format!("queue_{}", pad.name()).as_str())
        .expect("queue for removed pad should exist");
    let vsink = pipeline
        .by_name(format!("vsink_{}", pad.name()).as_str())
        .expect("video sink for removed pad should exist");

    // Stop the elements before removing them so they release their resources.
    q.set_state(gst::State::Null).unwrap();
    vsink.set_state(gst::State::Null).unwrap();
    pipeline.remove_many([&q, &vsink]).unwrap();
}
// Placeholder for newly added audio pads: this example does not render
// audio, so incoming audio streams are intentionally ignored.
fn link_audio(_pad: &gst::Pad) {}
fn main() -> Result<(), Error> {
gst::init()?;
let args = Args::parse();
let pipeline = gst::Pipeline::builder().build();
let ws = gst::ElementFactory::make("whipserversrc").build()?;
ws.dynamic_cast_ref::<gst::ChildProxy>()
.unwrap()
.set_child_property("signaller::host-addr", &args.host_addr);
ws.set_property("enable-data-channel-navigation", true);
let pipe = pipeline.clone();
ws.connect_pad_added(move |_ws, pad| {
if pad.name().contains("video_") {
link_video(pad, &pipe);
} else if pad.name().contains("audio_") {
} else {
println!("unknown pad type {}", pad.name());
}
});
let pipe = pipeline.clone();
ws.connect_pad_removed(move |_ws, pad| {
if pad.name().contains("video_") {
unlink_video(pad, &pipe);
} else if pad.name().contains("audio_") {
} else {
println!("unknown pad type {}", pad.name());
}
});
pipeline.add(&ws)?;
pipeline.set_state(gst::State::Playing)?;
let p = pipeline.clone();
ctrlc::set_handler(move || {
p.set_state(gst::State::Null).unwrap();
exit(0);
})
.expect("Error setting Ctrl-C handler");
let bus = pipeline.bus().expect("Pipeline should have a bus");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => {
println!("EOS");
break;
}
MessageView::Error(err) => {
pipeline.set_state(gst::State::Null)?;
eprintln!(
"Got error from {}: {} ({})",
msg.src()
.map(|s| String::from(s.path_string()))
.unwrap_or_else(|| "None".into()),
err.error(),
err.debug().unwrap_or_else(|| "".into()),
);
break;
}
_ => (),
}
}
pipeline.set_state(gst::State::Null)?;
Ok(())
}