Mirror of https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs.git (synced 2025-03-28)
net/webrtc: add whipclient example

Add a simple example that produces both audio and video, to be used together with the whipserver example.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1938>
Parent: 0135aea9e4
Commit: 362216f40b

3 changed files with 87 additions and 0 deletions
net/webrtc/Cargo.toml

```diff
@@ -129,3 +129,7 @@ required-features = [ "whip" ]
 
 [[example]]
 name = "webrtcsink-define-encoder-bitrates"
+
+[[example]]
+name = "whipclient"
+required-features = [ "whip" ]
```
net/webrtc/README.md

````diff
@@ -271,3 +271,22 @@ https://github.com/M0Rf30/android-udev-rules
 cargo r --example webrtc-precise-sync-send
 ```
 4. Click the `Refresh` button on the Producer List view of the app.
+
+# webrtchttp examples
+
+Collection of webrtchttp examples
+
+## whipserversrc and whipclientsink
+
+A couple of examples demonstrating how a simple pipeline with both audio and
+video can use these elements.
+
+1. Run the WHIP server:
+```shell
+cargo r --example whipserver --features whip http://127.0.0.1:8190
+```
+
+2. Run the WHIP client:
+```shell
+cargo r --example whipclient --features whip http://127.0.0.1:8190/whip/endpoint
+```
````
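The whipserver example itself is not part of this commit, but for orientation, here is a minimal sketch of what the receiving side could look like. It assumes `whipserversrc` exposes its signaller through `GstChildProxy` the same way `whipclientsink` does in the example below, and that the listen address is a `host-addr` property on that signaller; both names are assumptions worth checking with `gst-inspect-1.0 whipserversrc`.

```rust
use gst::prelude::*;

fn main() -> Result<(), anyhow::Error> {
    gst::init()?;

    let pipeline = gst::Pipeline::builder().build();

    let whipserversrc = gst::ElementFactory::make("whipserversrc").build()?;
    // Assumption: the address to listen on is a `host-addr` property on the
    // signaller child object; verify with `gst-inspect-1.0 whipserversrc`.
    whipserversrc
        .dynamic_cast_ref::<gst::ChildProxy>()
        .unwrap()
        .set_child_property("signaller::host-addr", "http://127.0.0.1:8190");
    pipeline.add(&whipserversrc)?;

    // whipserversrc only creates source pads once a WHIP client connects,
    // so downstream elements are linked from the `pad-added` callback.
    let pipeline_weak = pipeline.downgrade();
    whipserversrc.connect_pad_added(move |_, pad| {
        let Some(pipeline) = pipeline_weak.upgrade() else {
            return;
        };
        // This sketch just discards the incoming streams.
        let sink = gst::ElementFactory::make("fakesink").build().unwrap();
        pipeline.add(&sink).unwrap();
        sink.sync_state_with_parent().unwrap();
        pad.link(&sink.static_pad("sink").unwrap()).unwrap();
    });

    pipeline.set_state(gst::State::Playing)?;

    let bus = pipeline.bus().expect("Pipeline should have a bus");
    // Block until an error or EOS, mirroring the client example below.
    let _ = bus.timed_pop_filtered(
        gst::ClockTime::NONE,
        &[gst::MessageType::Error, gst::MessageType::Eos],
    );

    pipeline.set_state(gst::State::Null)?;
    Ok(())
}
```

A real consumer would link decoders or display sinks from `pad-added` instead of a `fakesink`.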
net/webrtc/examples/whipclient.rs (new file, 64 lines)
```rust
use anyhow::Error;
use clap::Parser;
use gst::prelude::*;

#[derive(Parser, Debug)]
struct Args {
    /// URL of the WHIP endpoint to stream to
    whip_endpoint: String,
}

fn main() -> Result<(), Error> {
    gst::init()?;

    let args = Args::parse();

    let pipeline = gst::Pipeline::builder().build();

    let videotestsrc = gst::ElementFactory::make("videotestsrc").build()?;
    let audiotestsrc = gst::ElementFactory::make("audiotestsrc").build()?;
    let vqueue = gst::ElementFactory::make("queue").build()?;
    let aqueue = gst::ElementFactory::make("queue").build()?;
    let whipclientsink = gst::ElementFactory::make("whipclientsink").build()?;

    // The endpoint URL lives on the signaller child object, so it is set
    // through the GstChildProxy interface using the `signaller::` prefix.
    whipclientsink
        .dynamic_cast_ref::<gst::ChildProxy>()
        .unwrap()
        .set_child_property("signaller::whip-endpoint", args.whip_endpoint);

    // Video branch: videotestsrc -> queue -> whipclientsink
    pipeline.add_many([&videotestsrc, &vqueue, &whipclientsink])?;
    gst::Element::link_many([&videotestsrc, &vqueue, &whipclientsink])?;

    // Audio branch: audiotestsrc -> queue -> whipclientsink
    pipeline.add_many([&audiotestsrc, &aqueue])?;
    gst::Element::link_many([&audiotestsrc, &aqueue, &whipclientsink])?;

    pipeline.set_state(gst::State::Playing)?;

    let bus = pipeline.bus().expect("Pipeline should have a bus");

    // Block on the bus until EOS or an error is posted.
    for msg in bus.iter_timed(gst::ClockTime::NONE) {
        use gst::MessageView;

        match msg.view() {
            MessageView::Eos(..) => {
                println!("EOS");
                break;
            }
            MessageView::Error(err) => {
                pipeline.set_state(gst::State::Null)?;
                eprintln!(
                    "Got error from {}: {} ({})",
                    msg.src()
                        .map(|s| String::from(s.path_string()))
                        .unwrap_or_else(|| "None".into()),
                    err.error(),
                    err.debug().unwrap_or_else(|| "".into()),
                );
                break;
            }
            _ => (),
        }
    }

    pipeline.set_state(gst::State::Null)?;

    Ok(())
}
```
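The same ChildProxy path can carry other signaller settings. As a follow-up to the example above, this sketch sets a bearer token on the `whipclientsink` created there, assuming the WHIP client signaller exposes an `auth-token` property (worth verifying with `gst-inspect-1.0 whipclientsink`):

```rust
// Assumption: `auth-token` exists on the whipclientsink signaller;
// verify with `gst-inspect-1.0 whipclientsink` before relying on it.
whipclientsink
    .dynamic_cast_ref::<gst::ChildProxy>()
    .unwrap()
    .set_child_property("signaller::auth-token", "my-secret-token");
```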