webrtc: add raw payload support

This commit adds support for raw payloads such as L24 audio to `webrtcsink` &
`webrtcsrc`.

Most changes take place within the `Codec` helper structure:

* A `Codec` can now advertise a depayloader. This ensures that a format can not
  only be decoded when necessary, but also depayloaded in the first place.
* Raw `Codec`s can now be declared, meaning that their caps are compatible with
  a payloader and a depayloader without the need for an encoder and decoder.
* The `has_decoder` accessor was renamed `can_be_received` to account for codecs
  which can be handled by an available depayloader, with or without a decoder.
* New codecs were added for the following formats:
  * L24, L16, L8 audio.
  * RAW video.

The `webrtc-precise-sync` examples were updated to demonstrate streaming of raw
audio or video.
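
For illustration, registering one of the new raw codecs looks roughly as follows. This is an editor's sketch, not part of the commit: it is based on the `Codec::new_raw` constructor and the `L24_CAPS` static added in the diffs below, and assumes it runs inside the codecs module where those items are visible.

```rust
// Sketch only: declare L24 as a raw codec, mirroring the `CODECS` initializer below.
let depayloaders = gst::ElementFactory::factories_with_type(
    gst::ElementFactoryType::DEPAYLOADER,
    gst::Rank::MARGINAL,
);
let payloaders = gst::ElementFactory::factories_with_type(
    gst::ElementFactoryType::PAYLOADER,
    gst::Rank::MARGINAL,
);

// No encoder or decoder is looked up: the codec is usable as soon as a payloader
// and a depayloader advertise the "L24" encoding-name.
let l24 = Codec::new_raw(
    "L24",
    gst::StreamType::AUDIO,
    &L24_CAPS,
    &depayloaders,
    &payloaders,
);

// Holds when an L24 RTP depayloader (e.g. rtpL24depay) is installed.
assert!(l24.can_be_received());
```
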
François Laignel 2024-04-16 18:49:01 +02:00
parent 542030fd82
commit 60f7f4a664
7 changed files with 363 additions and 89 deletions

View file

@@ -8045,7 +8045,7 @@
                     "construct": false,
                     "construct-only": false,
                     "controllable": false,
-                    "default": "audio/x-opus",
+                    "default": "audio/x-opus; audio/x-raw, format=(string)S24BE, layout=(GstAudioLayout)interleaved; audio/x-raw, format=(string)S16BE, layout=(GstAudioLayout)interleaved; audio/x-raw, format=(string)U8, layout=(GstAudioLayout)interleaved",
                     "mutable": "ready",
                     "readable": true,
                     "type": "GstCaps",
@@ -8228,7 +8228,7 @@
                     "construct": false,
                     "construct-only": false,
                     "controllable": false,
-                    "default": "video/x-vp8; video/x-h264; video/x-vp9; video/x-h265",
+                    "default": "video/x-vp8; video/x-h264; video/x-vp9; video/x-h265; video/x-raw, format=(string){ RGB, RGBA, BGR, BGRA, AYUV, UYVY, I420, Y41B, UYVP }",
                     "mutable": "ready",
                     "readable": true,
                     "type": "GstCaps",
@@ -8355,7 +8355,7 @@
             "kind": "object",
             "properties": {
                 "audio-codecs": {
-                    "blurb": "Names of audio codecs to be be used during the SDP negotiation. Valid values: [OPUS]",
+                    "blurb": "Names of audio codecs to be be used during the SDP negotiation. Valid values: [OPUS, L24, L16, L8]",
                     "conditionally-available": false,
                     "construct": false,
                     "construct-only": false,
@@ -8435,7 +8435,7 @@
                     "writable": true
                 },
                 "video-codecs": {
-                    "blurb": "Names of video codecs to be be used during the SDP negotiation. Valid values: [VP8, H264, VP9, H265]",
+                    "blurb": "Names of video codecs to be be used during the SDP negotiation. Valid values: [VP8, H264, VP9, H265, RAW]",
                    "conditionally-available": false,
                    "construct": false,
                    "construct-only": false,

View file

@@ -34,6 +34,8 @@ mode and an example based on RTSP instead of WebRTC.
 The examples can also be used for [RFC 7273] NTP or PTP clock signalling and
 synchronization.
 
+Finally, raw payloads (e.g. L24 audio) can be negotiated.
+
 [RFC 6051]: https://datatracker.ietf.org/doc/html/rfc6051
 [RFC 7273]: https://datatracker.ietf.org/doc/html/rfc7273
 [Instantaneous RTP synchronization...]: https://coaxion.net/blog/2022/05/instantaneous-rtp-synchronization-retrieval-of-absolute-sender-clock-times-with-gstreamer/
@@ -129,3 +131,31 @@ cargo r --example webrtc-precise-sync-recv -- --expect-clock-signalling
 cargo r --example webrtc-precise-sync-send -- --clock ntp --do-clock-signalling \
   --video-streams 0 --audio-streams 2
 ```
+
+### Raw payload
+
+The sender can be instructed to send raw payloads.
+
+This command will stream two stereo L24 streams:
+
+```shell
+cargo r --example webrtc-precise-sync-send -- \
+  --video-streams 0 --audio-streams 2 \
+  --audio-caps 'audio/x-raw,format=S24BE,rate=48000,channels=2'
+```
+
+Launch the receiver with:
+
+```shell
+cargo r --example webrtc-precise-sync-recv
+```
+
+This can be used to stream multiple RAW video streams, forcing the width and
+allowing fallback to VP8 & OPUS if the remote peer doesn't support raw payloads:
+
+```shell
+cargo r --example webrtc-precise-sync-send -- \
+  --video-streams 2 --audio-streams 1 \
+  --video-caps 'video/x-raw,format=I420,width=400;video/x-vp8' \
+  --audio-caps 'audio/x-raw,format=S24BE,rate=48000,channels=2;audio/x-opus'
+```
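
The receiver can also be pinned to specific codecs with the `--audio-codecs` / `--video-codecs` flags added to the receiver example further down. For instance (editor's illustration, not part of the committed README), to accept only L24 with an OPUS fallback:

```shell
cargo r --example webrtc-precise-sync-recv -- \
  --audio-codecs L24 --audio-codecs OPUS
```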

View file

@@ -43,6 +43,18 @@ struct Args {
     #[clap(long, help = "RTP jitterbuffer latency (ms)", default_value = "40")]
     pub rtp_latency: u32,
 
+    #[clap(
+        long,
+        help = "Force accepted audio codecs. See 'webrtcsrc' 'audio-codecs' property (ex. 'OPUS'). Accepts several occurrences."
+    )]
+    pub audio_codecs: Vec<String>,
+
+    #[clap(
+        long,
+        help = "Force accepted video codecs. See 'webrtcsrc' 'video-codecs' property (ex. 'VP8'). Accepts several occurrences."
+    )]
+    pub video_codecs: Vec<String>,
+
     #[clap(long, help = "Signalling server host", default_value = "localhost")]
     pub server: String,
 
@@ -123,6 +135,14 @@ fn spawn_consumer(
         webrtcsrc.set_property("do-retransmission", false);
     }
 
+    if !args.audio_codecs.is_empty() {
+        webrtcsrc.set_property("audio-codecs", gst::Array::new(&args.audio_codecs));
+    }
+
+    if !args.video_codecs.is_empty() {
+        webrtcsrc.set_property("video-codecs", gst::Array::new(&args.video_codecs));
+    }
+
     bin.add(&webrtcsrc).context("Adding webrtcsrc")?;
 
     let signaller = webrtcsrc.property::<gst::glib::Object>("signaller");

View file

@@ -45,6 +45,12 @@ struct Args {
     )]
     pub video_streams: usize,
 
+    #[clap(
+        long,
+        help = "Force audio caps (ex. for L24 'audio/x-raw,format=S24BE,rate=48000,channels=2')"
+    )]
+    pub audio_caps: Option<String>,
+
     #[clap(long, help = "Force video caps (ex. 'video/x-h264')")]
     pub video_caps: Option<String>,
 
@@ -135,6 +141,8 @@ impl App {
     }
 
     async fn prepare(&mut self) -> anyhow::Result<()> {
+        use std::str::FromStr;
+
         debug!("Preparing");
 
         self.pipeline = Some(gst::Pipeline::new());
@@ -217,6 +225,19 @@ impl App {
             });
         }
 
+        let raw_audio_caps = if let Some(ref audio_caps) = self.args.audio_caps {
+            let caps = gst::Caps::from_str(audio_caps).context("Parsing audio caps")?;
+            webrtcsink.set_property("audio-caps", &caps);
+
+            // Reuse the first user defined caps for the raw caps
+            let mut s = caps.structure(0).expect("parsed above").to_owned();
+            s.set_name("audio/x-raw");
+
+            Some(gst::Caps::from(s))
+        } else {
+            None
+        };
+
         for idx in 0..self.args.audio_streams {
             let audiosrc = gst::ElementFactory::make("audiotestsrc")
                 .property("is-live", true)
@@ -226,11 +247,47 @@ impl App {
                 .context("Creating audiotestsrc")?;
             self.pipeline().add(&audiosrc).context("Adding audiosrc")?;
 
-            audiosrc
-                .link_pads(None, &webrtcsink, Some("audio_%u"))
-                .context("Linking audiosrc")?;
+            if let Some(ref raw_caps) = raw_audio_caps {
+                audiosrc
+                    .link_pads_filtered(None, &webrtcsink, Some("audio_%u"), raw_caps)
+                    .context("Linking audiosrc")?;
+            } else {
+                audiosrc
+                    .link_pads(None, &webrtcsink, Some("audio_%u"))
+                    .context("Linking audiosrc")?;
+            }
         }
 
+        let raw_video_caps = {
+            let mut raw_video_caps = if let Some(ref video_caps) = self.args.video_caps {
+                let caps = gst::Caps::from_str(video_caps).context("Parsing video caps")?;
+                webrtcsink.set_property("video-caps", &caps);
+
+                // Reuse the first user defined caps for the raw caps
+                let mut s = caps.structure(0).expect("parsed above").to_owned();
+                s.set_name("video/x-raw");
+
+                gst::Caps::from(s)
+            } else {
+                gst::Caps::new_empty_simple("video/x-raw")
+            };
+
+            // If no width / height are specified, set something big enough
+            let caps_mut = raw_video_caps.make_mut();
+            let s = caps_mut.structure_mut(0).expect("created above");
+            match (s.get::<i32>("width").ok(), s.get::<i32>("height").ok()) {
+                (None, None) => {
+                    s.set("width", 800i32);
+                    s.set("height", 600i32);
+                }
+                (Some(width), None) => s.set("height", 3 * width / 4),
+                (None, Some(height)) => s.set("width", 4 * height / 3),
+                _ => (),
+            }
+
+            raw_video_caps
+        };
+
         for idx in 0..self.args.video_streams {
             let videosrc = gst::ElementFactory::make("videotestsrc")
                 .property("is-live", true)
@@ -247,13 +304,7 @@ impl App {
                 .expect("adding video elements");
 
             videosrc
-                .link_filtered(
-                    &video_overlay,
-                    &gst::Caps::builder("video/x-raw")
-                        .field("width", 800i32)
-                        .field("height", 600i32)
-                        .build(),
-                )
+                .link_filtered(&video_overlay, &raw_video_caps)
                 .context("Linking videosrc to timeoverlay")?;
 
             video_overlay
@@ -261,10 +312,6 @@ impl App {
                 .context("Linking video overlay")?;
         }
 
-        if let Some(ref video_caps) = self.args.video_caps {
-            webrtcsink.set_property("video-caps", &gst::Caps::builder(video_caps).build());
-        }
-
         Ok(())
     }

View file

@@ -427,7 +427,7 @@ impl Clone for DecodingInfo {
 
 #[derive(Clone, Debug)]
 struct EncodingInfo {
-    encoder: gst::ElementFactory,
+    encoder: Option<gst::ElementFactory>,
     payloader: gst::ElementFactory,
     output_filter: Option<gst::Caps>,
 }
@@ -449,16 +449,27 @@ impl Codec {
         stream_type: gst::StreamType,
         caps: &gst::Caps,
         decoders: &glib::List<gst::ElementFactory>,
+        depayloaders: &glib::List<gst::ElementFactory>,
         encoders: &glib::List<gst::ElementFactory>,
         payloaders: &glib::List<gst::ElementFactory>,
     ) -> Self {
         let has_decoder = Self::has_decoder_for_caps(caps, decoders);
+        let has_depayloader = Self::has_depayloader_for_codec(name, depayloaders);
+
+        let decoding_info = if has_depayloader && has_decoder {
+            Some(DecodingInfo {
+                has_decoder: AtomicBool::new(has_decoder),
+            })
+        } else {
+            None
+        };
+
         let encoder = Self::get_encoder_for_caps(caps, encoders);
         let payloader = Self::get_payloader_for_codec(name, payloaders);
 
         let encoding_info = if let (Some(encoder), Some(payloader)) = (encoder, payloader) {
             Some(EncodingInfo {
-                encoder,
+                encoder: Some(encoder),
                 payloader,
                 output_filter: None,
             })
@@ -471,11 +482,39 @@ impl Codec {
             stream_type,
             name: name.into(),
             payload_type: None,
-            decoding_info: Some(DecodingInfo {
-                has_decoder: AtomicBool::new(has_decoder),
-            }),
+            decoding_info,
+            encoding_info,
+        }
+    }
+
+    pub fn new_raw(
+        name: &str,
+        stream_type: gst::StreamType,
+        caps: &gst::Caps,
+        depayloaders: &glib::List<gst::ElementFactory>,
+        payloaders: &glib::List<gst::ElementFactory>,
+    ) -> Self {
+        let decoding_info = if Self::has_depayloader_for_codec(name, depayloaders) {
+            Some(DecodingInfo {
+                has_decoder: AtomicBool::new(false),
+            })
+        } else {
+            None
+        };
+
+        let payloader = Self::get_payloader_for_codec(name, payloaders);
+        let encoding_info = payloader.map(|payloader| EncodingInfo {
+            encoder: None,
+            payloader,
+            output_filter: None,
+        });
+
+        Self {
+            caps: caps.clone(),
+            stream_type,
+            name: name.into(),
+            payload_type: None,
+            decoding_info,
             encoding_info,
         }
     }
@@ -488,34 +527,16 @@ impl Codec {
         self.payload_type = Some(pt);
     }
 
-    pub fn new_decoding(
-        name: &str,
-        stream_type: gst::StreamType,
-        caps: &gst::Caps,
-        decoders: &glib::List<gst::ElementFactory>,
-    ) -> Self {
-        let has_decoder = Self::has_decoder_for_caps(caps, decoders);
-
-        Self {
-            caps: caps.clone(),
-            stream_type,
-            name: name.into(),
-            payload_type: None,
-            decoding_info: Some(DecodingInfo {
-                has_decoder: AtomicBool::new(has_decoder),
-            }),
-            encoding_info: None,
-        }
-    }
-
-    pub fn has_decoder(&self) -> bool {
+    pub fn can_be_received(&self) -> bool {
         if self.decoding_info.is_none() {
             return false;
         }
 
         let decoder_info = self.decoding_info.as_ref().unwrap();
 
+        if is_raw_caps(&self.caps) {
+            return true;
+        }
+
         if decoder_info.has_decoder.load(Ordering::SeqCst) {
             true
         } else if Self::has_decoder_for_caps(
@@ -599,6 +620,40 @@ impl Codec {
         })
     }
 
+    fn has_depayloader_for_codec(
+        codec: &str,
+        depayloaders: &glib::List<gst::ElementFactory>,
+    ) -> bool {
+        depayloaders.iter().any(|factory| {
+            factory.static_pad_templates().iter().any(|template| {
+                let template_caps = template.caps();
+
+                if template.direction() != gst::PadDirection::Sink || template_caps.is_any() {
+                    return false;
+                }
+
+                template_caps.iter().any(|s| {
+                    s.has_field("encoding-name")
+                        && s.get::<gst::List>("encoding-name").map_or_else(
+                            |_| {
+                                if let Ok(encoding_name) = s.get::<&str>("encoding-name") {
+                                    encoding_name == codec
+                                } else {
+                                    false
+                                }
+                            },
+                            |encoding_names| {
+                                encoding_names.iter().any(|v| {
+                                    v.get::<&str>()
+                                        .map_or(false, |encoding_name| encoding_name == codec)
+                                })
+                            },
+                        )
+                })
+            })
+        })
+    }
+
     pub fn is_video(&self) -> bool {
         matches!(self.stream_type, gst::StreamType::VIDEO)
     }
@@ -608,11 +663,13 @@ impl Codec {
     }
 
     pub fn build_encoder(&self) -> Option<Result<gst::Element, Error>> {
-        self.encoding_info.as_ref().map(|info| {
-            info.encoder
-                .create()
-                .build()
-                .with_context(|| format!("Creating encoder {}", info.encoder.name()))
+        self.encoding_info.as_ref().and_then(|info| {
+            info.encoder.as_ref().map(|encoder| {
+                encoder
+                    .create()
+                    .build()
+                    .with_context(|| format!("Creating encoder {}", encoder.name()))
+            })
         })
     }
@@ -655,13 +712,17 @@ impl Codec {
     }
 
     pub fn encoder_factory(&self) -> Option<gst::ElementFactory> {
-        self.encoding_info.as_ref().map(|info| info.encoder.clone())
+        self.encoding_info
+            .as_ref()
+            .and_then(|info| info.encoder.clone())
     }
 
     pub fn encoder_name(&self) -> Option<String> {
-        self.encoding_info
-            .as_ref()
-            .map(|info| info.encoder.name().to_string())
+        self.encoding_info.as_ref().and_then(|info| {
+            info.encoder
+                .as_ref()
+                .map(|encoder| encoder.name().to_string())
+        })
     }
 
     pub fn set_output_filter(&mut self, caps: gst::Caps) {
@@ -713,6 +774,36 @@ impl Codec {
 pub static AUDIO_CAPS: Lazy<gst::Caps> = Lazy::new(|| gst::Caps::new_empty_simple("audio/x-raw"));
 pub static OPUS_CAPS: Lazy<gst::Caps> = Lazy::new(|| gst::Caps::new_empty_simple("audio/x-opus"));
 
+pub static L24_CAPS: Lazy<gst::Caps> = Lazy::new(|| {
+    gst::Caps::builder_full()
+        .structure(
+            gst::Structure::builder("audio/x-raw")
+                .field("format", gst_audio::AudioFormat::S24be.to_str())
+                .field("layout", glib::gstr!("interleaved"))
+                .build(),
+        )
+        .build()
+});
+
+pub static L16_CAPS: Lazy<gst::Caps> = Lazy::new(|| {
+    gst::Caps::builder_full()
+        .structure(
+            gst::Structure::builder("audio/x-raw")
+                .field("format", gst_audio::AudioFormat::S16be.to_str())
+                .field("layout", glib::gstr!("interleaved"))
+                .build(),
+        )
+        .build()
+});
+
+pub static L8_CAPS: Lazy<gst::Caps> = Lazy::new(|| {
+    gst::Caps::builder_full()
+        .structure(
+            gst::Structure::builder("audio/x-raw")
+                .field("format", gst_audio::AudioFormat::U8.to_str())
+                .field("layout", glib::gstr!("interleaved"))
+                .build(),
+        )
+        .build()
+});
+
 pub static VIDEO_CAPS: Lazy<gst::Caps> = Lazy::new(|| {
     gst::Caps::builder_full_with_any_features()
@@ -723,6 +814,32 @@ pub static VP8_CAPS: Lazy<gst::Caps> = Lazy::new(|| gst::Caps::new_empty_simple(
 pub static VP9_CAPS: Lazy<gst::Caps> = Lazy::new(|| gst::Caps::new_empty_simple("video/x-vp9"));
 pub static H264_CAPS: Lazy<gst::Caps> = Lazy::new(|| gst::Caps::new_empty_simple("video/x-h264"));
 pub static H265_CAPS: Lazy<gst::Caps> = Lazy::new(|| gst::Caps::new_empty_simple("video/x-h265"));
+pub static VRAW_CAPS: Lazy<gst::Caps> = Lazy::new(|| {
+    gst::Caps::builder_full()
+        .structure(
+            gst::Structure::builder("video/x-raw")
+                .field(
+                    "format",
+                    gst::List::new(
+                        [
+                            gst_video::VideoFormat::Rgb,
+                            gst_video::VideoFormat::Rgba,
+                            gst_video::VideoFormat::Bgr,
+                            gst_video::VideoFormat::Bgra,
+                            gst_video::VideoFormat::Ayuv,
+                            gst_video::VideoFormat::Uyvy,
+                            gst_video::VideoFormat::I420,
+                            gst_video::VideoFormat::Y41b,
+                            gst_video::VideoFormat::Uyvp,
+                        ]
+                        .into_iter()
+                        .map(|f| f.to_str()),
+                    ),
+                )
+                .build(),
+        )
+        .build()
+});
 
 pub static RTP_CAPS: Lazy<gst::Caps> =
     Lazy::new(|| gst::Caps::new_empty_simple("application/x-rtp"));
@@ -750,7 +867,7 @@ impl Codecs {
         Self(codecs.values().cloned().collect())
     }
 
-    pub fn find_for_encoded_caps(&self, caps: &gst::Caps) -> Option<Codec> {
+    pub fn find_for_payloadable_caps(&self, caps: &gst::Caps) -> Option<Codec> {
         self.iter()
             .find(|codec| codec.caps.can_intersect(caps) && codec.encoding_info.is_some())
             .cloned()
@@ -763,6 +880,11 @@ static CODECS: Lazy<Codecs> = Lazy::new(|| {
         gst::Rank::MARGINAL,
     );
 
+    let depayloaders = gst::ElementFactory::factories_with_type(
+        gst::ElementFactoryType::DEPAYLOADER,
+        gst::Rank::MARGINAL,
+    );
+
     let encoders = gst::ElementFactory::factories_with_type(
         gst::ElementFactoryType::ENCODER,
         gst::Rank::MARGINAL,
@@ -779,14 +901,37 @@ static CODECS: Lazy<Codecs> = Lazy::new(|| {
             gst::StreamType::AUDIO,
             &OPUS_CAPS,
             &decoders,
+            &depayloaders,
             &encoders,
             &payloaders,
         ),
+        Codec::new_raw(
+            "L24",
+            gst::StreamType::AUDIO,
+            &L24_CAPS,
+            &depayloaders,
+            &payloaders,
+        ),
+        Codec::new_raw(
+            "L16",
+            gst::StreamType::AUDIO,
+            &L16_CAPS,
+            &depayloaders,
+            &payloaders,
+        ),
+        Codec::new_raw(
+            "L8",
+            gst::StreamType::AUDIO,
+            &L8_CAPS,
+            &depayloaders,
+            &payloaders,
+        ),
         Codec::new(
             "VP8",
             gst::StreamType::VIDEO,
             &VP8_CAPS,
             &decoders,
+            &depayloaders,
             &encoders,
             &payloaders,
         ),
@@ -795,6 +940,7 @@ static CODECS: Lazy<Codecs> = Lazy::new(|| {
             gst::StreamType::VIDEO,
             &H264_CAPS,
             &decoders,
+            &depayloaders,
             &encoders,
             &payloaders,
         ),
@@ -803,6 +949,7 @@ static CODECS: Lazy<Codecs> = Lazy::new(|| {
             gst::StreamType::VIDEO,
             &VP9_CAPS,
             &decoders,
+            &depayloaders,
             &encoders,
             &payloaders,
         ),
@@ -811,9 +958,17 @@ static CODECS: Lazy<Codecs> = Lazy::new(|| {
             gst::StreamType::VIDEO,
             &H265_CAPS,
             &decoders,
+            &depayloaders,
             &encoders,
             &payloaders,
         ),
+        Codec::new_raw(
+            "RAW",
+            gst::StreamType::VIDEO,
+            &VRAW_CAPS,
+            &depayloaders,
+            &payloaders,
+        ),
     ])
 });
@@ -899,7 +1054,6 @@ impl Codecs {
 }
 
 pub fn is_raw_caps(caps: &gst::Caps) -> bool {
-    assert!(caps.is_fixed());
     ["video/x-raw", "audio/x-raw"].contains(&caps.structure(0).unwrap().name().as_str())
 }

View file

@@ -845,7 +845,7 @@ impl PayloadChainBuilder {
             codec = self.codec,
         );
 
-        let needs_encoding = is_raw_caps(&self.input_caps);
+        let needs_encoding = !is_raw_caps(&self.codec.caps) && is_raw_caps(&self.input_caps);
 
         let mut elements: Vec<gst::Element> = Vec::new();
 
         let (raw_filter, encoder) = if needs_encoding {
@@ -3503,13 +3503,40 @@ impl BaseWebRTCSink {
         output_caps: gst::Caps,
         codecs: &Codecs,
     ) -> Result<(), Error> {
-        let futs = if let Some(codec) = codecs.find_for_encoded_caps(&discovery_info.caps) {
+        let futs = if is_raw_caps(&discovery_info.caps) {
+            let sink_caps = discovery_info.caps.clone();
+
+            let is_video = match sink_caps.structure(0).unwrap().name().as_str() {
+                "video/x-raw" => true,
+                "audio/x-raw" => false,
+                _ => anyhow::bail!("expected audio or video raw caps: {sink_caps}"),
+            };
+
+            codecs
+                .iter()
+                .filter(|codec| {
+                    codec.is_video() == is_video
+                        && (!is_raw_caps(&codec.caps) || codec.caps.can_intersect(&sink_caps))
+                })
+                .map(|codec| {
+                    BaseWebRTCSink::run_discovery_pipeline(
+                        element,
+                        &name,
+                        &discovery_info,
+                        codec.clone(),
+                        sink_caps.clone(),
+                        &output_caps,
+                        ExtensionConfigurationType::Auto,
+                    )
+                })
+                .collect()
+        } else if let Some(codec) = codecs.find_for_payloadable_caps(&discovery_info.caps) {
             let mut caps = discovery_info.caps.clone();
 
             gst::info!(
                 CAT,
                 obj: element,
-                "Stream is already encoded with codec {}, still need to payload it",
+                "Stream already conforms to {}, still need to payload it",
                 codec.name
             );
 
@@ -3525,29 +3552,7 @@ impl BaseWebRTCSink {
                 ExtensionConfigurationType::Auto,
             )]
         } else {
-            let sink_caps = discovery_info.caps.clone();
-
-            let is_video = match sink_caps.structure(0).unwrap().name().as_str() {
-                "video/x-raw" => true,
-                "audio/x-raw" => false,
-                _ => anyhow::bail!("Unsupported caps: {}", discovery_info.caps),
-            };
-
-            codecs
-                .iter()
-                .filter(|codec| codec.is_video() == is_video)
-                .map(|codec| {
-                    BaseWebRTCSink::run_discovery_pipeline(
-                        element,
-                        &name,
-                        &discovery_info,
-                        codec.clone(),
-                        sink_caps.clone(),
-                        &output_caps,
-                        ExtensionConfigurationType::Auto,
-                    )
-                })
-                .collect()
+            anyhow::bail!("Unsupported caps: {}", discovery_info.caps);
         };
 
         let mut payloader_caps = gst::Caps::new_empty();
@@ -4258,6 +4263,8 @@ impl GstObjectImpl for BaseWebRTCSink {}
 impl ElementImpl for BaseWebRTCSink {
     fn pad_templates() -> &'static [gst::PadTemplate] {
         static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
+            // Ignore specific raw caps from Codecs: they are covered by video/x-raw & audio/x-raw
+
             let mut caps_builder = gst::Caps::builder_full()
                 .structure(gst::Structure::builder("video/x-raw").build())
                 .structure_with_features(
@@ -4277,7 +4284,10 @@ impl ElementImpl for BaseWebRTCSink {
                     gst::CapsFeatures::new([D3D11_MEMORY_FEATURE]),
                 );
 
-            for codec in Codecs::video_codecs() {
+            for codec in Codecs::video_codecs()
+                .iter()
+                .filter(|codec| !is_raw_caps(&codec.caps))
+            {
                 caps_builder = caps_builder.structure(codec.caps.structure(0).unwrap().to_owned());
             }
 
@@ -4292,7 +4302,10 @@ impl ElementImpl for BaseWebRTCSink {
             let mut caps_builder =
                 gst::Caps::builder_full().structure(gst::Structure::builder("audio/x-raw").build());
 
-            for codec in Codecs::audio_codecs() {
+            for codec in Codecs::audio_codecs()
+                .iter()
+                .filter(|codec| !is_raw_caps(&codec.caps))
+            {
                 caps_builder = caps_builder.structure(codec.caps.structure(0).unwrap().to_owned());
             }
 
             let audio_pad_template = gst::PadTemplate::with_gtype(

View file

@@ -274,11 +274,11 @@ impl Default for Settings {
             meta: Default::default(),
             audio_codecs: Codecs::audio_codecs()
                 .into_iter()
-                .filter(|codec| codec.has_decoder())
+                .filter(|codec| codec.can_be_received())
                 .collect(),
             video_codecs: Codecs::video_codecs()
                 .into_iter()
-                .filter(|codec| codec.has_decoder())
+                .filter(|codec| codec.can_be_received())
                 .collect(),
             enable_data_channel_navigation: DEFAULT_ENABLE_DATA_CHANNEL_NAVIGATION,
             do_retransmission: DEFAULT_DO_RETRANSMISSION,
@@ -1027,11 +1027,18 @@ impl BaseWebRTCSrc {
 impl ElementImpl for BaseWebRTCSrc {
     fn pad_templates() -> &'static [gst::PadTemplate] {
         static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
+            use crate::utils::is_raw_caps;
+
+            // Ignore specific raw caps from Codecs: they are covered by VIDEO_CAPS & AUDIO_CAPS
+
             let mut video_caps_builder = gst::Caps::builder_full()
                 .structure_with_any_features(VIDEO_CAPS.structure(0).unwrap().to_owned())
                 .structure(RTP_CAPS.structure(0).unwrap().to_owned());
 
-            for codec in Codecs::video_codecs() {
+            for codec in Codecs::video_codecs()
+                .iter()
+                .filter(|codec| !is_raw_caps(&codec.caps))
+            {
                 video_caps_builder =
                     video_caps_builder.structure(codec.caps.structure(0).unwrap().to_owned());
             }
@@ -1040,7 +1047,10 @@ impl ElementImpl for BaseWebRTCSrc {
                 .structure_with_any_features(AUDIO_CAPS.structure(0).unwrap().to_owned())
                 .structure(RTP_CAPS.structure(0).unwrap().to_owned());
 
-            for codec in Codecs::audio_codecs() {
+            for codec in Codecs::audio_codecs()
+                .iter()
+                .filter(|codec| !is_raw_caps(&codec.caps))
+            {
                 audio_caps_builder =
                     audio_caps_builder.structure(codec.caps.structure(0).unwrap().to_owned());
             }