Merge branch '0.12-backports' into '0.12'

0.12 backports

See merge request gstreamer/gst-plugins-rs!1556
Sebastian Dröge 2024-04-29 09:50:45 +00:00
commit b964391330
29 changed files with 1634 additions and 812 deletions

Cargo.lock (generated): 606 changed lines

File diff suppressed because it is too large.

View file

@ -130,6 +130,7 @@ gdk-wayland = { package = "gdk4-wayland", git = "https://github.com/gtk-rs/gtk4-
gdk-x11 = { package = "gdk4-x11", git = "https://github.com/gtk-rs/gtk4-rs", branch = "0.8", version = "0.8"}
gdk-win32 = { package = "gdk4-win32", git = "https://github.com/gtk-rs/gtk4-rs", branch = "0.8", version = "0.8"}
gst = { package = "gstreamer", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-allocators = { package = "gstreamer-allocators", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-app = { package = "gstreamer-app", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-audio = { package = "gstreamer-audio", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-base = { package = "gstreamer-base", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }

View file

@ -649,7 +649,7 @@ impl BaseTransformImpl for HrtfRender {
if direction == gst::PadDirection::Sink {
s.set("channels", 2);
s.set("channel-mask", 0x3);
s.set("channel-mask", gst::Bitmask(0x3));
} else {
let settings = self.settings.lock().unwrap();
if let Some(objs) = &settings.spatial_objects {
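
The fix in this hunk is type-level: the "channel-mask" caps field is a GST_TYPE_BITMASK, so it has to be set with `gst::Bitmask` rather than a plain integer, which would produce a field of the wrong type that no longer intersects with real audio caps. A minimal sketch, with hand-built caps for illustration:

gst::init().unwrap();
// "channel-mask" must carry a Bitmask value; 0x3 = front-left | front-right.
let good = gst::Caps::builder("audio/x-raw")
    .field("channels", 2i32)
    .field("channel-mask", gst::Bitmask(0x3))
    .build();
// A plain integer instead creates a gint64 field of the wrong type.
let bad = gst::Caps::builder("audio/x-raw")
    .field("channels", 2i32)
    .field("channel-mask", 0x3i64)
    .build();
assert!(!good.can_intersect(&bad));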

View file

@ -102,11 +102,6 @@ version = "0.21"
name = "socket2"
version = "0.4"
# Various crates depend on an older version of syn
[[bans.skip]]
name = "syn"
version = "1.0"
# Various crates depend on an older version of bitflags
[[bans.skip]]
name = "bitflags"

View file

@ -79,6 +79,18 @@
"type": "gchararray",
"writable": true
},
"force-path-style": {
"blurb": "Force client to use path-style addressing for buckets",
"conditionally-available": false,
"construct": false,
"construct-only": false,
"controllable": false,
"default": "false",
"mutable": "null",
"readable": true,
"type": "gboolean",
"writable": true
},
"hlssink": {
"blurb": "The underlying HLS sink being used",
"conditionally-available": false,
@ -315,6 +327,18 @@
"type": "gboolean",
"writable": true
},
"force-path-style": {
"blurb": "Force client to use path-style addressing for buckets",
"conditionally-available": false,
"construct": false,
"construct-only": false,
"controllable": false,
"default": "false",
"mutable": "null",
"readable": true,
"type": "gboolean",
"writable": true
},
"key": {
"blurb": "The key of the file to write",
"conditionally-available": false,
@ -529,6 +553,18 @@
"type": "gchararray",
"writable": true
},
"force-path-style": {
"blurb": "Force client to use path-style addressing for buckets",
"conditionally-available": false,
"construct": false,
"construct-only": false,
"controllable": false,
"default": "false",
"mutable": "null",
"readable": true,
"type": "gboolean",
"writable": true
},
"key": {
"blurb": "The key of the file to write",
"conditionally-available": false,
@ -747,6 +783,18 @@
"type": "gchararray",
"writable": true
},
"force-path-style": {
"blurb": "Force client to use path-style addressing for buckets",
"conditionally-available": false,
"construct": false,
"construct-only": false,
"controllable": false,
"default": "false",
"mutable": "null",
"readable": true,
"type": "gboolean",
"writable": true
},
"request-timeout": {
"blurb": "Timeout for each S3 request (in ms, set to -1 for infinity)",
"conditionally-available": false,
@ -2038,7 +2086,7 @@
"long-name": "ISOFMP4Mux",
"pad-templates": {
"sink_%%u": {
"caps": "video/x-h264:\n stream-format: { (string)avc, (string)avc3 }\n alignment: au\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-h265:\n stream-format: { (string)hvc1, (string)hev1 }\n alignment: au\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-vp8:\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-vp9:\n profile: { (string)0, (string)1, (string)2, (string)3 }\n chroma-format: { (string)4:2:0, (string)4:2:2, (string)4:4:4 }\n bit-depth-luma: { (uint)8, (uint)10, (uint)12 }\nbit-depth-chroma: { (uint)8, (uint)10, (uint)12 }\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-av1:\n stream-format: obu-stream\n alignment: tu\n profile: { (string)main, (string)high, (string)professional }\n chroma-format: { (string)4:0:0, (string)4:2:0, (string)4:2:2, (string)4:4:4 }\n bit-depth-luma: { (uint)8, (uint)10, (uint)12 }\nbit-depth-chroma: { (uint)8, (uint)10, (uint)12 }\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\naudio/mpeg:\n mpegversion: 4\n stream-format: raw\n channels: [ 1, 65535 ]\n rate: [ 1, 2147483647 ]\naudio/x-opus:\nchannel-mapping-family: [ 0, 255 ]\n channels: [ 1, 8 ]\n rate: [ 1, 2147483647 ]\n",
"caps": "video/x-h264:\n stream-format: { (string)avc, (string)avc3 }\n alignment: au\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-h265:\n stream-format: { (string)hvc1, (string)hev1 }\n alignment: au\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-vp8:\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-vp9:\n profile: { (string)0, (string)1, (string)2, (string)3 }\n chroma-format: { (string)4:2:0, (string)4:2:2, (string)4:4:4 }\n bit-depth-luma: { (uint)8, (uint)10, (uint)12 }\nbit-depth-chroma: { (uint)8, (uint)10, (uint)12 }\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-av1:\n stream-format: obu-stream\n alignment: tu\n profile: { (string)main, (string)high, (string)professional }\n chroma-format: { (string)4:0:0, (string)4:2:0, (string)4:2:2, (string)4:4:4 }\n bit-depth-luma: { (uint)8, (uint)10, (uint)12 }\nbit-depth-chroma: { (uint)8, (uint)10, (uint)12 }\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\naudio/mpeg:\n mpegversion: 4\n stream-format: raw\n channels: [ 1, 65535 ]\n rate: [ 1, 2147483647 ]\naudio/x-opus:\nchannel-mapping-family: [ 0, 255 ]\n channels: [ 1, 8 ]\n rate: [ 1, 2147483647 ]\naudio/x-flac:\n framed: true\n channels: [ 1, 8 ]\n rate: [ 1, 655350 ]\n",
"direction": "sink",
"presence": "request",
"type": "GstFMP4MuxPad"
@ -2357,11 +2405,14 @@
"GInitiallyUnowned",
"GObject"
],
"interfaces": [
"GstChildProxy"
],
"klass": "Sink/Video",
"long-name": "GTK 4 Paintable Sink",
"pad-templates": {
"sink": {
"caps": "video/x-raw:\n format: { BGRA, ARGB, RGBA, ABGR, RGB, BGR }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(memory:GLMemory, meta:GstVideoOverlayComposition):\n format: { RGBA, RGB }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n texture-target: 2D\n\nvideo/x-raw(memory:GLMemory):\n format: { RGBA, RGB }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n texture-target: 2D\n\nvideo/x-raw(memory:SystemMemory, meta:GstVideoOverlayComposition):\n format: { BGRA, ARGB, RGBA, ABGR, RGB, BGR }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(meta:GstVideoOverlayComposition):\n format: { BGRA, ARGB, RGBA, ABGR, RGB, BGR }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n",
"caps": "video/x-raw(memory:GLMemory, meta:GstVideoOverlayComposition):\n format: { RGBA, RGB }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n texture-target: 2D\n\nvideo/x-raw(memory:GLMemory):\n format: { RGBA, RGB }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n texture-target: 2D\n\nvideo/x-raw(memory:SystemMemory, meta:GstVideoOverlayComposition):\n format: { BGRA, ARGB, RGBA, ABGR, RGB, BGR }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n\nvideo/x-raw(meta:GstVideoOverlayComposition):\n format: { BGRA, ARGB, RGBA, ABGR, RGB, BGR }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\nvideo/x-raw:\n format: { BGRA, ARGB, RGBA, ABGR, RGB, BGR }\n width: [ 1, 2147483647 ]\n height: [ 1, 2147483647 ]\n framerate: [ 0/1, 2147483647/1 ]\n",
"direction": "sink",
"presence": "always"
}
@ -3251,7 +3302,7 @@
"klass": "Codec/Muxer",
"pad-templates": {
"sink_%%u": {
"caps": "video/x-h264:\n stream-format: { (string)avc, (string)avc3 }\n alignment: au\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-h265:\n stream-format: { (string)hvc1, (string)hev1 }\n alignment: au\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-vp8:\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-vp9:\n profile: { (string)0, (string)1, (string)2, (string)3 }\n chroma-format: { (string)4:2:0, (string)4:2:2, (string)4:4:4 }\n bit-depth-luma: { (uint)8, (uint)10, (uint)12 }\nbit-depth-chroma: { (uint)8, (uint)10, (uint)12 }\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-av1:\n stream-format: obu-stream\n alignment: tu\n profile: { (string)main, (string)high, (string)professional }\n chroma-format: { (string)4:0:0, (string)4:2:0, (string)4:2:2, (string)4:4:4 }\n bit-depth-luma: { (uint)8, (uint)10, (uint)12 }\nbit-depth-chroma: { (uint)8, (uint)10, (uint)12 }\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\naudio/mpeg:\n mpegversion: 4\n stream-format: raw\n channels: [ 1, 65535 ]\n rate: [ 1, 2147483647 ]\naudio/x-opus:\nchannel-mapping-family: [ 0, 255 ]\n channels: [ 1, 8 ]\n rate: [ 1, 2147483647 ]\n",
"caps": "video/x-h264:\n stream-format: { (string)avc, (string)avc3 }\n alignment: au\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-h265:\n stream-format: { (string)hvc1, (string)hev1 }\n alignment: au\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-vp8:\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-vp9:\n profile: { (string)0, (string)1, (string)2, (string)3 }\n chroma-format: { (string)4:2:0, (string)4:2:2, (string)4:4:4 }\n bit-depth-luma: { (uint)8, (uint)10, (uint)12 }\nbit-depth-chroma: { (uint)8, (uint)10, (uint)12 }\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\nvideo/x-av1:\n stream-format: obu-stream\n alignment: tu\n profile: { (string)main, (string)high, (string)professional }\n chroma-format: { (string)4:0:0, (string)4:2:0, (string)4:2:2, (string)4:4:4 }\n bit-depth-luma: { (uint)8, (uint)10, (uint)12 }\nbit-depth-chroma: { (uint)8, (uint)10, (uint)12 }\n width: [ 1, 65535 ]\n height: [ 1, 65535 ]\naudio/mpeg:\n mpegversion: 4\n stream-format: raw\n channels: [ 1, 65535 ]\n rate: [ 1, 2147483647 ]\naudio/x-opus:\nchannel-mapping-family: [ 0, 255 ]\n channels: [ 1, 8 ]\n rate: [ 1, 2147483647 ]\naudio/x-flac:\n framed: true\n channels: [ 1, 8 ]\n rate: [ 1, 655350 ]\n",
"direction": "sink",
"presence": "request",
"type": "GstRsMP4MuxPad"

View file

@ -301,6 +301,23 @@ if get_option('gtk4').allowed()
gtk4_features += 'winegl'
endif
endif
gst_allocators_dep = dependency('gstreamer-allocators-1.0', version: '>=1.24', required: false)
gtk_dep = dependency('gtk4', version: '>=4.6', required: get_option('gtk4'))
if gtk_dep.found()
if host_system == 'linux' and gtk_dep.version().version_compare('>=4.14') and gst_allocators_dep.found()
gtk4_features += 'dmabuf'
endif
if gtk_dep.version().version_compare('>=4.14')
gtk4_features += 'gtk_v4_14'
elif gtk_dep.version().version_compare('>=4.12')
gtk4_features += 'gtk_v4_12'
elif gtk_dep.version().version_compare('>=4.10')
gtk4_features += 'gtk_v4_10'
endif
endif
plugins += {
'gtk4': {
'library': 'libgstgtk4',

View file

@ -9,6 +9,7 @@
use gst::prelude::*;
use anyhow::{anyhow, bail, Context, Error};
use std::convert::TryFrom;
use super::Buffer;
@ -604,9 +605,8 @@ fn write_tkhd(
// Volume
let s = stream.caps.structure(0).unwrap();
match s.name().as_str() {
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
v.extend((1u16 << 8).to_be_bytes())
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => v.extend((1u16 << 8).to_be_bytes()),
_ => v.extend(0u16.to_be_bytes()),
}
@ -745,9 +745,8 @@ fn write_hdlr(
let (handler_type, name) = match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => (b"vide", b"VideoHandler\0".as_slice()),
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
(b"soun", b"SoundHandler\0".as_slice())
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => (b"soun", b"SoundHandler\0".as_slice()),
"application/x-onvif-metadata" => (b"meta", b"MetadataHandler\0".as_slice()),
_ => unreachable!(),
};
@ -777,7 +776,8 @@ fn write_minf(
// Flags are always 1 for unspecified reasons
write_full_box(v, b"vmhd", FULL_BOX_VERSION_0, 1, |v| write_vmhd(v, cfg))?
}
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => {
write_full_box(v, b"smhd", FULL_BOX_VERSION_0, FULL_BOX_FLAGS_NONE, |v| {
write_smhd(v, cfg)
})?
@ -886,9 +886,8 @@ fn write_stsd(
match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => write_visual_sample_entry(v, cfg, stream)?,
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
write_audio_sample_entry(v, cfg, stream)?
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => write_audio_sample_entry(v, cfg, stream)?,
"application/x-onvif-metadata" => write_xml_meta_data_sample_entry(v, cfg, stream)?,
_ => unreachable!(),
}
@ -1262,6 +1261,7 @@ fn write_audio_sample_entry(
let fourcc = match s.name().as_str() {
"audio/mpeg" => b"mp4a",
"audio/x-opus" => b"Opus",
"audio/x-flac" => b"fLaC",
"audio/x-alaw" => b"alaw",
"audio/x-mulaw" => b"ulaw",
"audio/x-adpcm" => {
@ -1280,6 +1280,10 @@ fn write_audio_sample_entry(
let bitrate = s.get::<i32>("bitrate").context("no ADPCM bitrate field")?;
(bitrate / 8000) as u16
}
"audio/x-flac" => with_flac_metadata(&stream.caps, |streaminfo, _| {
1 + (u16::from_be_bytes([streaminfo[16], streaminfo[17]]) >> 4 & 0b11111)
})
.context("FLAC metadata error")?,
_ => 16u16,
};
@ -1322,6 +1326,9 @@ fn write_audio_sample_entry(
"audio/x-opus" => {
write_dops(v, &stream.caps)?;
}
"audio/x-flac" => {
write_dfla(v, &stream.caps)?;
}
"audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
// Nothing to do here
}
@ -1516,6 +1523,35 @@ fn write_dops(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
})
}
fn with_flac_metadata<R>(
caps: &gst::Caps,
cb: impl FnOnce(&[u8], &[gst::glib::SendValue]) -> R,
) -> Result<R, Error> {
let caps = caps.structure(0).unwrap();
let header = caps.get::<gst::ArrayRef>("streamheader").unwrap();
let (streaminfo, remainder) = header.as_ref().split_first().unwrap();
let streaminfo = streaminfo.get::<&gst::BufferRef>().unwrap();
let streaminfo = streaminfo.map_readable().unwrap();
// 13 bytes for the Ogg/FLAC prefix and 38 for the streaminfo itself.
match <&[_; 13 + 38]>::try_from(streaminfo.as_slice()) {
Ok(i) if i.starts_with(b"\x7FFLAC\x01\x00") => Ok(cb(&i[13..], remainder)),
Ok(_) | Err(_) => bail!("Unknown streamheader format"),
}
}
fn write_dfla(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
write_full_box(v, b"dfLa", 0, 0, move |v| {
with_flac_metadata(caps, |streaminfo, remainder| {
v.extend(streaminfo);
for metadata in remainder {
let metadata = metadata.get::<&gst::BufferRef>().unwrap();
let metadata = metadata.map_readable().unwrap();
v.extend(&metadata[..]);
}
})
})
}
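
The sample-size arm for `audio/x-flac` above is dense: `with_flac_metadata` strips the 13-byte Ogg/FLAC prefix and hands the callback the 38-byte STREAMINFO block (4-byte metadata block header plus 34-byte body). Bytes 16 and 17 of that slice straddle the low nibble of the sample rate, the 3-bit channel count, and the 5-bit bits-per-sample field, which is stored as bps - 1. A worked example with hand-assembled bytes for 44.1 kHz stereo 16-bit audio (values made up for illustration):

// streaminfo[16] = 0b0100_0010: sample-rate low nibble (0x4),
//                  channels - 1 = 0b001 (stereo), top bit of bps - 1 = 0
// streaminfo[17] = 0b1111_0000: low four bits of bps - 1, then the high
//                  nibble of the 36-bit total-samples count
let bytes = [0b0100_0010u8, 0b1111_0000u8];
let bps = 1 + (u16::from_be_bytes(bytes) >> 4 & 0b11111);
assert_eq!(bps, 16); // bps - 1 = 0b01111 = 15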
fn write_xml_meta_data_sample_entry(
v: &mut Vec<u8>,
_cfg: &super::HeaderConfiguration,

View file

@ -205,6 +205,8 @@ struct Stream {
caps: gst::Caps,
/// Whether this stream is intra-only and has frame reordering.
delta_frames: DeltaFrames,
/// Whether this stream might have header frames without timestamps that should be ignored.
discard_header_buffers: bool,
/// Currently queued GOPs, including incomplete ones.
queued_gops: VecDeque<Gop>,
@ -271,11 +273,17 @@ pub(crate) struct FMP4Mux {
impl FMP4Mux {
/// Checks if a buffer is valid according to the stream configuration.
fn check_buffer(
buffer: &gst::BufferRef,
sinkpad: &super::FMP4MuxPad,
delta_frames: super::DeltaFrames,
) -> Result<(), gst::FlowError> {
fn check_buffer(buffer: &gst::BufferRef, stream: &Stream) -> Result<(), gst::FlowError> {
let Stream {
sinkpad,
delta_frames,
discard_header_buffers,
..
} = stream;
if *discard_header_buffers && buffer.flags().contains(gst::BufferFlags::HEADER) {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
if delta_frames.requires_dts() && buffer.dts().is_none() {
gst::error!(CAT, obj: sinkpad, "Require DTS for video streams");
return Err(gst::FlowError::Error);
@ -314,12 +322,10 @@ impl FMP4Mux {
}
// Pop buffer here, it will be stored in the pre-queue after calculating its timestamps
let mut buffer = match stream.sinkpad.pop_buffer() {
None => return Ok(None),
Some(buffer) => buffer,
let Some(mut buffer) = stream.sinkpad.pop_buffer() else {
return Ok(None);
};
Self::check_buffer(&buffer, &stream.sinkpad, stream.delta_frames)?;
Self::check_buffer(&buffer, stream)?;
let segment = match stream.sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
@ -2555,6 +2561,7 @@ impl FMP4Mux {
let s = caps.structure(0).unwrap();
let mut delta_frames = DeltaFrames::IntraOnly;
let mut discard_header_buffers = false;
match s.name().as_str() {
"video/x-h264" | "video/x-h265" => {
if !s.has_field_with_type("codec_data", gst::Buffer::static_type()) {
@ -2598,6 +2605,13 @@ impl FMP4Mux {
return Err(gst::FlowError::NotNegotiated);
}
}
"audio/x-flac" => {
discard_header_buffers = true;
if let Err(e) = s.get::<gst::ArrayRef>("streamheader") {
gst::error!(CAT, obj: pad, "Muxing FLAC into MP4 needs streamheader: {}", e);
return Err(gst::FlowError::NotNegotiated);
};
}
"audio/x-alaw" | "audio/x-mulaw" => (),
"audio/x-adpcm" => (),
"application/x-onvif-metadata" => (),
@ -2608,6 +2622,7 @@ impl FMP4Mux {
sinkpad: pad,
caps,
delta_frames,
discard_header_buffers,
pre_queue: VecDeque::new(),
queued_gops: VecDeque::new(),
fragment_filled: false,
@ -3465,6 +3480,11 @@ impl ElementImpl for ISOFMP4Mux {
.field("channels", gst::IntRange::new(1i32, 8))
.field("rate", gst::IntRange::new(1, i32::MAX))
.build(),
gst::Structure::builder("audio/x-flac")
.field("framed", true)
.field("channels", gst::IntRange::<i32>::new(1, 8))
.field("rate", gst::IntRange::<i32>::new(1, 10 * u16::MAX as i32))
.build(),
]
.into_iter()
.collect::<gst::Caps>(),

View file

@ -19,6 +19,33 @@ fn init() {
});
}
fn to_completion(pipeline: &gst::Pipeline) {
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in pipeline.bus().unwrap().iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
panic!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
fn test_buffer_flags_single_stream(cmaf: bool, set_dts: bool, caps: gst::Caps) {
let mut h = if cmaf {
gst_check::Harness::new("cmafmux")
@ -1993,3 +2020,21 @@ fn test_chunking_single_stream_gops_after_fragment_end_after_next_chunk_end() {
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Eos);
}
#[test]
fn test_roundtrip_vp9_flac() {
init();
let pipeline = gst::parse::launch(
r#"
videotestsrc num-buffers=99 ! vp9enc ! vp9parse ! mux.
audiotestsrc num-buffers=149 ! flacenc ! flacparse ! mux.
isofmp4mux name=mux ! qtdemux name=demux
demux.audio_0 ! queue ! flacdec ! fakesink
demux.video_0 ! queue ! vp9dec ! fakesink
"#,
)
.unwrap();
let pipeline = pipeline.downcast().unwrap();
to_completion(&pipeline);
}

View file

@ -9,7 +9,7 @@
use gst::prelude::*;
use anyhow::{anyhow, bail, Context, Error};
use std::convert::TryFrom;
use std::str::FromStr;
fn write_box<T, F: FnOnce(&mut Vec<u8>) -> Result<T, Error>>(
@ -382,9 +382,8 @@ fn write_tkhd(
// Volume
let s = stream.caps.structure(0).unwrap();
match s.name().as_str() {
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
v.extend((1u16 << 8).to_be_bytes())
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => v.extend((1u16 << 8).to_be_bytes()),
_ => v.extend(0u16.to_be_bytes()),
}
@ -514,9 +513,8 @@ fn write_hdlr(
let (handler_type, name) = match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => (b"vide", b"VideoHandler\0".as_slice()),
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
(b"soun", b"SoundHandler\0".as_slice())
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => (b"soun", b"SoundHandler\0".as_slice()),
"application/x-onvif-metadata" => (b"meta", b"MetadataHandler\0".as_slice()),
_ => unreachable!(),
};
@ -546,7 +544,8 @@ fn write_minf(
// Flags are always 1 for unspecified reasons
write_full_box(v, b"vmhd", FULL_BOX_VERSION_0, 1, |v| write_vmhd(v, header))?
}
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => {
write_full_box(v, b"smhd", FULL_BOX_VERSION_0, FULL_BOX_FLAGS_NONE, |v| {
write_smhd(v, header)
})?
@ -703,9 +702,8 @@ fn write_stsd(
match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => write_visual_sample_entry(v, header, stream)?,
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
write_audio_sample_entry(v, header, stream)?
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => write_audio_sample_entry(v, header, stream)?,
"application/x-onvif-metadata" => write_xml_meta_data_sample_entry(v, header, stream)?,
_ => unreachable!(),
}
@ -1079,6 +1077,7 @@ fn write_audio_sample_entry(
let fourcc = match s.name().as_str() {
"audio/mpeg" => b"mp4a",
"audio/x-opus" => b"Opus",
"audio/x-flac" => b"fLaC",
"audio/x-alaw" => b"alaw",
"audio/x-mulaw" => b"ulaw",
"audio/x-adpcm" => {
@ -1097,6 +1096,10 @@ fn write_audio_sample_entry(
let bitrate = s.get::<i32>("bitrate").context("no ADPCM bitrate field")?;
(bitrate / 8000) as u16
}
"audio/x-flac" => with_flac_metadata(&stream.caps, |streaminfo, _| {
1 + (u16::from_be_bytes([streaminfo[16], streaminfo[17]]) >> 4 & 0b11111)
})
.context("FLAC metadata error")?,
_ => 16u16,
};
@ -1139,6 +1142,9 @@ fn write_audio_sample_entry(
"audio/x-opus" => {
write_dops(v, &stream.caps)?;
}
"audio/x-flac" => {
write_dfla(v, &stream.caps)?;
}
"audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
// Nothing to do here
}
@ -1333,6 +1339,35 @@ fn write_dops(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
})
}
fn with_flac_metadata<R>(
caps: &gst::Caps,
cb: impl FnOnce(&[u8], &[gst::glib::SendValue]) -> R,
) -> Result<R, Error> {
let caps = caps.structure(0).unwrap();
let header = caps.get::<gst::ArrayRef>("streamheader").unwrap();
let (streaminfo, remainder) = header.as_ref().split_first().unwrap();
let streaminfo = streaminfo.get::<&gst::BufferRef>().unwrap();
let streaminfo = streaminfo.map_readable().unwrap();
// 13 bytes for the Ogg/FLAC prefix and 38 for the streaminfo itself.
match <&[_; 13 + 38]>::try_from(streaminfo.as_slice()) {
Ok(i) if i.starts_with(b"\x7FFLAC\x01\x00") => Ok(cb(&i[13..], remainder)),
Ok(_) | Err(_) => bail!("Unknown streamheader format"),
}
}
fn write_dfla(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
write_full_box(v, b"dfLa", 0, 0, move |v| {
with_flac_metadata(caps, |streaminfo, remainder| {
v.extend(streaminfo);
for metadata in remainder {
let metadata = metadata.get::<&gst::BufferRef>().unwrap();
let metadata = metadata.map_readable().unwrap();
v.extend(&metadata[..]);
}
})
})
}
fn write_xml_meta_data_sample_entry(
v: &mut Vec<u8>,
_header: &super::Header,

View file

@ -108,6 +108,8 @@ struct Stream {
caps: gst::Caps,
/// Whether this stream is intra-only and has frame reordering.
delta_frames: super::DeltaFrames,
/// Whether this stream might have header frames without timestamps that should be ignored.
discard_header_buffers: bool,
/// Already written out chunks with their samples for this stream
chunks: Vec<super::Chunk>,
@ -165,7 +167,12 @@ impl MP4Mux {
buffer: &gst::BufferRef,
sinkpad: &super::MP4MuxPad,
delta_frames: super::DeltaFrames,
discard_headers: bool,
) -> Result<(), gst::FlowError> {
if discard_headers && buffer.flags().contains(gst::BufferFlags::HEADER) {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
if delta_frames.requires_dts() && buffer.dts().is_none() {
gst::error!(CAT, obj: sinkpad, "Require DTS for video streams");
return Err(gst::FlowError::Error);
@ -188,6 +195,7 @@ impl MP4Mux {
&self,
sinkpad: &super::MP4MuxPad,
delta_frames: super::DeltaFrames,
discard_headers: bool,
pre_queue: &mut VecDeque<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>,
running_time_utc_time_mapping: &Option<(gst::Signed<gst::ClockTime>, gst::ClockTime)>,
) -> Result<Option<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>, gst::FlowError> {
@ -195,13 +203,10 @@ impl MP4Mux {
return Ok(Some((segment.clone(), buffer.clone())));
}
let mut buffer = match sinkpad.peek_buffer() {
None => return Ok(None),
Some(buffer) => buffer,
let Some(mut buffer) = sinkpad.peek_buffer() else {
return Ok(None);
};
Self::check_buffer(&buffer, sinkpad, delta_frames)?;
Self::check_buffer(&buffer, sinkpad, delta_frames, discard_headers)?;
let mut segment = match sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
None => {
@ -276,19 +281,20 @@ impl MP4Mux {
fn pop_buffer(
&self,
sinkpad: &super::MP4MuxPad,
delta_frames: super::DeltaFrames,
pre_queue: &mut VecDeque<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>,
running_time_utc_time_mapping: &mut Option<(gst::Signed<gst::ClockTime>, gst::ClockTime)>,
stream: &mut Stream,
) -> Result<Option<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>, gst::FlowError> {
let Stream {
sinkpad, pre_queue, ..
} = stream;
// In ONVIF mode we need to get UTC times for each buffer and synchronize based on that.
// Queue up to 6s of data to get the first UTC time and then backdate.
if self.obj().class().as_ref().variant == super::Variant::ONVIF
&& running_time_utc_time_mapping.is_none()
&& stream.running_time_utc_time_mapping.is_none()
{
if let Some((last, first)) = Option::zip(pre_queue.back(), pre_queue.front()) {
// Existence of PTS/DTS checked below
let (last, first) = if delta_frames.requires_dts() {
let (last, first) = if stream.delta_frames.requires_dts() {
(
last.0.to_running_time_full(last.1.dts()).unwrap(),
first.0.to_running_time_full(first.1.dts()).unwrap(),
@ -312,19 +318,20 @@ impl MP4Mux {
}
}
let buffer = match sinkpad.pop_buffer() {
None => {
if sinkpad.is_eos() {
gst::error!(CAT, obj: sinkpad, "Got no UTC time before EOS");
return Err(gst::FlowError::Error);
} else {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
let Some(buffer) = sinkpad.pop_buffer() else {
if sinkpad.is_eos() {
gst::error!(CAT, obj: sinkpad, "Got no UTC time before EOS");
return Err(gst::FlowError::Error);
} else {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
Some(buffer) => buffer,
};
Self::check_buffer(&buffer, sinkpad, delta_frames)?;
Self::check_buffer(
&buffer,
sinkpad,
stream.delta_frames,
stream.discard_header_buffers,
)?;
let segment = match sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
@ -350,7 +357,7 @@ impl MP4Mux {
);
let mapping = (running_time, utc_time);
*running_time_utc_time_mapping = Some(mapping);
stream.running_time_utc_time_mapping = Some(mapping);
// Push the buffer onto the pre-queue and re-timestamp it and all other buffers
// based on the mapping above.
@ -391,7 +398,7 @@ impl MP4Mux {
// Fall through below and pop the first buffer finally
}
if let Some((segment, buffer)) = pre_queue.pop_front() {
if let Some((segment, buffer)) = stream.pre_queue.pop_front() {
return Ok(Some((segment, buffer)));
}
@ -400,23 +407,26 @@ impl MP4Mux {
// for calculating the duration to the previous buffer, and then put into the pre-queue
// - or this is the very first buffer and we just put it into the queue ourselves above
if self.obj().class().as_ref().variant == super::Variant::ONVIF {
if sinkpad.is_eos() {
if stream.sinkpad.is_eos() {
return Ok(None);
}
unreachable!();
}
let buffer = match sinkpad.pop_buffer() {
None => return Ok(None),
Some(buffer) => buffer,
let Some(buffer) = stream.sinkpad.pop_buffer() else {
return Ok(None);
};
Self::check_buffer(
&buffer,
&stream.sinkpad,
stream.delta_frames,
stream.discard_header_buffers,
)?;
Self::check_buffer(&buffer, sinkpad, delta_frames)?;
let segment = match sinkpad.segment().downcast::<gst::ClockTime>().ok() {
let segment = match stream.sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
None => {
gst::error!(CAT, obj: sinkpad, "Got buffer before segment");
gst::error!(CAT, obj: stream.sinkpad, "Got buffer before segment");
return Err(gst::FlowError::Error);
}
};
@ -442,6 +452,12 @@ impl MP4Mux {
Some(PendingBuffer {
duration: Some(_), ..
}) => return Ok(()),
Some(PendingBuffer { ref buffer, .. })
if stream.discard_header_buffers
&& buffer.flags().contains(gst::BufferFlags::HEADER) =>
{
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
Some(PendingBuffer {
timestamp,
pts,
@ -449,13 +465,15 @@ impl MP4Mux {
ref mut duration,
..
}) => {
// Already have a pending buffer but no duration, so try to get that now
let (segment, buffer) = match self.peek_buffer(
let peek_outcome = self.peek_buffer(
&stream.sinkpad,
stream.delta_frames,
stream.discard_header_buffers,
&mut stream.pre_queue,
&stream.running_time_utc_time_mapping,
)? {
)?;
// Already have a pending buffer but no duration, so try to get that now
let (segment, buffer) = match peek_outcome {
Some(res) => res,
None => {
if stream.sinkpad.is_eos() {
@ -532,12 +550,7 @@ impl MP4Mux {
None => {
// Have no buffer queued at all yet
let (segment, buffer) = match self.pop_buffer(
&stream.sinkpad,
stream.delta_frames,
&mut stream.pre_queue,
&mut stream.running_time_utc_time_mapping,
)? {
let (segment, buffer) = match self.pop_buffer(stream)? {
Some(res) => res,
None => {
if stream.sinkpad.is_eos() {
@ -870,6 +883,7 @@ impl MP4Mux {
let s = caps.structure(0).unwrap();
let mut delta_frames = super::DeltaFrames::IntraOnly;
let mut discard_header_buffers = false;
match s.name().as_str() {
"video/x-h264" | "video/x-h265" => {
if !s.has_field_with_type("codec_data", gst::Buffer::static_type()) {
@ -913,6 +927,13 @@ impl MP4Mux {
return Err(gst::FlowError::NotNegotiated);
}
}
"audio/x-flac" => {
discard_header_buffers = true;
if let Err(e) = s.get::<gst::ArrayRef>("streamheader") {
gst::error!(CAT, obj: pad, "Muxing FLAC into MP4 needs streamheader: {}", e);
return Err(gst::FlowError::NotNegotiated);
};
}
"audio/x-alaw" | "audio/x-mulaw" => (),
"audio/x-adpcm" => (),
"application/x-onvif-metadata" => (),
@ -924,6 +945,7 @@ impl MP4Mux {
pre_queue: VecDeque::new(),
caps,
delta_frames,
discard_header_buffers,
chunks: Vec::new(),
pending_buffer: None,
queued_chunk_time: gst::ClockTime::ZERO,
@ -1523,6 +1545,11 @@ impl ElementImpl for ISOMP4Mux {
.field("channels", gst::IntRange::new(1i32, 8))
.field("rate", gst::IntRange::new(1, i32::MAX))
.build(),
gst::Structure::builder("audio/x-flac")
.field("framed", true)
.field("channels", gst::IntRange::<i32>::new(1, 8))
.field("rate", gst::IntRange::<i32>::new(1, 10 * u16::MAX as i32))
.build(),
]
.into_iter()
.collect::<gst::Caps>(),

View file

@ -7,6 +7,8 @@
// SPDX-License-Identifier: MPL-2.0
//
use std::path::Path;
use gst::prelude::*;
use gst_pbutils::prelude::*;
@ -20,33 +22,57 @@ fn init() {
});
}
#[test]
fn test_basic() {
init();
struct Pipeline(gst::Pipeline);
impl std::ops::Deref for Pipeline {
type Target = gst::Pipeline;
struct Pipeline(gst::Pipeline);
impl std::ops::Deref for Pipeline {
type Target = gst::Pipeline;
fn deref(&self) -> &Self::Target {
&self.0
}
fn deref(&self) -> &Self::Target {
&self.0
}
impl Drop for Pipeline {
fn drop(&mut self) {
let _ = self.0.set_state(gst::State::Null);
}
}
impl Drop for Pipeline {
fn drop(&mut self) {
let _ = self.0.set_state(gst::State::Null);
}
}
let pipeline = match gst::parse::launch(
"videotestsrc num-buffers=99 ! x264enc ! mux. \
audiotestsrc num-buffers=140 ! fdkaacenc ! mux. \
isomp4mux name=mux ! filesink name=sink \
",
) {
Ok(pipeline) => Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap()),
Err(_) => return,
impl Pipeline {
fn into_completion(self) {
self.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in self.bus().unwrap().iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
panic!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
_ => (),
}
}
self.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
}
fn test_basic_with(video_enc: &str, audio_enc: &str, cb: impl FnOnce(&Path)) {
let Ok(pipeline) = gst::parse::launch(&format!(
"videotestsrc num-buffers=99 ! {video_enc} ! mux. \
audiotestsrc num-buffers=140 ! {audio_enc} ! mux. \
isomp4mux name=mux ! filesink name=sink"
)) else {
println!("could not build encoding pipeline");
return;
};
let pipeline = Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap());
let dir = tempfile::TempDir::new().unwrap();
let mut location = dir.path().to_owned();
@ -54,73 +80,75 @@ fn test_basic() {
let sink = pipeline.by_name("sink").unwrap();
sink.set_property("location", location.to_str().expect("Non-UTF8 filename"));
pipeline.into_completion();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in pipeline.bus().unwrap().iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
panic!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
drop(pipeline);
let discoverer = gst_pbutils::Discoverer::new(gst::ClockTime::from_seconds(5))
.expect("Failed to create discoverer");
let info = discoverer
.discover_uri(
url::Url::from_file_path(&location)
.expect("Failed to convert filename to URL")
.as_str(),
)
.expect("Failed to discover MP4 file");
assert_eq!(info.duration(), Some(gst::ClockTime::from_mseconds(3_300)));
let audio_streams = info.audio_streams();
assert_eq!(audio_streams.len(), 1);
let audio_stream = &audio_streams[0];
assert_eq!(audio_stream.channels(), 1);
assert_eq!(audio_stream.sample_rate(), 44_100);
let caps = audio_stream.caps().unwrap();
assert!(
caps.can_intersect(
&gst::Caps::builder("audio/mpeg")
.any_features()
.field("mpegversion", 4i32)
.build()
),
"Unexpected audio caps {caps:?}"
);
let video_streams = info.video_streams();
assert_eq!(video_streams.len(), 1);
let video_stream = &video_streams[0];
assert_eq!(video_stream.width(), 320);
assert_eq!(video_stream.height(), 240);
assert_eq!(video_stream.framerate(), gst::Fraction::new(30, 1));
assert_eq!(video_stream.par(), gst::Fraction::new(1, 1));
assert!(!video_stream.is_interlaced());
let caps = video_stream.caps().unwrap();
assert!(
caps.can_intersect(&gst::Caps::builder("video/x-h264").any_features().build()),
"Unexpected video caps {caps:?}"
);
cb(&location)
}
#[test]
fn test_basic_x264_aac() {
init();
test_basic_with("x264enc", "fdkaacenc", |location| {
let discoverer = gst_pbutils::Discoverer::new(gst::ClockTime::from_seconds(5))
.expect("Failed to create discoverer");
let info = discoverer
.discover_uri(
url::Url::from_file_path(location)
.expect("Failed to convert filename to URL")
.as_str(),
)
.expect("Failed to discover MP4 file");
assert_eq!(info.duration(), Some(gst::ClockTime::from_mseconds(3_300)));
let audio_streams = info.audio_streams();
assert_eq!(audio_streams.len(), 1);
let audio_stream = &audio_streams[0];
assert_eq!(audio_stream.channels(), 1);
assert_eq!(audio_stream.sample_rate(), 44_100);
let caps = audio_stream.caps().unwrap();
assert!(
caps.can_intersect(
&gst::Caps::builder("audio/mpeg")
.any_features()
.field("mpegversion", 4i32)
.build()
),
"Unexpected audio caps {caps:?}"
);
let video_streams = info.video_streams();
assert_eq!(video_streams.len(), 1);
let video_stream = &video_streams[0];
assert_eq!(video_stream.width(), 320);
assert_eq!(video_stream.height(), 240);
assert_eq!(video_stream.framerate(), gst::Fraction::new(30, 1));
assert_eq!(video_stream.par(), gst::Fraction::new(1, 1));
assert!(!video_stream.is_interlaced());
let caps = video_stream.caps().unwrap();
assert!(
caps.can_intersect(&gst::Caps::builder("video/x-h264").any_features().build()),
"Unexpected video caps {caps:?}"
);
})
}
#[test]
fn test_roundtrip_vp9_flac() {
init();
test_basic_with("vp9enc ! vp9parse", "flacenc ! flacparse", |location| {
let Ok(pipeline) = gst::parse::launch(
"filesrc name=src ! qtdemux name=demux \
demux.audio_0 ! queue ! flacdec ! fakesink \
demux.video_0 ! queue ! vp9dec ! fakesink",
) else {
panic!("could not build decoding pipeline")
};
let pipeline = Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap());
pipeline
.by_name("src")
.unwrap()
.set_property("location", location.display().to_string());
pipeline.into_completion();
})
}

View file

@ -39,6 +39,7 @@ const S3_CHANNEL_SIZE: usize = 32;
const S3_ACL_DEFAULT: ObjectCannedAcl = ObjectCannedAcl::Private;
const DEFAULT_RETRY_ATTEMPTS: u32 = 5;
const DEFAULT_TIMEOUT_IN_MSECS: u64 = 15000;
const DEFAULT_FORCE_PATH_STYLE: bool = false;
struct Settings {
access_key: Option<String>,
@ -57,6 +58,7 @@ struct Settings {
video_sink: bool,
config: Option<SdkConfig>,
endpoint_uri: Option<String>,
force_path_style: bool,
}
impl Default for Settings {
@ -79,6 +81,7 @@ impl Default for Settings {
video_sink: false,
config: None,
endpoint_uri: None,
force_path_style: DEFAULT_FORCE_PATH_STYLE,
}
}
}
@ -376,6 +379,7 @@ impl S3HlsSink {
let sdk_config = settings.config.as_ref().expect("SDK config must be set");
let config_builder = config::Builder::from(sdk_config)
.force_path_style(settings.force_path_style)
.region(settings.s3_region.clone())
.retry_config(RetryConfig::standard().with_max_attempts(settings.retry_attempts));
@ -529,6 +533,11 @@ impl ObjectImpl for S3HlsSink {
.blurb("The S3 endpoint URI to use")
.mutable_ready()
.build(),
glib::ParamSpecBoolean::builder("force-path-style")
.nick("Force path style")
.blurb("Force client to use path-style addressing for buckets")
.default_value(DEFAULT_FORCE_PATH_STYLE)
.build(),
]
});
@ -586,6 +595,9 @@ impl ObjectImpl for S3HlsSink {
.get::<Option<String>>()
.expect("type checked upstream");
}
"force-path-style" => {
settings.force_path_style = value.get::<bool>().expect("type checked upstream");
}
_ => unimplemented!(),
}
}
@ -606,6 +618,7 @@ impl ObjectImpl for S3HlsSink {
"request-timeout" => (settings.request_timeout.as_millis() as u64).to_value(),
"stats" => self.create_stats().to_value(),
"endpoint-uri" => settings.endpoint_uri.to_value(),
"force-path-style" => settings.force_path_style.to_value(),
_ => unimplemented!(),
}
}
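
With this property wired through, the S3 elements can talk to S3-compatible stores (MinIO, Ceph RGW and friends) that only accept path-style requests, i.e. https://endpoint/bucket/key rather than the virtual-hosted https://bucket.endpoint/key. A hypothetical usage sketch: the endpoint address is invented and the element is assumed to be registered as awss3hlssink, while "endpoint-uri" and "force-path-style" are the property names added in this diff:

gst::init().unwrap();
// Point the HLS sink at a local S3-compatible endpoint that requires
// path-style bucket addressing.
let sink = gst::ElementFactory::make("awss3hlssink")
    .property("endpoint-uri", "http://127.0.0.1:9000")
    .property("force-path-style", true)
    .build()
    .unwrap();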

View file

@ -38,6 +38,7 @@ use crate::s3utils::{self, duration_from_millis, duration_to_millis, WaitError};
use super::OnError;
const DEFAULT_FORCE_PATH_STYLE: bool = false;
const DEFAULT_RETRY_ATTEMPTS: u32 = 5;
const DEFAULT_BUFFER_SIZE: u64 = 5 * 1024 * 1024;
const DEFAULT_MULTIPART_UPLOAD_ON_ERROR: OnError = OnError::DoNothing;
@ -114,6 +115,7 @@ struct Settings {
multipart_upload_on_error: OnError,
request_timeout: Duration,
endpoint_uri: Option<String>,
force_path_style: bool,
}
impl Settings {
@ -168,6 +170,7 @@ impl Default for Settings {
multipart_upload_on_error: DEFAULT_MULTIPART_UPLOAD_ON_ERROR,
request_timeout: Duration::from_millis(DEFAULT_REQUEST_TIMEOUT_MSEC),
endpoint_uri: None,
force_path_style: DEFAULT_FORCE_PATH_STYLE,
}
}
}
@ -524,6 +527,7 @@ impl S3Sink {
})?;
let config_builder = config::Builder::from(&sdk_config)
.force_path_style(settings.force_path_style)
.retry_config(RetryConfig::standard().with_max_attempts(settings.retry_attempts));
let config = if let Some(ref uri) = settings.endpoint_uri {
@ -775,6 +779,11 @@ impl ObjectImpl for S3Sink {
.nick("content-disposition")
.blurb("Content-Disposition header to set for uploaded object")
.build(),
glib::ParamSpecBoolean::builder("force-path-style")
.nick("Force path style")
.blurb("Force client to use path-style addressing for buckets")
.default_value(DEFAULT_FORCE_PATH_STYLE)
.build(),
]
});
@ -888,6 +897,9 @@ impl ObjectImpl for S3Sink {
.get::<Option<String>>()
.expect("type checked upstream");
}
"force-path-style" => {
settings.force_path_style = value.get::<bool>().expect("type checked upstream");
}
_ => unimplemented!(),
}
}
@ -929,6 +941,7 @@ impl ObjectImpl for S3Sink {
"endpoint-uri" => settings.endpoint_uri.to_value(),
"content-type" => settings.content_type.to_value(),
"content-disposition" => settings.content_disposition.to_value(),
"force-path-style" => settings.force_path_style.to_value(),
_ => unimplemented!(),
}
}

View file

@ -36,6 +36,7 @@ const DEFAULT_FLUSH_INTERVAL_BUFFERS: u64 = 1;
const DEFAULT_FLUSH_INTERVAL_BYTES: u64 = 0;
const DEFAULT_FLUSH_INTERVAL_TIME: gst::ClockTime = gst::ClockTime::from_nseconds(0);
const DEFAULT_FLUSH_ON_ERROR: bool = false;
const DEFAULT_FORCE_PATH_STYLE: bool = false;
// General setting for create / abort requests
const DEFAULT_REQUEST_TIMEOUT_MSEC: u64 = 15_000;
@ -80,6 +81,7 @@ struct Settings {
retry_attempts: u32,
request_timeout: Duration,
endpoint_uri: Option<String>,
force_path_style: bool,
flush_interval_buffers: u64,
flush_interval_bytes: u64,
flush_interval_time: Option<gst::ClockTime>,
@ -136,6 +138,7 @@ impl Default for Settings {
retry_attempts: DEFAULT_RETRY_ATTEMPTS,
request_timeout: Duration::from_millis(DEFAULT_REQUEST_TIMEOUT_MSEC),
endpoint_uri: None,
force_path_style: DEFAULT_FORCE_PATH_STYLE,
flush_interval_buffers: DEFAULT_FLUSH_INTERVAL_BUFFERS,
flush_interval_bytes: DEFAULT_FLUSH_INTERVAL_BYTES,
flush_interval_time: Some(DEFAULT_FLUSH_INTERVAL_TIME),
@ -293,6 +296,7 @@ impl S3PutObjectSink {
})?;
let config_builder = config::Builder::from(&sdk_config)
.force_path_style(settings.force_path_style)
.retry_config(RetryConfig::standard().with_max_attempts(settings.retry_attempts));
let config = if let Some(ref uri) = settings.endpoint_uri {
@ -446,6 +450,11 @@ impl ObjectImpl for S3PutObjectSink {
.blurb("Whether to write out the data on error (like stopping without an EOS)")
.default_value(DEFAULT_FLUSH_ON_ERROR)
.build(),
glib::ParamSpecBoolean::builder("force-path-style")
.nick("Force path style")
.blurb("Force client to use path-style addressing for buckets")
.default_value(DEFAULT_FORCE_PATH_STYLE)
.build(),
]
});
@ -542,6 +551,9 @@ impl ObjectImpl for S3PutObjectSink {
"flush-on-error" => {
settings.flush_on_error = value.get::<bool>().expect("type checked upstream");
}
"force-path-style" => {
settings.force_path_style = value.get::<bool>().expect("type checked upstream");
}
_ => unimplemented!(),
}
}
@ -575,6 +587,7 @@ impl ObjectImpl for S3PutObjectSink {
"flush-interval-bytes" => settings.flush_interval_bytes.to_value(),
"flush-interval-time" => settings.flush_interval_time.to_value(),
"flush-on-error" => settings.flush_on_error.to_value(),
"force-path-style" => settings.force_path_style.to_value(),
_ => unimplemented!(),
}
}

View file

@ -29,6 +29,7 @@ use gst_base::subclass::prelude::*;
use crate::s3url::*;
use crate::s3utils::{self, duration_from_millis, duration_to_millis, WaitError};
const DEFAULT_FORCE_PATH_STYLE: bool = false;
const DEFAULT_RETRY_ATTEMPTS: u32 = 5;
const DEFAULT_REQUEST_TIMEOUT_MSEC: u64 = 15000;
const DEFAULT_RETRY_DURATION_MSEC: u64 = 60_000;
@ -53,6 +54,7 @@ struct Settings {
retry_attempts: u32,
request_timeout: Duration,
endpoint_uri: Option<String>,
force_path_style: bool,
}
impl Default for Settings {
@ -66,6 +68,7 @@ impl Default for Settings {
retry_attempts: DEFAULT_RETRY_ATTEMPTS,
request_timeout: duration,
endpoint_uri: None,
force_path_style: DEFAULT_FORCE_PATH_STYLE,
}
}
}
@ -128,6 +131,7 @@ impl S3Src {
})?;
let config_builder = config::Builder::from(&sdk_config)
.force_path_style(settings.force_path_style)
.retry_config(RetryConfig::standard().with_max_attempts(settings.retry_attempts));
let config = if let Some(ref uri) = settings.endpoint_uri {
@ -316,6 +320,11 @@ impl ObjectImpl for S3Src {
.nick("S3 endpoint URI")
.blurb("The S3 endpoint URI to use")
.build(),
glib::ParamSpecBoolean::builder("force-path-style")
.nick("Force path style")
.blurb("Force client to use path-style addressing for buckets")
.default_value(DEFAULT_FORCE_PATH_STYLE)
.build(),
]
});
@ -365,6 +374,9 @@ impl ObjectImpl for S3Src {
.get::<Option<String>>()
.expect("type checked upstream");
}
"force-path-style" => {
settings.force_path_style = value.get::<bool>().expect("type checked upstream");
}
_ => unimplemented!(),
}
}
@ -391,6 +403,7 @@ impl ObjectImpl for S3Src {
}
"retry-attempts" => settings.retry_attempts.to_value(),
"endpoint-uri" => settings.endpoint_uri.to_value(),
"force-path-style" => settings.force_path_style.to_value(),
_ => unimplemented!(),
}
}

View file

@ -26,6 +26,7 @@ use std::{
fmt::Debug,
mem,
sync::Mutex,
time::Instant,
};
use time::Duration;
@ -268,7 +269,7 @@ struct Detector {
last_received_packets: BTreeMap<u64, Packet>, // Ordered by seqnum, front is the newest, back is the oldest
// Last loss update
last_loss_update: Option<time::Instant>,
last_loss_update: Option<Instant>,
// Moving average of the packet loss
loss_average: f64,
@ -280,13 +281,13 @@ struct Detector {
// Threshold fields
threshold: Duration,
last_threshold_update: Option<time::Instant>,
last_threshold_update: Option<Instant>,
num_deltas: i64,
// Overuse related fields
increasing_counter: u32,
last_overuse_estimate: Duration,
last_use_detector_update: time::Instant,
last_use_detector_update: Instant,
increasing_duration: Duration,
// round-trip-time estimations
@ -337,7 +338,7 @@ impl Detector {
last_threshold_update: None,
num_deltas: 0,
last_use_detector_update: time::Instant::now(),
last_use_detector_update: Instant::now(),
increasing_counter: 0,
last_overuse_estimate: Duration::ZERO,
increasing_duration: Duration::ZERO,
@ -519,11 +520,14 @@ impl Detector {
}
fn compute_loss_average(&mut self, loss_fraction: f64) {
let now = time::Instant::now();
let now = Instant::now();
if let Some(ref last_update) = self.last_loss_update {
self.loss_average = loss_fraction
+ (-(now - *last_update).whole_milliseconds() as f64).exp()
+ (-Duration::try_from(now - *last_update)
.unwrap()
.whole_milliseconds() as f64)
.exp()
* (self.loss_average - loss_fraction);
}
@ -588,7 +592,7 @@ impl Detector {
const K_D: f64 = 0.00018; // Table1. Coefficient for the adaptive threshold
const MAX_TIME_DELTA: Duration = Duration::milliseconds(100);
let now = time::Instant::now();
let now = Instant::now();
if self.last_threshold_update.is_none() {
self.last_threshold_update = Some(now);
}
@ -604,7 +608,9 @@ impl Detector {
} else {
K_U
};
let time_delta = (now - self.last_threshold_update.unwrap()).min(MAX_TIME_DELTA);
let time_delta = Duration::try_from(now - self.last_threshold_update.unwrap())
.unwrap()
.min(MAX_TIME_DELTA);
let d = abs_estimate - self.threshold;
let add = k * d.whole_milliseconds() as f64 * time_delta.whole_milliseconds() as f64;
@ -616,7 +622,7 @@ impl Detector {
fn overuse_filter(&mut self) {
let (th_usage, estimate) = self.compare_threshold();
let now = time::Instant::now();
let now = Instant::now();
let delta = now - self.last_use_detector_update;
self.last_use_detector_update = now;
match th_usage {
@ -695,14 +701,14 @@ struct State {
/// Used in additive mode to track last control time, influences
/// calculation of added value according to gcc section 5.5
last_increase_on_delay: Option<time::Instant>,
last_decrease_on_delay: time::Instant,
last_increase_on_delay: Option<Instant>,
last_decrease_on_delay: Instant,
/// Bitrate target based on loss for all video streams.
target_bitrate_on_loss: Bitrate,
last_increase_on_loss: time::Instant,
last_decrease_on_loss: time::Instant,
last_increase_on_loss: Instant,
last_decrease_on_loss: Instant,
/// Exponential moving average, updated when bitrate is
/// decreased
@ -723,7 +729,7 @@ struct State {
budget_offset: i64,
flow_return: Result<gst::FlowSuccess, gst::FlowError>,
last_push: time::Instant,
last_push: Instant,
}
impl Default for State {
@ -731,11 +737,11 @@ impl Default for State {
Self {
target_bitrate_on_delay: DEFAULT_ESTIMATED_BITRATE,
target_bitrate_on_loss: DEFAULT_ESTIMATED_BITRATE,
last_increase_on_loss: time::Instant::now(),
last_decrease_on_loss: time::Instant::now(),
last_increase_on_loss: Instant::now(),
last_decrease_on_loss: Instant::now(),
ema: Default::default(),
last_increase_on_delay: None,
last_decrease_on_delay: time::Instant::now(),
last_decrease_on_delay: Instant::now(),
min_bitrate: DEFAULT_MIN_BITRATE,
max_bitrate: DEFAULT_MAX_BITRATE,
detector: Detector::new(),
@ -744,7 +750,7 @@ impl Default for State {
last_control_op: BandwidthEstimationOp::Increase("Initial increase".into()),
flow_return: Err(gst::FlowError::Flushing),
clock_entry: None,
last_push: time::Instant::now(),
last_push: Instant::now(),
budget_offset: 0,
}
}
@ -753,8 +759,8 @@ impl Default for State {
impl State {
// 4. sending engine implementing a "leaky bucket"
fn create_buffer_list(&mut self, bwe: &super::BandwidthEstimator) -> BufferList {
let now = time::Instant::now();
let elapsed = now - self.last_push;
let now = Instant::now();
let elapsed = Duration::try_from(now - self.last_push).unwrap();
let mut budget = (elapsed.whole_nanoseconds() as i64)
.mul_div_round(
self.estimated_bitrate as i64,
@ -803,7 +809,7 @@ impl State {
}
fn compute_increased_rate(&mut self, bwe: &super::BandwidthEstimator) -> Option<Bitrate> {
let now = time::Instant::now();
let now = Instant::now();
let target_bitrate = self.target_bitrate_on_delay as f64;
let effective_bitrate = self.detector.effective_bitrate();
let time_since_last_update_ms = match self.last_increase_on_delay {
@ -813,7 +819,7 @@ impl State {
return None;
}
(now - prev).whole_milliseconds() as f64
Duration::try_from(now - prev).unwrap().whole_milliseconds() as f64
}
};
@ -950,7 +956,7 @@ impl State {
fn loss_control(&mut self, bwe: &super::BandwidthEstimator) -> bool {
let loss_ratio = self.detector.loss_ratio();
let now = time::Instant::now();
let now = Instant::now();
if loss_ratio > LOSS_DECREASE_THRESHOLD
&& (now - self.last_decrease_on_loss) > LOSS_UPDATE_INTERVAL
@ -993,7 +999,7 @@ impl State {
_ => (),
},
NetworkUsage::Over => {
let now = time::Instant::now();
let now = Instant::now();
if now - self.last_decrease_on_delay > DELAY_UPDATE_INTERVAL {
let effective_bitrate = self.detector.effective_bitrate();
let target =
@ -1096,7 +1102,9 @@ impl BandwidthEstimator {
if !list.is_empty() {
if let Err(err) = bwe.imp().push_list(list) {
gst::error!(CAT, obj: bwe, "pause task, reason: {err:?}");
if err != gst::FlowError::Flushing {
gst::error!(CAT, obj: bwe, "pause task, reason: {err:?}");
}
pause()
}
}
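
The rest of this file's changes are the mechanical swap from `time::Instant` to `std::time::Instant`. The wrinkle motivating the `try_from` calls above: subtracting two `std::time::Instant`s yields an unsigned `std::time::Duration`, while the estimator's arithmetic uses the `time` crate's signed `Duration`, so the conversion is fallible. A minimal sketch of the bridging:

use std::time::Instant;

let start = Instant::now();
// Fails only if the std Duration overflows time::Duration's signed range.
let elapsed = time::Duration::try_from(start.elapsed()).unwrap();
println!("elapsed: {} ms", elapsed.whole_milliseconds());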

View file

@ -413,10 +413,11 @@ impl CongestionController {
let fec_percentage = (fec_ratio * 50f64) as u32;
for encoder in encoders.iter_mut() {
encoder.set_bitrate(element, target_bitrate);
encoder
.transceiver
.set_property("fec-percentage", fec_percentage);
if encoder.set_bitrate(element, target_bitrate).is_ok() {
encoder
.transceiver
.set_property("fec-percentage", fec_percentage);
}
}
}
}

View file

@ -952,8 +952,24 @@ impl VideoEncoder {
})
}
fn bitrate(&self) -> i32 {
match self.factory_name.as_str() {
fn is_bitrate_supported(factory_name: &str) -> bool {
matches!(
factory_name,
"vp8enc"
| "vp9enc"
| "x264enc"
| "nvh264enc"
| "vaapih264enc"
| "vaapivp8enc"
| "qsvh264enc"
| "nvv4l2h264enc"
| "nvv4l2vp8enc"
| "nvv4l2vp9enc"
)
}
fn bitrate(&self) -> Result<i32, WebRTCSinkError> {
let bitrate = match self.factory_name.as_str() {
"vp8enc" | "vp9enc" => self.element.property::<i32>("target-bitrate"),
"x264enc" | "nvh264enc" | "vaapih264enc" | "vaapivp8enc" | "qsvh264enc" => {
(self.element.property::<u32>("bitrate") * 1000) as i32
@ -961,8 +977,10 @@ impl VideoEncoder {
"nvv4l2h264enc" | "nvv4l2vp8enc" | "nvv4l2vp9enc" => {
(self.element.property::<u32>("bitrate")) as i32
}
factory => unimplemented!("Factory {} is currently not supported", factory),
}
_ => return Err(WebRTCSinkError::BitrateNotSupported),
};
Ok(bitrate)
}
fn scale_height_round_2(&self, height: i32) -> i32 {
@ -979,16 +997,21 @@ impl VideoEncoder {
(width + 1) & !1
}
pub(crate) fn set_bitrate(&mut self, element: &super::BaseWebRTCSink, bitrate: i32) {
pub(crate) fn set_bitrate(
&mut self,
element: &super::BaseWebRTCSink,
bitrate: i32,
) -> Result<(), WebRTCSinkError> {
match self.factory_name.as_str() {
"vp8enc" | "vp9enc" => self.element.set_property("target-bitrate", bitrate),
"x264enc" | "nvh264enc" | "vaapih264enc" | "vaapivp8enc" | "qsvh264enc" => self
.element
.set_property("bitrate", (bitrate / 1000) as u32),
"x264enc" | "nvh264enc" | "vaapih264enc" | "vaapivp8enc" | "qsvh264enc" => {
self.element
.set_property("bitrate", (bitrate / 1000) as u32);
}
"nvv4l2h264enc" | "nvv4l2vp8enc" | "nvv4l2vp9enc" => {
self.element.set_property("bitrate", bitrate as u32)
}
factory => unimplemented!("Factory {} is currently not supported", factory),
_ => return Err(WebRTCSinkError::BitrateNotSupported),
}
let current_caps = self.filter.property::<gst::Caps>("caps");
@ -1052,11 +1075,13 @@ impl VideoEncoder {
self.filter.set_property("caps", caps);
}
Ok(())
}
fn gather_stats(&self) -> gst::Structure {
gst::Structure::builder("application/x-webrtcsink-video-encoder-stats")
.field("bitrate", self.bitrate())
.field("bitrate", self.bitrate().unwrap_or(0i32))
.field("mitigation-mode", self.mitigation_mode)
.field("codec-name", self.codec_name.as_str())
.field(
@ -1339,19 +1364,21 @@ impl Session {
WebRTCSinkCongestionControl::Disabled => {
// If congestion control is disabled, we simply use the highest
// known "safe" value for the bitrate.
enc.set_bitrate(element, self.cc_info.max_bitrate as i32);
let _ = enc.set_bitrate(element, self.cc_info.max_bitrate as i32);
enc.transceiver.set_property("fec-percentage", 50u32);
}
WebRTCSinkCongestionControl::Homegrown => {
if let Some(congestion_controller) = self.congestion_controller.as_mut() {
congestion_controller.target_bitrate_on_delay += enc.bitrate();
congestion_controller.target_bitrate_on_loss =
congestion_controller.target_bitrate_on_delay;
enc.transceiver.set_property("fec-percentage", 0u32);
if let Ok(bitrate) = enc.bitrate() {
congestion_controller.target_bitrate_on_delay += bitrate;
congestion_controller.target_bitrate_on_loss =
congestion_controller.target_bitrate_on_delay;
enc.transceiver.set_property("fec-percentage", 0u32);
}
} else {
/* If congestion control is disabled, we simply use the highest
* known "safe" value for the bitrate. */
enc.set_bitrate(element, self.cc_info.max_bitrate as i32);
let _ = enc.set_bitrate(element, self.cc_info.max_bitrate as i32);
enc.transceiver.set_property("fec-percentage", 50u32);
}
}
@ -1497,6 +1524,7 @@ impl BaseWebRTCSink {
fn configure_congestion_control(
&self,
payloader: &gst::Element,
codec: &Codec,
extension_configuration_type: ExtensionConfigurationType,
) -> Result<(), Error> {
if let ExtensionConfigurationType::Skip = extension_configuration_type {
@ -1505,6 +1533,16 @@ impl BaseWebRTCSink {
let settings = self.settings.lock().unwrap();
if codec.is_video() {
if let Some(enc_name) = codec.encoder_name().as_deref() {
if !VideoEncoder::is_bitrate_supported(enc_name) {
gst::error!(CAT, imp: self, "Bitrate handling is not supported yet for {enc_name}");
return Ok(());
}
}
}
if settings.cc_info.heuristic == WebRTCSinkCongestionControl::Disabled {
return Ok(());
}
@ -1620,7 +1658,7 @@ impl BaseWebRTCSink {
payloader.set_property("ssrc", ssrc);
}
self.configure_congestion_control(payloader, extension_configuration_type)
self.configure_congestion_control(payloader, codec, extension_configuration_type)
}
fn generate_ssrc(
@ -2951,10 +2989,11 @@ impl BaseWebRTCSink {
}
for encoder in session.encoders.iter_mut() {
encoder.set_bitrate(element, encoders_bitrate);
encoder
.transceiver
.set_property("fec-percentage", (fec_percentage as u32).min(100));
if encoder.set_bitrate(element, encoders_bitrate).is_ok() {
encoder
.transceiver
.set_property("fec-percentage", (fec_percentage as u32).min(100));
}
}
}
}
@ -3400,7 +3439,7 @@ impl BaseWebRTCSink {
let is_video = match sink_caps.structure(0).unwrap().name().as_str() {
"video/x-raw" => true,
"audio/x-raw" => false,
_ => panic!("expected audio or video raw caps: {sink_caps}"),
_ => anyhow::bail!("Unsupported caps: {}", discovery_info.caps),
};
codecs

View file

@ -87,6 +87,8 @@ pub enum WebRTCSinkError {
peer_id: String,
details: String,
},
#[error("Bitrate handling currently not supported for requested encoder")]
BitrateNotSupported,
}
impl Default for BaseWebRTCSink {

View file

@ -240,7 +240,7 @@ impl ObjectImpl for BaseWebRTCSrc {
*/
glib::subclass::Signal::builder("request-encoded-filter")
.param_types([
String::static_type(),
Option::<String>::static_type(),
String::static_type(),
Option::<gst::Caps>::static_type(),
])
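With the first parameter now nullable, a handler can distinguish the "no producer" case. A hedged sketch of connecting to the signal from application code; the parameter meanings (producer id, pad name, allowed caps) and the no-filter return are assumptions based on the param types above:

```rust
use gst::prelude::*;

// Hypothetical helper: `src` is assumed to be a webrtcsrc instance.
fn install_encoded_filter_handler(src: &gst::Element) {
    src.connect("request-encoded-filter", false, |args| {
        // args[0] is the element emitting the signal.
        let producer_id = args[1].get::<Option<String>>().unwrap();
        let pad_name = args[2].get::<String>().unwrap();
        let caps = args[3].get::<Option<gst::Caps>>().unwrap();
        println!("encoded filter requested: {producer_id:?} / {pad_name} / {caps:?}");
        // Returning no value here means no filter is provided (assumption).
        None
    });
}
```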

View file

@ -17,6 +17,7 @@ gst = { workspace = true, features = ["v1_16"] }
gst-base.workspace = true
gst-video.workspace = true
gst-gl = { workspace = true, features = ["v1_16"], optional = true }
gst-allocators = { workspace = true, features = ["v1_24"], optional = true }
gst-gl-wayland = { workspace = true, features = ["v1_16"], optional = true }
gst-gl-x11 = { workspace = true, features = ["v1_16"], optional = true }
@ -50,6 +51,7 @@ wayland = ["gtk/v4_6", "gdk-wayland", "gst-gl", "gst-gl-wayland"]
x11glx = ["gtk/v4_6", "gdk-x11", "gst-gl", "gst-gl-x11"]
x11egl = ["gtk/v4_6", "gdk-x11", "gst-gl", "gst-gl-egl"]
winegl = ["gdk-win32/egl", "gst-gl-egl"]
dmabuf = ["gst-allocators", "wayland", "gtk_v4_14", "gst-video/v1_24"]
capi = []
doc = ["gst/v1_18"]
gtk_v4_10 = ["gtk/v4_10"]

View file

@ -1,10 +1,20 @@
# Gtk 4 Sink & Paintable
# GTK 4 Sink & Paintable
GTK 4 provides `gtk::Video` & `gtk::Picture` for rendering media such as videos. As the default `gtk::Video` widget doesn't
offer the possibility to use a custom `gst::Pipeline`, this plugin provides a `gst_video::VideoSink` along with a `gdk::Paintable` that's capable of rendering the sink's frames.
The Sink can generate GL Textures if the system is capable of it, but it needs to be compiled
with either `wayland`, `x11glx` or `x11egl` cargo features.
The sink can generate GL Textures if the system is capable of it, but it needs
to be compiled with either `wayland`, `x11glx` or `x11egl` cargo features. On
Windows and macOS this is enabled by default.
Additionally, the sink can render DMABufs directly on Linux if GTK 4.14 or
newer is used. For this, the `dmabuf` feature needs to be enabled.

Depending on the minimum GTK version that should be supported, new features or
more efficient processing can be opted into with the `gtk_v4_10`, `gtk_v4_12`
and `gtk_v4_14` features. The minimum GTK version required by the sink is GTK
4.4 on Linux without GL support, and GTK 4.6 on Windows, on macOS, and on
Linux with GL support.
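As a minimal usage sketch (assuming the plugin is registered and GTK is initialized), the sink's frames are exposed through its `paintable` property, which must be retrieved from the main thread:

```rust
use gst::prelude::*;
use gtk::prelude::*;

// Minimal sketch: hand the sink's paintable to a gtk::Picture.
fn make_video_widget() -> (gst::Element, gtk::Picture) {
    let sink = gst::ElementFactory::make("gtk4paintablesink")
        .build()
        .expect("gtk4paintablesink not found");
    // The property getter must be called from the main thread.
    let paintable = sink.property::<gtk::gdk::Paintable>("paintable");
    let picture = gtk::Picture::new();
    picture.set_paintable(Some(&paintable));
    (sink, picture)
}
```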
# Flatpak Integration
@ -44,7 +54,7 @@ To build and include the plugin in a Flatpak manifest, you can add the following
{
"type": "git",
"url": "https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs",
"branch": "0.10"
"branch": "0.12"
}
],
"build-options": {

View file

@ -6,13 +6,6 @@ use gtk::{gdk, gio, glib};
use std::cell::RefCell;
fn create_ui(app: &gtk::Application) {
let window = gtk::ApplicationWindow::new(app);
window.set_default_size(640, 480);
let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
let picture = gtk::Picture::new();
let label = gtk::Label::new(Some("Position: 00:00:00"));
let pipeline = gst::Pipeline::new();
let overlay = gst::ElementFactory::make("clockoverlay")
@ -64,8 +57,26 @@ fn create_ui(app: &gtk::Application) {
src.link_filtered(&overlay, &caps).unwrap();
overlay.link(&sink).unwrap();
let window = gtk::ApplicationWindow::new(app);
window.set_default_size(640, 480);
let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
let picture = gtk::Picture::new();
picture.set_paintable(Some(&paintable));
vbox.append(&picture);
#[cfg(feature = "gtk_v4_14")]
{
let offload = gtk::GraphicsOffload::new(Some(&picture));
offload.set_enabled(gtk::GraphicsOffloadEnabled::Enabled);
vbox.append(&offload);
}
#[cfg(not(feature = "gtk_v4_14"))]
{
vbox.append(&picture);
}
let label = gtk::Label::new(Some("Position: 00:00:00"));
vbox.append(&label);
window.set_child(Some(&vbox));

View file

@ -14,7 +14,61 @@ use gst_video::prelude::*;
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
use gst_gl::prelude::*;
use gtk::{gdk, glib};
use std::collections::{HashMap, HashSet};
use std::{
collections::{HashMap, HashSet},
ops,
};
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum VideoInfo {
VideoInfo(gst_video::VideoInfo),
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
DmaDrm(gst_video::VideoInfoDmaDrm),
}
impl From<gst_video::VideoInfo> for VideoInfo {
fn from(v: gst_video::VideoInfo) -> Self {
VideoInfo::VideoInfo(v)
}
}
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
impl From<gst_video::VideoInfoDmaDrm> for VideoInfo {
fn from(v: gst_video::VideoInfoDmaDrm) -> Self {
VideoInfo::DmaDrm(v)
}
}
impl ops::Deref for VideoInfo {
type Target = gst_video::VideoInfo;
fn deref(&self) -> &Self::Target {
match self {
VideoInfo::VideoInfo(info) => info,
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
VideoInfo::DmaDrm(info) => info,
}
}
}
impl VideoInfo {
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
fn dma_drm(&self) -> Option<&gst_video::VideoInfoDmaDrm> {
match self {
VideoInfo::VideoInfo(..) => None,
VideoInfo::DmaDrm(info) => Some(info),
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum TextureCacheId {
Memory(usize),
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
GL(usize),
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
DmaBuf([i32; 4]),
}
#[derive(Debug)]
enum MappedFrame {
@ -24,6 +78,17 @@ enum MappedFrame {
frame: gst_gl::GLVideoFrame<gst_gl::gl_video_frame::Readable>,
wrapped_context: gst_gl::GLContext,
},
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
DmaBuf {
buffer: gst::Buffer,
info: gst_video::VideoInfoDmaDrm,
n_planes: u32,
fds: [i32; 4],
offsets: [usize; 4],
strides: [usize; 4],
width: u32,
height: u32,
},
}
impl MappedFrame {
@ -32,6 +97,8 @@ impl MappedFrame {
MappedFrame::SysMem(frame) => frame.buffer(),
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
MappedFrame::GL { frame, .. } => frame.buffer(),
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
MappedFrame::DmaBuf { buffer, .. } => buffer,
}
}
@ -40,6 +107,8 @@ impl MappedFrame {
MappedFrame::SysMem(frame) => frame.width(),
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
MappedFrame::GL { frame, .. } => frame.width(),
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
MappedFrame::DmaBuf { info, .. } => info.width(),
}
}
@ -48,6 +117,8 @@ impl MappedFrame {
MappedFrame::SysMem(frame) => frame.height(),
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
MappedFrame::GL { frame, .. } => frame.height(),
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
MappedFrame::DmaBuf { info, .. } => info.height(),
}
}
@ -56,6 +127,8 @@ impl MappedFrame {
MappedFrame::SysMem(frame) => frame.format_info(),
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
MappedFrame::GL { frame, .. } => frame.format_info(),
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
MappedFrame::DmaBuf { info, .. } => info.format_info(),
}
}
}
@ -108,16 +181,16 @@ fn video_format_to_memory_format(f: gst_video::VideoFormat) -> gdk::MemoryFormat
fn video_frame_to_memory_texture(
frame: gst_video::VideoFrame<gst_video::video_frame::Readable>,
cached_textures: &mut HashMap<usize, gdk::Texture>,
used_textures: &mut HashSet<usize>,
cached_textures: &mut HashMap<TextureCacheId, gdk::Texture>,
used_textures: &mut HashSet<TextureCacheId>,
) -> (gdk::Texture, f64) {
let texture_id = frame.plane_data(0).unwrap().as_ptr() as usize;
let ptr = frame.plane_data(0).unwrap().as_ptr() as usize;
let pixel_aspect_ratio =
(frame.info().par().numer() as f64) / (frame.info().par().denom() as f64);
if let Some(texture) = cached_textures.get(&texture_id) {
used_textures.insert(texture_id);
if let Some(texture) = cached_textures.get(&TextureCacheId::Memory(ptr)) {
used_textures.insert(TextureCacheId::Memory(ptr));
return (texture.clone(), pixel_aspect_ratio);
}
@ -135,8 +208,8 @@ fn video_frame_to_memory_texture(
)
.upcast::<gdk::Texture>();
cached_textures.insert(texture_id, texture.clone());
used_textures.insert(texture_id);
cached_textures.insert(TextureCacheId::Memory(ptr), texture.clone());
used_textures.insert(TextureCacheId::Memory(ptr));
(texture, pixel_aspect_ratio)
}
@ -144,8 +217,8 @@ fn video_frame_to_memory_texture(
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
fn video_frame_to_gl_texture(
frame: gst_gl::GLVideoFrame<gst_gl::gl_video_frame::Readable>,
cached_textures: &mut HashMap<usize, gdk::Texture>,
used_textures: &mut HashSet<usize>,
cached_textures: &mut HashMap<TextureCacheId, gdk::Texture>,
used_textures: &mut HashSet<TextureCacheId>,
gdk_context: &gdk::GLContext,
#[allow(unused)] wrapped_context: &gst_gl::GLContext,
) -> (gdk::Texture, f64) {
@ -154,8 +227,8 @@ fn video_frame_to_gl_texture(
let pixel_aspect_ratio =
(frame.info().par().numer() as f64) / (frame.info().par().denom() as f64);
if let Some(texture) = cached_textures.get(&(texture_id)) {
used_textures.insert(texture_id);
if let Some(texture) = cached_textures.get(&TextureCacheId::GL(texture_id)) {
used_textures.insert(TextureCacheId::GL(texture_id));
return (texture.clone(), pixel_aspect_ratio);
}
@ -237,18 +310,64 @@ fn video_frame_to_gl_texture(
.upcast::<gdk::Texture>()
};
cached_textures.insert(texture_id, texture.clone());
used_textures.insert(texture_id);
cached_textures.insert(TextureCacheId::GL(texture_id), texture.clone());
used_textures.insert(TextureCacheId::GL(texture_id));
(texture, pixel_aspect_ratio)
}
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
#[allow(clippy::too_many_arguments)]
fn video_frame_to_dmabuf_texture(
buffer: gst::Buffer,
cached_textures: &mut HashMap<TextureCacheId, gdk::Texture>,
used_textures: &mut HashSet<TextureCacheId>,
info: &gst_video::VideoInfoDmaDrm,
n_planes: u32,
fds: &[i32; 4],
offsets: &[usize; 4],
strides: &[usize; 4],
width: u32,
height: u32,
) -> Result<(gdk::Texture, f64), glib::Error> {
let pixel_aspect_ratio = (info.par().numer() as f64) / (info.par().denom() as f64);
if let Some(texture) = cached_textures.get(&TextureCacheId::DmaBuf(*fds)) {
used_textures.insert(TextureCacheId::DmaBuf(*fds));
return Ok((texture.clone(), pixel_aspect_ratio));
}
let builder = gdk::DmabufTextureBuilder::new();
builder.set_display(&gdk::Display::default().unwrap());
builder.set_fourcc(info.fourcc());
builder.set_modifier(info.modifier());
builder.set_width(width);
builder.set_height(height);
builder.set_n_planes(n_planes);
for plane in 0..(n_planes as usize) {
builder.set_fd(plane as u32, fds[plane]);
builder.set_offset(plane as u32, offsets[plane] as u32);
builder.set_stride(plane as u32, strides[plane] as u32);
}
let texture = unsafe {
builder.build_with_release_func(move || {
drop(buffer);
})?
};
cached_textures.insert(TextureCacheId::DmaBuf(*fds), texture.clone());
used_textures.insert(TextureCacheId::DmaBuf(*fds));
Ok((texture, pixel_aspect_ratio))
}
impl Frame {
pub(crate) fn into_textures(
self,
#[allow(unused_variables)] gdk_context: Option<&gdk::GLContext>,
cached_textures: &mut HashMap<usize, gdk::Texture>,
) -> Vec<Texture> {
cached_textures: &mut HashMap<TextureCacheId, gdk::Texture>,
) -> Result<Vec<Texture>, glib::Error> {
let mut textures = Vec::with_capacity(1 + self.overlays.len());
let mut used_textures = HashSet::with_capacity(1 + self.overlays.len());
@ -278,6 +397,28 @@ impl Frame {
&wrapped_context,
)
}
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
MappedFrame::DmaBuf {
buffer,
info,
n_planes,
fds,
offsets,
strides,
width,
height,
} => video_frame_to_dmabuf_texture(
buffer,
cached_textures,
&mut used_textures,
&info,
n_planes,
&fds,
&offsets,
&strides,
width,
height,
)?,
};
textures.push(Texture {
@ -309,14 +450,14 @@ impl Frame {
// Remove textures that were not used this time
cached_textures.retain(|id, _| used_textures.contains(id));
textures
Ok(textures)
}
}
impl Frame {
pub(crate) fn new(
buffer: &gst::Buffer,
info: &gst_video::VideoInfo,
info: &VideoInfo,
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))] wrapped_context: Option<
&gst_gl::GLContext,
>,
@ -327,77 +468,125 @@ impl Frame {
// Empty buffers get filtered out in show_frame
debug_assert!(buffer.n_memory() > 0);
let mut frame;
#[allow(unused_mut)]
let mut frame = None;
#[cfg(not(any(target_os = "macos", target_os = "windows", feature = "gst-gl")))]
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
{
frame = Self {
frame: MappedFrame::SysMem(
// Check we received a buffer with dmabuf memory and if so do some checks before
// passing it onwards
if frame.is_none()
&& buffer
.peek_memory(0)
.is_memory_type::<gst_allocators::DmaBufMemory>()
{
if let Some((vmeta, info)) =
Option::zip(buffer.meta::<gst_video::VideoMeta>(), info.dma_drm())
{
let mut fds = [-1i32; 4];
let mut offsets = [0; 4];
let mut strides = [0; 4];
let n_planes = vmeta.n_planes() as usize;
let vmeta_offsets = vmeta.offset();
let vmeta_strides = vmeta.stride();
for plane in 0..n_planes {
let Some((range, skip)) =
buffer.find_memory(vmeta_offsets[plane]..(vmeta_offsets[plane] + 1))
else {
break;
};
let mem = buffer.peek_memory(range.start);
let Some(mem) = mem.downcast_memory_ref::<gst_allocators::DmaBufMemory>()
else {
break;
};
let fd = mem.fd();
fds[plane] = fd;
offsets[plane] = mem.offset() + skip;
strides[plane] = vmeta_strides[plane] as usize;
}
// All fds valid?
if fds[0..n_planes].iter().all(|fd| *fd != -1) {
frame = Some(MappedFrame::DmaBuf {
buffer: buffer.clone(),
info: info.clone(),
n_planes: n_planes as u32,
fds,
offsets,
strides,
width: vmeta.width(),
height: vmeta.height(),
});
}
}
}
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
{
if frame.is_none() {
// Check we received a buffer with GL memory and if the context of that memory
// can share with the wrapped context around the GDK GL context.
//
// If not it has to be uploaded to the GPU.
let memory_ctx = buffer
.peek_memory(0)
.downcast_memory_ref::<gst_gl::GLBaseMemory>()
.and_then(|m| {
let ctx = m.context();
if wrapped_context
.map_or(false, |wrapped_context| wrapped_context.can_share(ctx))
{
Some(ctx)
} else {
None
}
});
if let Some(memory_ctx) = memory_ctx {
// If there is no GLSyncMeta yet then we need to add one here now, which requires
// obtaining a writable buffer.
let mapped_frame = if buffer.meta::<gst_gl::GLSyncMeta>().is_some() {
gst_gl::GLVideoFrame::from_buffer_readable(buffer.clone(), info)
.map_err(|_| gst::FlowError::Error)?
} else {
let mut buffer = buffer.clone();
{
let buffer = buffer.make_mut();
gst_gl::GLSyncMeta::add(buffer, memory_ctx);
}
gst_gl::GLVideoFrame::from_buffer_readable(buffer, info)
.map_err(|_| gst::FlowError::Error)?
};
// Now that it's guaranteed that there is a sync meta and the frame is mapped, set
// a sync point so we can ensure that the texture is ready later when making use of
// it as gdk::GLTexture.
let meta = mapped_frame.buffer().meta::<gst_gl::GLSyncMeta>().unwrap();
meta.set_sync_point(memory_ctx);
frame = Some(MappedFrame::GL {
frame: mapped_frame,
wrapped_context: wrapped_context.unwrap().clone(),
});
}
}
}
}
let mut frame = Self {
frame: match frame {
Some(frame) => frame,
None => MappedFrame::SysMem(
gst_video::VideoFrame::from_buffer_readable(buffer.clone(), info)
.map_err(|_| gst::FlowError::Error)?,
),
overlays: vec![],
};
}
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
{
// Check we received a buffer with GL memory and if the context of that memory
// can share with the wrapped context around the GDK GL context.
//
// If not it has to be uploaded to the GPU.
let memory_ctx = buffer
.peek_memory(0)
.downcast_memory_ref::<gst_gl::GLBaseMemory>()
.and_then(|m| {
let ctx = m.context();
if wrapped_context
.map_or(false, |wrapped_context| wrapped_context.can_share(ctx))
{
Some(ctx)
} else {
None
}
});
if let Some(memory_ctx) = memory_ctx {
// If there is no GLSyncMeta yet then we need to add one here now, which requires
// obtaining a writable buffer.
let mapped_frame = if buffer.meta::<gst_gl::GLSyncMeta>().is_some() {
gst_gl::GLVideoFrame::from_buffer_readable(buffer.clone(), info)
.map_err(|_| gst::FlowError::Error)?
} else {
let mut buffer = buffer.clone();
{
let buffer = buffer.make_mut();
gst_gl::GLSyncMeta::add(buffer, memory_ctx);
}
gst_gl::GLVideoFrame::from_buffer_readable(buffer, info)
.map_err(|_| gst::FlowError::Error)?
};
// Now that it's guaranteed that there is a sync meta and the frame is mapped, set
// a sync point so we can ensure that the texture is ready later when making use of
// it as gdk::GLTexture.
let meta = mapped_frame.buffer().meta::<gst_gl::GLSyncMeta>().unwrap();
meta.set_sync_point(memory_ctx);
frame = Self {
frame: MappedFrame::GL {
frame: mapped_frame,
wrapped_context: wrapped_context.unwrap().clone(),
},
overlays: vec![],
};
} else {
frame = Self {
frame: MappedFrame::SysMem(
gst_video::VideoFrame::from_buffer_readable(buffer.clone(), info)
.map_err(|_| gst::FlowError::Error)?,
),
overlays: vec![],
};
}
}
},
overlays: vec![],
};
frame.overlays = frame
.frame

View file

@ -1,7 +1,7 @@
//
// Copyright (C) 2021 Bilal Elmoussaoui <bil.elmoussaoui@gmail.com>
// Copyright (C) 2021 Jordan Petridis <jordan@centricular.com>
// Copyright (C) 2021 Sebastian Dröge <sebastian@centricular.com>
// Copyright (C) 2021-2024 Sebastian Dröge <sebastian@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
@ -62,7 +62,7 @@ pub(crate) static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
pub struct PaintableSink {
paintable: Mutex<Option<ThreadGuard<Paintable>>>,
window: Mutex<Option<ThreadGuard<gtk::Window>>>,
info: Mutex<Option<gst_video::VideoInfo>>,
info: Mutex<Option<super::frame::VideoInfo>>,
sender: Mutex<Option<async_channel::Sender<SinkEvent>>>,
pending_frame: Mutex<Option<Frame>>,
cached_caps: Mutex<Option<gst::Caps>>,
@ -82,6 +82,7 @@ impl ObjectSubclass for PaintableSink {
const NAME: &'static str = "GstGtk4PaintableSink";
type Type = super::PaintableSink;
type ParentType = gst_video::VideoSink;
type Interfaces = (gst::ChildProxy,);
}
impl ObjectImpl for PaintableSink {
@ -110,12 +111,14 @@ impl ObjectImpl for PaintableSink {
return None::<&gdk::Paintable>.to_value();
}
let mut paintable = self.paintable.lock().unwrap();
if paintable.is_none() {
self.create_paintable(&mut paintable);
let mut paintable_guard = self.paintable.lock().unwrap();
let mut created = false;
if paintable_guard.is_none() {
created = true;
self.create_paintable(&mut paintable_guard);
}
let paintable = match &*paintable {
let paintable = match &*paintable_guard {
Some(ref paintable) => paintable,
None => {
gst::error!(CAT, imp: self, "Failed to create paintable");
@ -124,16 +127,31 @@ impl ObjectImpl for PaintableSink {
};
// Getter must be called from the main thread
if paintable.is_owner() {
paintable.get_ref().to_value()
} else {
if !paintable.is_owner() {
gst::error!(
CAT,
imp: self,
"Can't retrieve Paintable from non-main thread"
);
None::<&gdk::Paintable>.to_value()
return None::<&gdk::Paintable>.to_value();
}
let paintable = paintable.get_ref().clone();
drop(paintable_guard);
if created {
let self_ = self.to_owned();
glib::MainContext::default().invoke(move || {
let paintable_guard = self_.paintable.lock().unwrap();
if let Some(paintable) = &*paintable_guard {
let paintable_clone = paintable.get_ref().clone();
drop(paintable_guard);
self_.obj().child_added(&paintable_clone, "paintable");
}
});
}
paintable.to_value()
}
_ => unimplemented!(),
}
@ -163,53 +181,99 @@ impl ElementImpl for PaintableSink {
{
let caps = caps.get_mut().unwrap();
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
{
for features in [
[
gst_allocators::CAPS_FEATURE_MEMORY_DMABUF,
gst_video::CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
]
.as_slice(),
[gst_allocators::CAPS_FEATURE_MEMORY_DMABUF].as_slice(),
] {
let c = gst_video::VideoCapsBuilder::new()
.format(gst_video::VideoFormat::DmaDrm)
.features(features.iter().copied())
.build();
caps.append(c);
}
}
for features in [
None,
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
Some(gst::CapsFeatures::new([
"memory:GLMemory",
"meta:GstVideoOverlayComposition",
gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY,
gst_video::CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
])),
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
Some(gst::CapsFeatures::new(["memory:GLMemory"])),
Some(gst::CapsFeatures::new([
gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY,
])),
Some(gst::CapsFeatures::new([
"memory:SystemMemory",
"meta:GstVideoOverlayComposition",
gst_video::CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
])),
Some(gst::CapsFeatures::new(["meta:GstVideoOverlayComposition"])),
Some(gst::CapsFeatures::new([
gst_video::CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION,
])),
None,
] {
const GL_FORMATS: &[gst_video::VideoFormat] =
&[gst_video::VideoFormat::Rgba, gst_video::VideoFormat::Rgb];
const NON_GL_FORMATS: &[gst_video::VideoFormat] = &[
gst_video::VideoFormat::Bgra,
gst_video::VideoFormat::Argb,
gst_video::VideoFormat::Rgba,
gst_video::VideoFormat::Abgr,
gst_video::VideoFormat::Rgb,
gst_video::VideoFormat::Bgr,
];
let formats = if features
.as_ref()
.is_some_and(|features| features.contains("memory:GLMemory"))
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
{
GL_FORMATS
} else {
NON_GL_FORMATS
};
const GL_FORMATS: &[gst_video::VideoFormat] =
&[gst_video::VideoFormat::Rgba, gst_video::VideoFormat::Rgb];
const NON_GL_FORMATS: &[gst_video::VideoFormat] = &[
gst_video::VideoFormat::Bgra,
gst_video::VideoFormat::Argb,
gst_video::VideoFormat::Rgba,
gst_video::VideoFormat::Abgr,
gst_video::VideoFormat::Rgb,
gst_video::VideoFormat::Bgr,
];
let mut c = gst_video::video_make_raw_caps(formats).build();
let formats = if features.as_ref().is_some_and(|features| {
features.contains(gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY)
}) {
GL_FORMATS
} else {
NON_GL_FORMATS
};
if let Some(features) = features {
let c = c.get_mut().unwrap();
let mut c = gst_video::video_make_raw_caps(formats).build();
if features.contains("memory:GLMemory") {
c.set("texture-target", "2D")
if let Some(features) = features {
let c = c.get_mut().unwrap();
if features.contains(gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY) {
c.set("texture-target", "2D")
}
c.set_features_simple(Some(features));
}
c.set_features_simple(Some(features));
caps.append(c);
}
#[cfg(not(any(
target_os = "macos",
target_os = "windows",
feature = "gst-gl"
)))]
{
const FORMATS: &[gst_video::VideoFormat] = &[
gst_video::VideoFormat::Bgra,
gst_video::VideoFormat::Argb,
gst_video::VideoFormat::Rgba,
gst_video::VideoFormat::Abgr,
gst_video::VideoFormat::Rgb,
gst_video::VideoFormat::Bgr,
];
caps.append(c);
let mut c = gst_video::video_make_raw_caps(FORMATS).build();
if let Some(features) = features {
let c = c.get_mut().unwrap();
c.set_features_simple(Some(features));
}
caps.append(c);
}
}
}
@ -244,18 +308,31 @@ impl ElementImpl for PaintableSink {
}
}
let mut paintable = self.paintable.lock().unwrap();
if paintable.is_none() {
self.create_paintable(&mut paintable);
let mut paintable_guard = self.paintable.lock().unwrap();
let mut created = false;
if paintable_guard.is_none() {
created = true;
self.create_paintable(&mut paintable_guard);
}
if paintable.is_none() {
if paintable_guard.is_none() {
gst::error!(CAT, imp: self, "Failed to create paintable");
return Err(gst::StateChangeError);
}
drop(paintable);
drop(paintable_guard);
if created {
let self_ = self.to_owned();
glib::MainContext::default().invoke(move || {
let paintable_guard = self_.paintable.lock().unwrap();
if let Some(paintable) = &*paintable_guard {
let paintable_clone = paintable.get_ref().clone();
drop(paintable_guard);
self_.obj().child_added(&paintable_clone, "paintable");
}
});
}
// Notify the pipeline about the GL display and wrapped context so that any other
// elements in the pipeline ideally use the same / create GL contexts that are
@ -361,8 +438,21 @@ impl BaseSinkImpl for PaintableSink {
fn set_caps(&self, caps: &gst::Caps) -> Result<(), gst::LoggableError> {
gst::debug!(CAT, imp: self, "Setting caps {caps:?}");
let video_info = gst_video::VideoInfo::from_caps(caps)
.map_err(|_| gst::loggable_error!(CAT, "Invalid caps"))?;
#[allow(unused_mut)]
let mut video_info = None;
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
{
if let Ok(info) = gst_video::VideoInfoDmaDrm::from_caps(caps) {
video_info = Some(info.into());
}
}
let video_info = match video_info {
Some(info) => info,
None => gst_video::VideoInfo::from_caps(caps)
.map_err(|_| gst::loggable_error!(CAT, "Invalid caps"))?
.into(),
};
self.info.lock().unwrap().replace(video_info);
@ -516,10 +606,11 @@ impl PaintableSink {
match action {
SinkEvent::FrameChanged => {
let Some(frame) = self.pending_frame() else {
return glib::ControlFlow::Continue;
};
gst::trace!(CAT, imp: self, "Frame changed");
paintable
.get_ref()
.handle_frame_changed(self.pending_frame())
paintable.get_ref().handle_frame_changed(&self.obj(), frame);
}
}
@ -530,13 +621,59 @@ impl PaintableSink {
#[allow(unused_mut)]
let mut tmp_caps = Self::pad_templates()[0].caps().clone();
#[cfg(all(target_os = "linux", feature = "dmabuf"))]
{
let formats = utils::invoke_on_main_thread(move || {
let Some(display) = gdk::Display::default() else {
return vec![];
};
let dmabuf_formats = display.dmabuf_formats();
let mut formats = vec![];
let n_formats = dmabuf_formats.n_formats();
for i in 0..n_formats {
let (fourcc, modifier) = dmabuf_formats.format(i);
if fourcc == 0 || modifier == (u64::MAX >> 8) {
continue;
}
formats.push(gst_video::dma_drm_fourcc_to_string(fourcc, modifier));
}
formats
});
if formats.is_empty() {
// Filter out dmabuf caps from the template pads if we have no supported formats
if !matches!(&*GL_CONTEXT.lock().unwrap(), GLContext::Initialized { .. }) {
tmp_caps = tmp_caps
.iter_with_features()
.filter(|(_, features)| {
!features.contains(gst_allocators::CAPS_FEATURE_MEMORY_DMABUF)
})
.map(|(s, c)| (s.to_owned(), c.to_owned()))
.collect::<gst::Caps>();
}
} else {
let tmp_caps = tmp_caps.make_mut();
for (s, f) in tmp_caps.iter_with_features_mut() {
if f.contains(gst_allocators::CAPS_FEATURE_MEMORY_DMABUF) {
s.set("drm-format", gst::List::new(&formats));
}
}
}
}
#[cfg(any(target_os = "macos", target_os = "windows", feature = "gst-gl"))]
{
// Filter out GL caps from the template pads if we have no context
if !matches!(&*GL_CONTEXT.lock().unwrap(), GLContext::Initialized { .. }) {
tmp_caps = tmp_caps
.iter_with_features()
.filter(|(_, features)| !features.contains("memory:GLMemory"))
.filter(|(_, features)| {
!features.contains(gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY)
})
.map(|(s, c)| (s.to_owned(), c.to_owned()))
.collect::<gst::Caps>();
}
@ -564,7 +701,17 @@ impl PaintableSink {
let window = gtk::Window::new();
let picture = gtk::Picture::new();
picture.set_paintable(Some(&paintable));
window.set_child(Some(&picture));
#[cfg(feature = "gtk_v4_14")]
{
let offload = gtk::GraphicsOffload::new(Some(&picture));
offload.set_enabled(gtk::GraphicsOffloadEnabled::Enabled);
window.set_child(Some(&offload));
}
#[cfg(not(feature = "gtk_v4_14"))]
{
window.set_child(Some(&picture));
}
window.set_default_size(640, 480);
window.connect_close_request({
@ -1073,3 +1220,33 @@ impl PaintableSink {
}
}
}
impl ChildProxyImpl for PaintableSink {
fn child_by_index(&self, index: u32) -> Option<glib::Object> {
if index != 0 {
return None;
}
let paintable = self.paintable.lock().unwrap();
paintable
.as_ref()
.filter(|p| p.is_owner())
.map(|p| p.get_ref().upcast_ref::<glib::Object>().clone())
}
fn child_by_name(&self, name: &str) -> Option<glib::Object> {
if name == "paintable" {
return self.child_by_index(0);
}
None
}
fn children_count(&self) -> u32 {
let paintable = self.paintable.lock().unwrap();
if paintable.is_some() {
1
} else {
0
}
}
}
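A hedged usage sketch for the new interface: from application code on the main thread, the paintable can now also be reached generically through `gst::ChildProxy`, e.g. by tooling that addresses child properties as `paintable::<property-name>`:

```rust
use gst::{glib, prelude::*};

// Sketch: `sink` is assumed to be a gtk4paintablesink created on the main thread.
fn lookup_paintable(sink: &gst::Element) -> Option<glib::Object> {
    let proxy = sink.dynamic_cast_ref::<gst::ChildProxy>()?;
    // "paintable" is the only child exposed by this implementation.
    proxy.child_by_name("paintable")
}
```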

View file

@ -22,7 +22,8 @@ enum SinkEvent {
glib::wrapper! {
pub struct PaintableSink(ObjectSubclass<imp::PaintableSink>)
@extends gst_video::VideoSink, gst_base::BaseSink, gst::Element, gst::Object;
@extends gst_video::VideoSink, gst_base::BaseSink, gst::Element, gst::Object,
@implements gst::ChildProxy;
}
impl PaintableSink {

View file

@ -31,12 +31,13 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
#[derive(Debug)]
pub struct Paintable {
paintables: RefCell<Vec<Texture>>,
cached_textures: RefCell<HashMap<usize, gdk::Texture>>,
cached_textures: RefCell<HashMap<super::super::frame::TextureCacheId, gdk::Texture>>,
gl_context: RefCell<Option<gdk::GLContext>>,
background_color: Cell<gdk::RGBA>,
#[cfg(feature = "gtk_v4_10")]
scaling_filter: Cell<gsk::ScalingFilter>,
use_scaling_filter: Cell<bool>,
force_aspect_ratio: Cell<bool>,
#[cfg(not(feature = "gtk_v4_10"))]
premult_shader: gsk::GLShader,
}
@ -51,6 +52,7 @@ impl Default for Paintable {
#[cfg(feature = "gtk_v4_10")]
scaling_filter: Cell::new(gsk::ScalingFilter::Linear),
use_scaling_filter: Cell::new(false),
force_aspect_ratio: Cell::new(true),
#[cfg(not(feature = "gtk_v4_10"))]
premult_shader: gsk::GLShader::from_bytes(&glib::Bytes::from_static(include_bytes!(
"premult.glsl"
@ -94,6 +96,11 @@ impl ObjectImpl for Paintable {
.blurb("Use selected scaling filter or GTK default for rendering")
.default_value(false)
.build(),
glib::ParamSpecBoolean::builder("force-aspect-ratio")
.nick("Force Aspect Ratio")
.blurb("When enabled, scaling will respect original aspect ratio")
.default_value(true)
.build(),
]
});
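For illustration, a sketch (not part of this diff) of toggling the new property from application code:

```rust
use gtk::{gdk, prelude::*};

// Sketch: `paintable` is the object returned by the sink's `paintable` property.
// Disabling the property stretches the frame to fill the widget instead of
// letterboxing it with the background color.
fn fill_whole_widget(paintable: &gdk::Paintable) {
    paintable.set_property("force-aspect-ratio", false);
}
```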
@ -117,6 +124,7 @@ impl ObjectImpl for Paintable {
"scaling-filter" => self.scaling_filter.get().to_value(),
#[cfg(feature = "gtk_v4_10")]
"use-scaling-filter" => self.use_scaling_filter.get().to_value(),
"force-aspect-ratio" => self.force_aspect_ratio.get().to_value(),
_ => unimplemented!(),
}
}
@ -139,6 +147,7 @@ impl ObjectImpl for Paintable {
"scaling-filter" => self.scaling_filter.set(value.get().unwrap()),
#[cfg(feature = "gtk_v4_10")]
"use-scaling-filter" => self.use_scaling_filter.set(value.get().unwrap()),
"force-aspect-ratio" => self.force_aspect_ratio.set(value.get().unwrap()),
_ => unimplemented!(),
}
}
@ -173,40 +182,66 @@ impl PaintableImpl for Paintable {
let snapshot = snapshot.downcast_ref::<gtk::Snapshot>().unwrap();
let background_color = self.background_color.get();
let force_aspect_ratio = self.force_aspect_ratio.get();
let paintables = self.paintables.borrow();
if !paintables.is_empty() {
gst::trace!(CAT, imp: self, "Snapshotting frame");
let (frame_width, frame_height) =
paintables.first().map(|p| (p.width, p.height)).unwrap();
let mut scale_x = width / frame_width as f64;
let mut scale_y = height / frame_height as f64;
let mut trans_x = 0.0;
let mut trans_y = 0.0;
// TODO: Property for keeping aspect ratio or not
if (scale_x - scale_y).abs() > f64::EPSILON {
if scale_x > scale_y {
trans_x =
((frame_width as f64 * scale_x) - (frame_width as f64 * scale_y)) / 2.0;
scale_x = scale_y;
} else {
trans_y =
((frame_height as f64 * scale_y) - (frame_height as f64 * scale_x)) / 2.0;
scale_y = scale_x;
}
}
let Some(first_paintable) = paintables.first() else {
gst::trace!(CAT, imp: self, "Snapshotting black frame");
snapshot.append_color(
&background_color,
&graphene::Rect::new(0f32, 0f32, width as f32, height as f32),
);
snapshot.translate(&graphene::Point::new(trans_x as f32, trans_y as f32));
return;
};
for Texture {
gst::trace!(CAT, imp: self, "Snapshotting frame");
// The first paintable is the actual video frame and defines the overall size.
//
// Based on its size relative to the snapshot width/height, all other paintables are
// scaled accordingly.
let (frame_width, frame_height) = (first_paintable.width, first_paintable.height);
let mut scale_x = width / frame_width as f64;
let mut scale_y = height / frame_height as f64;
// Usually the caller makes sure that the aspect ratio is preserved. To enforce this here
// optionally, we scale the frame equally in both directions and center it. In addition the
// background color is drawn behind the frame to fill the gaps.
//
// This is not done by default for performance reasons and usually would draw a <1px
// background.
if force_aspect_ratio {
let mut trans_x = 0.0;
let mut trans_y = 0.0;
if (scale_x - scale_y).abs() > f64::EPSILON {
if scale_x > scale_y {
trans_x = (width - (frame_width as f64 * scale_y)) / 2.0;
scale_x = scale_y;
} else {
trans_y = (height - (frame_height as f64 * scale_x)) / 2.0;
scale_y = scale_x;
}
}
if !background_color.is_clear() && (trans_x > f64::EPSILON || trans_y > f64::EPSILON) {
snapshot.append_color(
&background_color,
&graphene::Rect::new(0f32, 0f32, width as f32, height as f32),
);
}
snapshot.translate(&graphene::Point::new(trans_x as f32, trans_y as f32));
}
// Make immutable
let scale_x = scale_x;
let scale_y = scale_y;
for (
idx,
Texture {
texture,
x,
y,
@ -214,151 +249,159 @@ impl PaintableImpl for Paintable {
height: paintable_height,
global_alpha,
has_alpha,
} in &*paintables
},
) in paintables.iter().enumerate()
{
snapshot.push_opacity(*global_alpha as f64);
let bounds = if !force_aspect_ratio && idx == 0 {
// While scaling should end up with the full width/height again, be explicit
// in this case to avoid rounding errors and fill the whole area with the video frame.
graphene::Rect::new(0.0, 0.0, width as f32, height as f32)
} else {
// Scale texture position and size with the same scale factor as the main video
// frame, and make sure to not render outside (0, 0, width, height).
let x = f32::clamp(*x * scale_x as f32, 0.0, width as f32);
let y = f32::clamp(*y * scale_y as f32, 0.0, height as f32);
let texture_width = f32::min(*paintable_width * scale_x as f32, width as f32);
let texture_height = f32::min(*paintable_height * scale_y as f32, height as f32);
graphene::Rect::new(x, y, texture_width, texture_height)
};
// Only premultiply GL textures that expect to be in premultiplied RGBA format.
//
// For GTK 4.14 or newer we use the correct format directly when building the
// texture, but only if a GLES3+ context is used. In that case the NGL renderer is
// used by GTK, which supports non-premultiplied formats correctly and fast.
//
// For GTK 4.10-4.12, or 4.14 and newer if a GLES2 context is used, we use a
// self-mask to pre-multiply the alpha.
//
// For GTK before 4.10, we use a GL shader and hope that it works.
#[cfg(feature = "gtk_v4_10")]
{
snapshot.push_opacity(*global_alpha as f64);
let texture_width = *paintable_width * scale_x as f32;
let texture_height = *paintable_height * scale_y as f32;
let x = *x * scale_x as f32;
let y = *y * scale_y as f32;
let bounds = graphene::Rect::new(x, y, texture_width, texture_height);
// Only premultiply GL textures that expect to be in premultiplied RGBA format.
//
// For GTK 4.14 or newer we use the correct format directly when building the
// texture, but only if a GLES3+ context is used. In that case the NGL renderer is
// used by GTK, which supports non-premultiplied formats correctly and fast.
//
// For GTK 4.10-4.12, or 4.14 and newer if a GLES2 context is used, we use a
// self-mask to pre-multiply the alpha.
//
// For GTK before 4.10, we use a GL shader and hope that it works.
#[cfg(feature = "gtk_v4_10")]
{
let context_requires_premult = {
#[cfg(feature = "gtk_v4_14")]
{
self.gl_context.borrow().as_ref().map_or(false, |context| {
context.api() != gdk::GLAPI::GLES || context.version().0 < 3
})
}
#[cfg(not(feature = "gtk_v4_14"))]
{
true
}
};
let do_premult =
context_requires_premult && texture.is::<gdk::GLTexture>() && *has_alpha;
if do_premult {
snapshot.push_mask(gsk::MaskMode::Alpha);
if self.use_scaling_filter.get() {
#[cfg(feature = "gtk_v4_10")]
snapshot.append_scaled_texture(
texture,
self.scaling_filter.get(),
&bounds,
);
} else {
snapshot.append_texture(texture, &bounds);
}
snapshot.pop(); // pop mask
// color matrix to set alpha of the source to 1.0 as it was
// already applied via the mask just above.
snapshot.push_color_matrix(
&graphene::Matrix::from_float({
[
1.0, 0.0, 0.0, 0.0, //
0.0, 1.0, 0.0, 0.0, //
0.0, 0.0, 1.0, 0.0, //
0.0, 0.0, 0.0, 0.0,
]
}),
&graphene::Vec4::new(0.0, 0.0, 0.0, 1.0),
);
let context_requires_premult = {
#[cfg(feature = "gtk_v4_14")]
{
self.gl_context.borrow().as_ref().map_or(false, |context| {
context.api() != gdk::GLAPI::GLES || context.version().0 < 3
})
}
#[cfg(not(feature = "gtk_v4_14"))]
{
true
}
};
let do_premult =
context_requires_premult && texture.is::<gdk::GLTexture>() && *has_alpha;
if do_premult {
snapshot.push_mask(gsk::MaskMode::Alpha);
if self.use_scaling_filter.get() {
#[cfg(feature = "gtk_v4_10")]
snapshot.append_scaled_texture(texture, self.scaling_filter.get(), &bounds);
} else {
snapshot.append_texture(texture, &bounds);
}
snapshot.pop(); // pop mask
if do_premult {
snapshot.pop(); // pop color matrix
snapshot.pop(); // pop mask 2
}
}
#[cfg(not(feature = "gtk_v4_10"))]
{
let do_premult =
texture.is::<gdk::GLTexture>() && *has_alpha && gtk::micro_version() < 13;
if do_premult {
snapshot.push_gl_shader(
&self.premult_shader,
&bounds,
gsk::ShaderArgsBuilder::new(&self.premult_shader, None).to_args(),
);
}
if self.use_scaling_filter.get() {
#[cfg(feature = "gtk_v4_10")]
snapshot.append_scaled_texture(texture, self.scaling_filter.get(), &bounds);
} else {
snapshot.append_texture(texture, &bounds);
}
if do_premult {
snapshot.gl_shader_pop_texture(); // pop texture appended above from the shader
snapshot.pop(); // pop shader
}
// color matrix to set alpha of the source to 1.0 as it was
// already applied via the mask just above.
snapshot.push_color_matrix(
&graphene::Matrix::from_float({
[
1.0, 0.0, 0.0, 0.0, //
0.0, 1.0, 0.0, 0.0, //
0.0, 0.0, 1.0, 0.0, //
0.0, 0.0, 0.0, 0.0,
]
}),
&graphene::Vec4::new(0.0, 0.0, 0.0, 1.0),
);
}
snapshot.pop(); // pop opacity
if self.use_scaling_filter.get() {
#[cfg(feature = "gtk_v4_10")]
snapshot.append_scaled_texture(texture, self.scaling_filter.get(), &bounds);
} else {
snapshot.append_texture(texture, &bounds);
}
if do_premult {
snapshot.pop(); // pop color matrix
snapshot.pop(); // pop mask 2
}
}
} else {
gst::trace!(CAT, imp: self, "Snapshotting black frame");
snapshot.append_color(
&background_color,
&graphene::Rect::new(0f32, 0f32, width as f32, height as f32),
);
#[cfg(not(feature = "gtk_v4_10"))]
{
let do_premult =
texture.is::<gdk::GLTexture>() && *has_alpha && gtk::micro_version() < 13;
if do_premult {
snapshot.push_gl_shader(
&self.premult_shader,
&bounds,
gsk::ShaderArgsBuilder::new(&self.premult_shader, None).to_args(),
);
}
if self.use_scaling_filter.get() {
#[cfg(feature = "gtk_v4_10")]
snapshot.append_scaled_texture(texture, self.scaling_filter.get(), &bounds);
} else {
snapshot.append_texture(texture, &bounds);
}
if do_premult {
snapshot.gl_shader_pop_texture(); // pop texture appended above from the shader
snapshot.pop(); // pop shader
}
}
snapshot.pop(); // pop opacity
}
}
}
impl Paintable {
pub(super) fn handle_frame_changed(&self, frame: Option<Frame>) {
pub(super) fn handle_frame_changed(&self, sink: &crate::PaintableSink, frame: Frame) {
let context = self.gl_context.borrow();
if let Some(frame) = frame {
gst::trace!(CAT, imp: self, "Received new frame");
let new_paintables =
frame.into_textures(context.as_ref(), &mut self.cached_textures.borrow_mut());
let new_size = new_paintables
.first()
.map(|p| (f32::round(p.width) as u32, f32::round(p.height) as u32))
.unwrap();
gst::trace!(CAT, imp: self, "Received new frame");
let old_paintables = self.paintables.replace(new_paintables);
let old_size = old_paintables
.first()
.map(|p| (f32::round(p.width) as u32, f32::round(p.height) as u32));
let new_paintables =
match frame.into_textures(context.as_ref(), &mut self.cached_textures.borrow_mut()) {
Ok(textures) => textures,
Err(err) => {
gst::element_error!(
sink,
gst::ResourceError::Failed,
["Failed to transform frame into textures: {err}"]
);
return;
}
};
if Some(new_size) != old_size {
gst::debug!(
CAT,
imp: self,
"Size changed from {old_size:?} to {new_size:?}",
);
self.obj().invalidate_size();
}
let new_size = new_paintables
.first()
.map(|p| (f32::round(p.width) as u32, f32::round(p.height) as u32))
.unwrap();
self.obj().invalidate_contents();
let old_paintables = self.paintables.replace(new_paintables);
let old_size = old_paintables
.first()
.map(|p| (f32::round(p.width) as u32, f32::round(p.height) as u32));
if Some(new_size) != old_size {
gst::debug!(
CAT,
imp: self,
"Size changed from {old_size:?} to {new_size:?}",
);
self.obj().invalidate_size();
}
self.obj().invalidate_contents();
}
pub(super) fn handle_flush_frames(&self) {

View file

@ -30,8 +30,8 @@ impl Paintable {
}
impl Paintable {
pub(crate) fn handle_frame_changed(&self, frame: Option<Frame>) {
self.imp().handle_frame_changed(frame);
pub(crate) fn handle_frame_changed(&self, sink: &crate::PaintableSink, frame: Frame) {
self.imp().handle_frame_changed(sink, frame);
}
pub(crate) fn handle_flush_frames(&self) {