Compare commits


13 commits

Author SHA1 Message Date
Sebastian Dröge 2f2aac55a3 Update version to 0.12.1 2024-02-13 13:02:27 +02:00
Sebastian Dröge 31dfcd0a78 Update CHANGELOG.md for 0.12.1 2024-02-13 13:01:46 +02:00
Sebastian Dröge b3e233f0c5 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1460>
2024-02-13 12:37:23 +02:00
Sebastian Dröge 58a065caf3 textwrap: Remove unnecessary to_string() in debug output of a string
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1460>
2024-02-13 12:35:40 +02:00
Jordan Yelloz 606352d7cf webrtcsink: Added sinkpad with "msid" property
This forwards to the webrtcbin sinkpad's msid when specified.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1460>
2024-02-12 18:11:42 +02:00
Sebastian Dröge aa2d056ea1 Update to async-tungstenite 0.25
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1460>
2024-02-12 18:11:31 +02:00
Sebastian Dröge 3f9d5cf2f0 gtk4: Create a window if running from gst-launch-1.0 or GST_GTK4_WINDOW=1 is set
Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/482

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1460>
2024-02-12 18:11:25 +02:00
Sebastian Dröge 149eff08b7 utils: Update for renamed clippy lint in 1.76
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1460>
2024-02-12 18:11:19 +02:00
Sebastian Dröge c4e3fff2a2 Update Cargo.lock
Downgrade clap_derive to 4.4.7 to not require Rust 1.74 or newer.
2024-02-08 20:52:14 +02:00
Sebastian Dröge 16e001e3f2 Update dependency versions for gtk-rs-core / gtk4-rs / gstreamer-rs and local crates 2024-02-08 19:40:08 +02:00
Sebastian Dröge af694e8bc1 ci: Use 0.22 branch of gstreamer-rs images templates 2024-02-08 19:35:05 +02:00
Sebastian Dröge 66f2969eb9 Update Cargo.lock 2024-02-08 19:33:32 +02:00
Sebastian Dröge 50efdf6a64 Update version to 0.12.0 2024-02-08 19:33:09 +02:00
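Two of the commits above are easiest to understand with a short illustration.

For 'webrtcsink: Added sinkpad with "msid" property', a minimal usage sketch in Rust; only the "msid" property name comes from the commit message, while the pad-template name and the rest of the setup are assumptions:

    use gst::prelude::*;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        gst::init()?;
        let webrtcsink = gst::ElementFactory::make("webrtcsink").build()?;
        // "video_%u" is an assumed request-pad template name.
        let sinkpad = webrtcsink
            .request_pad_simple("video_%u")
            .expect("webrtcsink should provide request pads");
        // Per the commit message, this value is forwarded to the
        // corresponding webrtcbin sink pad's msid.
        sinkpad.set_property("msid", "my-stream-id");
        Ok(())
    }

For "gtk4: Create a window if running from gst-launch-1.0 or GST_GTK4_WINDOW=1 is set", the commit message implies that a pipeline such as GST_GTK4_WINDOW=1 gst-launch-1.0 videotestsrc ! gtk4paintablesink should now open its own window even outside a GTK application.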
425 changed files with 14250 additions and 67444 deletions

.gitignore

@@ -1,3 +1,4 @@
Cargo.lock
target
*~
*.bk

.gitlab-ci.yml
@@ -6,7 +6,7 @@ include:
file: '/templates/debian.yml'
- project: 'gstreamer/gstreamer-rs'
ref: main
ref: '0.22'
file: '/ci/images_template.yml'
- project: 'gstreamer/gstreamer'
@@ -20,8 +20,6 @@ variables:
# to ensure that we are testing against the same thing as GStreamer itself.
# The tag name is included above from the main repo.
GSTREAMER_DOC_IMAGE: "registry.freedesktop.org/gstreamer/gstreamer/amd64/fedora:$FEDORA_TAG-main"
# Use the gstreamer image to trigger the cerbero job, same as the monorepo
CERBERO_TRIGGER_IMAGE: "registry.freedesktop.org/gstreamer/gstreamer/amd64/fedora:$FEDORA_TAG-main"
WINDOWS_BASE: "registry.freedesktop.org/gstreamer/gstreamer-rs/windows"
WINDOWS_RUST_MINIMUM_IMAGE: "$WINDOWS_BASE:$GST_RS_IMG_TAG-main-$GST_RS_MSRV"
WINDOWS_RUST_STABLE_IMAGE: "$WINDOWS_BASE:$GST_RS_IMG_TAG-main-$GST_RS_STABLE"
@@ -52,7 +50,6 @@ trigger:
stage: 'trigger'
variables:
GIT_STRATEGY: none
tags: [ 'placeholder-job' ]
script:
- echo "Trigger job done, now running the pipeline."
rules:
@@ -99,11 +96,11 @@ trigger:
- rustc --version
- cargo build --locked --color=always --workspace --all-targets
- RUST_BACKTRACE=1 G_DEBUG=fatal_warnings cargo test --locked --color=always --workspace --all-targets
- G_DEBUG=fatal_warnings cargo test --locked --color=always --workspace --all-targets
- cargo build --locked --color=always --workspace --all-targets --all-features --exclude gst-plugin-gtk4
- RUST_BACKTRACE=1 G_DEBUG=fatal_warnings cargo test --locked --color=always --workspace --all-targets --all-features --exclude gst-plugin-gtk4
- G_DEBUG=fatal_warnings cargo test --locked --color=always --workspace --all-targets --all-features --exclude gst-plugin-gtk4
- cargo build --locked --color=always --workspace --all-targets --no-default-features
- RUST_BACKTRACE=1 G_DEBUG=fatal_warnings cargo test --locked --color=always --workspace --all-targets --no-default-features
- G_DEBUG=fatal_warnings cargo test --locked --color=always --workspace --all-targets --no-default-features
test msrv:
extends:
@@ -290,7 +287,6 @@ test windows stable:
rustfmt:
extends: '.debian:12-stable'
stage: "lint"
tags: [ 'placeholder-job' ]
needs: []
script:
- cargo fmt --version
@@ -299,7 +295,6 @@ rustfmt:
typos:
extends: '.debian:12-stable'
stage: "lint"
tags: [ 'placeholder-job' ]
needs: []
script:
- typos
@@ -307,7 +302,6 @@ typos:
gstwebrtc-api lint:
image: node:lts
stage: "lint"
tags: [ 'placeholder-job' ]
needs: []
script:
- cd net/webrtc/gstwebrtc-api
@@ -317,12 +311,10 @@ gstwebrtc-api lint:
check commits:
extends: '.debian:12-stable'
stage: "lint"
tags: [ 'placeholder-job' ]
needs: []
script:
- ci-fairy check-commits --textwidth 0 --no-signed-off-by
- ci/check-for-symlinks.sh
- ci/check-meson-version.sh
clippy:
extends: '.debian:12-stable'
@@ -392,34 +384,3 @@ coverage:
coverage_report:
coverage_format: cobertura
path: coverage.xml
cerbero trigger:
image: $CERBERO_TRIGGER_IMAGE
needs: [ "trigger" ]
variables:
# We will build this cerbero branch in the cerbero trigger CI
CERBERO_UPSTREAM_BRANCH: 'main'
script:
- ci/cerbero/trigger_cerbero_pipeline.py
rules:
# Never run post merge
- if: '$CI_PROJECT_NAMESPACE == "gstreamer"'
when: never
# Don't run if the only changes are files that cargo-c does not read
- if:
changes:
- "CHANGELOG.md"
- "README.md"
- "deny.toml"
- "rustfmt.toml"
- "typos.toml"
- "*.py"
- "*.sh"
- "Makefile"
- "meson.build"
- "meson_options.txt"
- "**/meson.build"
- "ci/*.sh"
- "ci/*.py"
when: never
- when: always

CHANGELOG.md
@@ -5,105 +5,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html),
specifically the [variant used by Rust](http://doc.crates.io/manifest.html#the-version-field).
## [0.12.7] - 2024-06-19
### Fixed
- aws, spotifyaudiosrc, reqwesthttpsrc, webrtchttp: Fix race condition when unlocking
- rtp: Allow any payload type for the AV1 RTP payloader/depayloader
- rtp: Various fixes to the AV1 RTP payloader/depayloader to work correctly
with Chrome and Pion
- meson: Various fixes to the meson-based build system around cargo
- webrtcsink: Use correct property names for configuring `av1enc`
- webrtcsink: Avoid lock poisoning when setting encoder properties
### Added
- ndi: Support for NDI SDK v6
- webrtcsink: Support for AV1 via `nvav1enc`, `av1enc` or `rav1enc`
### Changed
- Update to async-tungstenite 0.26
## [0.12.6] - 2024-05-23
### Fixed
- Various Rust 1.78 clippy warnings.
- gtk4paintablesink: Fix plugin description.
### Added
- fmp4mux / mp4mux: Add support for adding AV1 header OBUs into the MP4
headers.
- fmp4mux / mp4mux: Take track language from the tags if provided.
- gtk4paintablesink: Add GST_GTK4_WINDOW_FULLSCREEN environment variable to
create a fullscreen window for debugging purposes.
- gtk4paintablesink: Also create a window automatically when called from
gst-play-1.0.
- webrtc: Add support for insecure TLS connections.
- webrtcsink: Add VP9 parser after the encoder.
### Changed
- webrtcsink: Improve error when no discovery pipeline runs.
- rtpgccbwe: Improve debug output in various places.
## [0.12.5] - 2024-04-29
### Fixed
- hrtfrender: Use a bitmask instead of an int in the caps for the channel-mask.
- rtpgccbwe: Don't log an error when pushing a buffer list fails while stopping.
- webrtcsink: Don't panic in bitrate handling with unsupported encoders.
- webrtcsink: Don't panic if unsupported input caps are used.
- webrtcsrc: Allow a `None` producer-id in `request-encoded-filter` signal.
### Added
- aws: New property to support path-style addressing.
- fmp4mux / mp4mux: Support FLAC inside (f)MP4.
- gtk4: Support directly importing dmabufs with GTK 4.14.
- gtk4: Add force-aspect-ratio property similar to other video sinks.
## [0.12.4] - 2024-04-08
### Fixed
- aws: Use fixed behaviour version to ensure that updates to the AWS SDK don't
change any default configurations in unexpected ways.
- onvifmetadataparse: Fix possible deadlock on shutdown.
- webrtcsink: Set `perfect-timestamp=true` on audio encoders to work around
bugs in Chrome's audio decoders.
- Various clippy warnings.
### Changed
- reqwest: Update to reqwest 0.12.
- webrtchttp: Update to reqwest 0.12.
## [0.12.3] - 2024-03-21
### Fixed
- gtk4paintablesink: Fix scaling of texture position.
- janusvrwebrtcsink: Handle 64 bit numerical room ids.
- janusvrwebrtcsink: Don't include deprecated audio/video fields in publish
messages.
- janusvrwebrtcsink: Handle various other messages to avoid printing errors.
- livekitwebrtc: Fix shutdown behaviour.
- rtpgccbwe: Don't forward buffer lists with buffers from different SSRCs to
avoid breaking assumptions in rtpsession.
- sccparse: Ignore invalid timecodes during seeking.
- webrtcsink: Don't try parsing audio caps as video caps.
### Changed
- webrtc: Allow resolution and framerate changes.
- webrtcsrc: Make produce-peer-id optional.
### Added
- livekitwebrtcsrc: Add new LiveKit source element.
- regex: Add support for configuring regex behaviour.
- spotifyaudiosrc: Document how to use with non-Facebook accounts.
- webrtcsrc: Add `do-retransmission` property.
## [0.12.2] - 2024-02-26
### Fixed
- rtpgccbwe: Don't reset PTS/DTS to `None` as otherwise `rtpsession` won't be
able to generate valid RTCP.
- webrtcsink: Fix usage with 1.22.
### Added
- janusvrwebrtcsink: Add `secret-key` property.
- janusvrwebrtcsink: Allow for string room ids and add `string-ids` property.
- textwrap: Don't split on all whitespaces, especially not on non-breaking
whitespace.
## [0.12.1] - 2024-02-13
### Added
- gtk4: Create a window for testing purposes when running in `gst-launch-1.0`
@@ -141,6 +42,7 @@ specifically the [variant used by Rust](http://doc.crates.io/manifest.html#the-v
- New `janusvrwebrtcsink` element for the Janus VideoRoom API.
- New `rtspsrc2` element.
- New `whipserversrc` element.
- gtk4: New `background-color` property for setting the color of the
background of the frame and the borders, if any.
- gtk4: New `scale-filter` property for defining how to scale the frames.
@@ -448,13 +350,7 @@ specifically the [variant used by Rust](http://doc.crates.io/manifest.html#the-v
- webrtcsink: Make the `turn-server` property a `turn-servers` list
- webrtcsink: Move from async-std to tokio
[Unreleased]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.7...HEAD
[0.12.7]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.6...0.12.7
[0.12.6]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.5...0.12.6
[0.12.5]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.4...0.12.5
[0.12.4]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.3...0.12.4
[0.12.3]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.2...0.12.3
[0.12.2]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.1...0.12.2
[Unreleased]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.1...HEAD
[0.12.1]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.0...0.12.1
[0.12.0]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.11.3...0.12.0
[0.11.3]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.11.2...0.11.3

Cargo.lock (generated): 2609 lines changed
File diff suppressed because it is too large.

Cargo.toml
@@ -12,11 +12,9 @@ members = [
"audio/spotify",
"generic/file",
"generic/originalbuffer",
"generic/sodium",
"generic/threadshare",
"generic/inter",
"generic/gopbuffer",
"mux/flavors",
"mux/fmp4",
@@ -34,7 +32,6 @@ members = [
"net/webrtc",
"net/webrtc/protocol",
"net/webrtc/signalling",
"net/quinn",
"text/ahead",
"text/json",
@@ -68,10 +65,8 @@ default-members = [
"audio/claxon",
"audio/lewton",
"generic/originalbuffer",
"generic/threadshare",
"generic/inter",
"generic/gopbuffer",
"mux/fmp4",
"mux/mp4",
@@ -88,7 +83,6 @@ default-members = [
"net/webrtc/protocol",
"net/webrtc/signalling",
"net/ndi",
"net/quinn",
"text/ahead",
"text/json",
@@ -119,37 +113,36 @@ panic = 'unwind'
opt-level = 1
[workspace.package]
version = "0.13.0-alpha.1"
version = "0.12.1"
repository = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs"
edition = "2021"
rust-version = "1.71"
rust-version = "1.70"
[workspace.dependencies]
once_cell = "1"
glib = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
gio = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
cairo-rs = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master", features=["use_glib"] }
pango = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
pangocairo = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
gtk = { package = "gtk4", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master"}
gdk-wayland = { package = "gdk4-wayland", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master"}
gdk-x11 = { package = "gdk4-x11", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master"}
gdk-win32 = { package = "gdk4-win32", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master"}
gst = { package = "gstreamer", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-allocators = { package = "gstreamer-allocators", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-app = { package = "gstreamer-app", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-audio = { package = "gstreamer-audio", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-base = { package = "gstreamer-base", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-check = { package = "gstreamer-check", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-gl = { package = "gstreamer-gl", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-gl-egl = { package = "gstreamer-gl-egl", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-gl-wayland = { package = "gstreamer-gl-wayland", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-gl-x11 = { package = "gstreamer-gl-x11", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-net = { package = "gstreamer-net", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-pbutils = { package = "gstreamer-pbutils", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-plugin-version-helper = { path="./version-helper" }
gst-rtp = { package = "gstreamer-rtp", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-sdp = { package = "gstreamer-sdp", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-utils = { package = "gstreamer-utils", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-video = { package = "gstreamer-video", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-webrtc = { package = "gstreamer-webrtc", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
glib = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "0.19", version = "0.19" }
gio = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "0.19", version = "0.19" }
cairo-rs = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "0.19", version = "0.19", features=["use_glib"] }
pango = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "0.19", version = "0.19" }
pangocairo = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "0.19", version = "0.19" }
gtk = { package = "gtk4", git = "https://github.com/gtk-rs/gtk4-rs", branch = "0.8", version = "0.8"}
gdk-wayland = { package = "gdk4-wayland", git = "https://github.com/gtk-rs/gtk4-rs", branch = "0.8", version = "0.8"}
gdk-x11 = { package = "gdk4-x11", git = "https://github.com/gtk-rs/gtk4-rs", branch = "0.8", version = "0.8"}
gdk-win32 = { package = "gdk4-win32", git = "https://github.com/gtk-rs/gtk4-rs", branch = "0.8", version = "0.8"}
gst = { package = "gstreamer", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-app = { package = "gstreamer-app", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-audio = { package = "gstreamer-audio", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-base = { package = "gstreamer-base", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-check = { package = "gstreamer-check", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-gl = { package = "gstreamer-gl", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-gl-egl = { package = "gstreamer-gl-egl", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-gl-wayland = { package = "gstreamer-gl-wayland", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-gl-x11 = { package = "gstreamer-gl-x11", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-net = { package = "gstreamer-net", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-pbutils = { package = "gstreamer-pbutils", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-plugin-version-helper = { path="./version-helper", version = "0.8" }
gst-rtp = { package = "gstreamer-rtp", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-sdp = { package = "gstreamer-sdp", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-utils = { package = "gstreamer-utils", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-video = { package = "gstreamer-video", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-webrtc = { package = "gstreamer-webrtc", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }

README.md
@@ -33,9 +33,6 @@ You will find the following plugins in this repository:
- `onvif`: Various elements for parsing, RTP (de)payloading, overlaying of ONVIF timed metadata.
- `quinn`: Transfer data over the network using QUIC
- `quinnquicsink`/`quinnquicsrc`: Send and receive data using QUIC
- `raptorq`: Encoder/decoder element for RaptorQ RTP FEC mechanism.
- `reqwest`: An HTTP source element based on the [reqwest](https://github.com/seanmonstar/reqwest) library.

@@ -11,8 +11,8 @@ use gst::prelude::*;
use gst::subclass::prelude::*;
use gst_audio::subclass::prelude::*;
use std::cmp;
use std::sync::Mutex;
use std::{cmp, u64};
use byte_slice_cast::*;

@@ -18,6 +18,7 @@ use gst::subclass::prelude::*;
use std::mem;
use std::sync::Mutex;
use std::u64;
use byte_slice_cast::*;

@@ -12,6 +12,7 @@ use gst::subclass::prelude::*;
use gst_audio::subclass::prelude::*;
use gst_base::prelude::*;
use std::i32;
use std::sync::atomic;
use std::sync::Mutex;

@@ -649,7 +649,7 @@ impl BaseTransformImpl for HrtfRender {
if direction == gst::PadDirection::Sink {
s.set("channels", 2);
s.set("channel-mask", gst::Bitmask(0x3));
s.set("channel-mask", 0x3);
} else {
let settings = self.settings.lock().unwrap();
if let Some(objs) = &settings.spatial_objects {
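This hunk corresponds to the 0.12.5 CHANGELOG entry quoted earlier ("hrtfrender: Use a bitmask instead of an int in the caps for the channel-mask"). A self-contained sketch of the distinction when building caps, illustrative only and not the element's code:

    fn main() {
        gst::init().unwrap();
        // "channel-mask" must be a GstBitmask value; setting a plain integer
        // creates an (int)-typed field that will not negotiate against
        // bitmask-typed caps.
        let caps = gst::Caps::builder("audio/x-raw")
            .field("channels", 2i32)
            .field("channel-mask", gst::Bitmask(0x3))
            .build();
        println!("{caps}");
        // audio/x-raw, channels=(int)2, channel-mask=(bitmask)0x0000000000000003
    }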

@@ -198,7 +198,7 @@ fn basic_two_channels() {
#[test]
fn silence() {
run_test("wave=silence", None, 1000, 1024, 1, f64::NEG_INFINITY);
run_test("wave=silence", None, 1000, 1024, 1, std::f64::NEG_INFINITY);
}
#[test]
@@ -228,7 +228,7 @@ fn below_threshold() {
1000,
1024,
1,
f64::NEG_INFINITY,
std::f64::NEG_INFINITY,
);
}

@@ -17,6 +17,7 @@ use gst_base::subclass::prelude::*;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Mutex;
use std::{f64, i32};
use byte_slice_cast::*;

audio/spotify/README.md
@@ -8,11 +8,10 @@ to respect their legal/licensing restrictions.
## Spotify Credentials
This plugin requires a [Spotify Premium](https://www.spotify.com/premium/) account.
If your account is linked with Facebook, you'll need to setup
a [device username and password](https://www.spotify.com/us/account/set-device-password/).
This plugin requires a [Spotify Premium](https://www.spotify.com/premium/) account configured
with a [device password](https://www.spotify.com/us/account/set-device-password/).
Those username and password are then set using the `username` and `password` properties.
You can then set the device username and password using the `username` and `password` properties.
You may also want to cache credentials and downloaded files, see the `cache-` properties on the element.
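As a concrete illustration of the properties this README describes, a hedged sketch; the property names come from the README and the ParamSpec definitions in the next hunk, and the credential values are placeholders:

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        gst::init()?;
        // Build the element with device credentials set up front.
        let src = gst::ElementFactory::make("spotifyaudiosrc")
            .property("username", "device-username")
            .property("password", "device-password")
            .build()?;
        let _ = src;
        Ok(())
    }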

@@ -30,13 +30,13 @@ impl Settings {
pub fn properties() -> Vec<glib::ParamSpec> {
vec![glib::ParamSpecString::builder("username")
.nick("Username")
.blurb("Spotify username, Facebook accounts need a device username from https://www.spotify.com/us/account/set-device-password/")
.blurb("Spotify device username from https://www.spotify.com/us/account/set-device-password/")
.default_value(Some(""))
.mutable_ready()
.build(),
glib::ParamSpecString::builder("password")
.nick("Password")
.blurb("Spotify password, Facebook accounts need a device password from https://www.spotify.com/us/account/set-device-password/")
.blurb("Spotify device password from https://www.spotify.com/us/account/set-device-password/")
.default_value(Some(""))
.mutable_ready()
.build(),

@@ -6,7 +6,7 @@
//
// SPDX-License-Identifier: MPL-2.0
use std::sync::{mpsc, Arc, Mutex};
use std::sync::{mpsc, Arc, Mutex, MutexGuard};
use futures::future::{AbortHandle, Abortable, Aborted};
use once_cell::sync::Lazy;
@@ -66,42 +66,18 @@ struct Settings {
bitrate: Bitrate,
}
#[derive(Default)]
enum SetupThread {
#[default]
None,
Pending {
thread_handle: Option<std::thread::JoinHandle<Result<anyhow::Result<()>, Aborted>>>,
abort_handle: AbortHandle,
},
Cancelled,
Done,
}
impl SetupThread {
fn abort(&mut self) {
// Cancel setup thread if it is pending and not done yet
if matches!(self, SetupThread::None | SetupThread::Done) {
return;
}
if let SetupThread::Pending {
ref abort_handle, ..
} = *self
{
abort_handle.abort();
}
*self = SetupThread::Cancelled;
}
}
#[derive(Default)]
pub struct SpotifyAudioSrc {
setup_thread: Mutex<SetupThread>,
setup_thread: Mutex<Option<SetupThread>>,
state: Arc<Mutex<Option<State>>>,
settings: Mutex<Settings>,
}
struct SetupThread {
thread_handle: std::thread::JoinHandle<Result<anyhow::Result<()>, Aborted>>,
abort_handle: AbortHandle,
}
#[glib::object_subclass]
impl ObjectSubclass for SpotifyAudioSrc {
const NAME: &'static str = "GstSpotifyAudioSrc";
@@ -196,18 +172,21 @@ impl BaseSrcImpl for SpotifyAudioSrc {
}
{
// If not started yet and not cancelled, start the setup
let mut setup_thread = self.setup_thread.lock().unwrap();
assert!(!matches!(&*setup_thread, SetupThread::Cancelled));
if matches!(&*setup_thread, SetupThread::None) {
self.start_setup(&mut setup_thread);
let setup_thread = self.setup_thread.lock().unwrap();
if setup_thread.is_some() {
// already starting
return Ok(());
}
self.start_setup(setup_thread);
}
Ok(())
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
// stop the setup if it's not completed yet
self.cancel_setup();
if let Some(state) = self.state.lock().unwrap().take() {
gst::debug!(CAT, imp: self, "stopping");
state.player.stop();
@@ -220,17 +199,9 @@ impl BaseSrcImpl for SpotifyAudioSrc {
}
fn unlock(&self) -> Result<(), gst::ErrorMessage> {
let mut setup_thread = self.setup_thread.lock().unwrap();
setup_thread.abort();
Ok(())
}
self.cancel_setup();
fn unlock_stop(&self) -> Result<(), gst::ErrorMessage> {
let mut setup_thread = self.setup_thread.lock().unwrap();
if matches!(&*setup_thread, SetupThread::Cancelled) {
*setup_thread = SetupThread::None;
}
Ok(())
self.parent_unlock()
}
}
@@ -245,47 +216,30 @@ impl PushSrcImpl for SpotifyAudioSrc {
};
if !state_set {
// If not started yet and not cancelled, start the setup
let mut setup_thread = self.setup_thread.lock().unwrap();
if matches!(&*setup_thread, SetupThread::Cancelled) {
return Err(gst::FlowError::Flushing);
}
if matches!(&*setup_thread, SetupThread::None) {
self.start_setup(&mut setup_thread);
let setup_thread = self.setup_thread.lock().unwrap();
if setup_thread.is_none() {
// unlock() could potentially cancel the setup, and create() can be called after unlock() without going through start() again.
self.start_setup(setup_thread);
}
}
{
// wait for the setup to be completed
let mut setup_thread = self.setup_thread.lock().unwrap();
if let SetupThread::Pending {
ref mut thread_handle,
..
} = *setup_thread
{
let thread_handle = thread_handle.take().expect("Waiting multiple times");
drop(setup_thread);
let res = thread_handle.join().unwrap();
if let Some(setup) = setup_thread.take() {
let res = setup.thread_handle.join().unwrap();
match res {
Err(_aborted) => {
gst::debug!(CAT, imp: self, "setup has been cancelled");
setup_thread = self.setup_thread.lock().unwrap();
*setup_thread = SetupThread::Cancelled;
return Err(gst::FlowError::Flushing);
}
Ok(Err(err)) => {
gst::error!(CAT, imp: self, "failed to start: {err:?}");
gst::element_imp_error!(self, gst::ResourceError::Settings, ["{err:?}"]);
setup_thread = self.setup_thread.lock().unwrap();
*setup_thread = SetupThread::None;
return Err(gst::FlowError::Error);
}
Ok(Ok(_)) => {
setup_thread = self.setup_thread.lock().unwrap();
*setup_thread = SetupThread::Done;
}
Ok(Ok(_)) => {}
}
}
}
@@ -377,9 +331,7 @@ impl URIHandlerImpl for SpotifyAudioSrc {
}
impl SpotifyAudioSrc {
fn start_setup(&self, setup_thread: &mut SetupThread) {
assert!(matches!(setup_thread, SetupThread::None));
fn start_setup(&self, mut setup_thread: MutexGuard<Option<SetupThread>>) {
let self_ = self.to_owned();
// run the runtime from another thread to prevent the "start a runtime from within a runtime" panic
@@ -392,10 +344,10 @@ impl SpotifyAudioSrc {
})
});
*setup_thread = SetupThread::Pending {
thread_handle: Some(thread_handle),
setup_thread.replace(SetupThread {
thread_handle,
abort_handle,
};
});
}
async fn setup(&self) -> anyhow::Result<()> {
@@ -468,4 +420,12 @@ impl SpotifyAudioSrc {
Ok(())
}
fn cancel_setup(&self) {
let mut setup_thread = self.setup_thread.lock().unwrap();
if let Some(setup) = setup_thread.take() {
setup.abort_handle.abort();
}
}
}
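Both versions of this code share the same cancellation pattern: the async setup runs on its own thread inside an Abortable future, and an AbortHandle stored alongside the thread handle lets unlock()/stop() cancel it. A self-contained sketch of just that pattern, with a placeholder setup body:

    use futures::future::{AbortHandle, Abortable, Aborted};

    fn main() {
        let (abort_handle, abort_registration) = AbortHandle::new_pair();
        // Run the async setup on a dedicated thread, as the element does to
        // avoid starting a runtime from within a runtime.
        let thread_handle = std::thread::spawn(move || {
            futures::executor::block_on(Abortable::new(
                async { anyhow::Ok(()) }, // placeholder for the real setup
                abort_registration,
            ))
        });
        // From another thread, e.g. in unlock():
        abort_handle.abort();
        match thread_handle.join().unwrap() {
            Err(Aborted) => println!("setup cancelled"),
            Ok(Err(err)) => println!("setup failed: {err:?}"),
            Ok(Ok(())) => println!("setup finished"),
        }
    }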

ci/cerbero/trigger_cerbero_pipeline.py
@@ -1,103 +0,0 @@
#!/usr/bin/python3
#
# Copied from gstreamer.git/ci/gitlab/trigger_cerbero_pipeline.py
import time
import os
import sys
import gitlab
CERBERO_PROJECT = 'gstreamer/cerbero'
class Status:
FAILED = 'failed'
MANUAL = 'manual'
CANCELED = 'canceled'
SUCCESS = 'success'
SKIPPED = 'skipped'
CREATED = 'created'
@classmethod
def is_finished(cls, state):
return state in [
cls.FAILED,
cls.MANUAL,
cls.CANCELED,
cls.SUCCESS,
cls.SKIPPED,
]
def fprint(msg):
print(msg, end="")
sys.stdout.flush()
if __name__ == "__main__":
server = os.environ['CI_SERVER_URL']
gl = gitlab.Gitlab(server,
private_token=os.environ.get('GITLAB_API_TOKEN'),
job_token=os.environ.get('CI_JOB_TOKEN'))
def get_matching_user_project(project, branch):
cerbero = gl.projects.get(project)
# Search for matching branches, return only if the branch name matches
# exactly
for b in cerbero.branches.list(search=cerbero_branch, iterator=True):
if branch == b.name:
return cerbero
return None
cerbero = None
# We do not want to run on (often out of date) user upstream branch
if os.environ["CI_COMMIT_REF_NAME"] != os.environ['CERBERO_UPSTREAM_BRANCH']:
try:
cerbero_name = f'{os.environ["CI_PROJECT_NAMESPACE"]}/cerbero'
cerbero_branch = os.environ["CI_COMMIT_REF_NAME"]
cerbero = get_matching_user_project(cerbero_name, cerbero_branch)
except gitlab.exceptions.GitlabGetError:
pass
if cerbero is None:
cerbero_name = CERBERO_PROJECT
cerbero_branch = os.environ["CERBERO_UPSTREAM_BRANCH"]
cerbero = gl.projects.get(cerbero_name)
fprint(f"-> Triggering on branch {cerbero_branch} in {cerbero_name}\n")
# CI_PROJECT_URL is not necessarily the project where the branch we need to
# build resides, for instance merge request pipelines can be run on
# 'gstreamer' namespace. Fetch the branch name in the same way, just in
# case it breaks in the future.
if 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' in os.environ:
project_url = os.environ['CI_MERGE_REQUEST_SOURCE_PROJECT_URL']
project_branch = os.environ['CI_MERGE_REQUEST_SOURCE_BRANCH_NAME']
else:
project_url = os.environ['CI_PROJECT_URL']
project_branch = os.environ['CI_COMMIT_REF_NAME']
variables = {
"CI_GST_PLUGINS_RS_URL": project_url,
"CI_GST_PLUGINS_RS_REF_NAME": project_branch,
# This tells cerbero CI that this is a pipeline started via the
# trigger API, which means it can use a deps cache instead of
# building from scratch.
"CI_GSTREAMER_TRIGGERED": "true",
}
pipe = cerbero.trigger_pipeline(
token=os.environ['CI_JOB_TOKEN'],
ref=cerbero_branch,
variables=variables,
)
fprint(f'Cerbero pipeline running at {pipe.web_url} ')
while True:
time.sleep(15)
pipe.refresh()
if Status.is_finished(pipe.status):
fprint(f": {pipe.status}\n")
sys.exit(0 if pipe.status == Status.SUCCESS else 1)
else:
fprint(".")

ci/check-meson-version.sh
@@ -1,14 +0,0 @@
#!/bin/bash
MESON_VERSION=`head -n5 meson.build | grep ' version\s*:' | sed -e "s/.*version\s*:\s*'//" -e "s/',.*//"`
CARGO_VERSION=`cat Cargo.toml | grep -A1 workspace.package | grep ^version | sed -e 's/^version = "\(.*\)"/\1/'`
echo "gst-plugins-rs version (meson.build) : $MESON_VERSION"
echo "gst-plugins-rs version (Cargo.toml) : $CARGO_VERSION"
if test "x$MESON_VERSION" != "x$CARGO_VERSION"; then
echo
echo "===> Version mismatch between meson.build and Cargo.toml! <==="
echo
exit 1;
fi

@@ -36,7 +36,6 @@ function Run-Tests {
}
$env:G_DEBUG="fatal_warnings"
$env:RUST_BACKTRACE="1"
cargo test --no-fail-fast --color=always --workspace $local_exclude --all-targets $Features
if (!$?) {

deny.toml
@@ -70,15 +70,6 @@ version = "0.9"
[[bans.skip]]
name = "hmac"
version = "0.11"
[[bans.skip]]
name = "zerocopy"
version = "0.6"
[[bans.skip]]
name = "multimap"
version = "0.8"
[[bans.skip]]
name = "nix"
version = "0.23"
# field-offset and nix depend on an older memoffset
# https://github.com/Diggsey/rust-field-offset/pull/23
@@ -91,23 +82,22 @@ version = "0.6"
[[bans.skip]]
name = "hermit-abi"
version = "0.1"
[[bans.skip]]
name = "hermit-abi"
version = "0.3"
# Various crates depend on an older version of base64
[[bans.skip]]
name = "base64"
version = "0.13"
[[bans.skip]]
name = "base64"
version = "0.21"
# Various crates depend on an older version of socket2
[[bans.skip]]
name = "socket2"
version = "0.4"
# Various crates depend on an older version of syn
[[bans.skip]]
name = "syn"
version = "1.0"
# Various crates depend on an older version of bitflags
[[bans.skip]]
name = "bitflags"
@@ -137,11 +127,6 @@ version = "0.12"
name = "itertools"
version = "0.11"
# various rav1e / dssim-core depend on an old version of itertools
[[bans.skip]]
name = "itertools"
version = "0.12"
# matchers depends on an old version of regex-automata
[[bans.skip]]
name = "regex-automata"
@@ -199,62 +184,6 @@ version = "0.2"
[[bans.skip]]
name = "toml_edit"
version = "0.21"
[[bans.skip]]
name = "winnow"
version = "0.5"
# Various crates depend on an older version of heck
[[bans.skip]]
name = "heck"
version = "0.4"
# Various crates depend on an older version of hyper / reqwest / headers / etc
[[bans.skip]]
name = "hyper"
version = "0.14"
[[bans.skip]]
name = "hyper-tls"
version = "0.5"
[[bans.skip]]
name = "http-body"
version = "0.4"
[[bans.skip]]
name = "headers-core"
version = "0.2"
[[bans.skip]]
name = "headers"
version = "0.3"
[[bans.skip]]
name = "h2"
version = "0.3"
[[bans.skip]]
name = "reqwest"
version = "0.11"
[[bans.skip]]
name = "rustls-pemfile"
version = "1.0"
[[bans.skip]]
name = "winreg"
version = "0.50"
# The AWS SDK uses old versions of rustls and related crates
[[bans.skip]]
name = "rustls"
version = "0.21"
[[bans.skip]]
name = "rustls-native-certs"
version = "0.6"
[[bans.skip]]
name = "rustls-webpki"
version = "0.101"
# warp depends on an older version of tokio-tungstenite
[[bans.skip]]
name = "tokio-tungstenite"
version = "0.21"
[[bans.skip]]
name = "tungstenite"
version = "0.21"
[sources]
unknown-registry = "deny"

@@ -1,9 +1,5 @@
build_hotdoc = false
if get_option('doc').disabled()
subdir_done()
endif
if meson.is_cross_build()
if get_option('doc').enabled()
error('Documentation enabled but building the doc while cross building is not supported yet.')

File diff suppressed because it is too large.

generic/gopbuffer/Cargo.toml
@@ -1,44 +0,0 @@
[package]
name = "gst-plugin-gopbuffer"
version.workspace = true
authors = ["Matthew Waters <matthew@centricular.com>"]
license = "MPL-2.0"
description = "Store complete groups of pictures at a time"
repository.workspace = true
edition.workspace = true
rust-version.workspace = true
[dependencies]
anyhow = "1"
gst = { workspace = true, features = ["v1_18"] }
gst-video = { workspace = true, features = ["v1_18"] }
once_cell.workspace = true
[lib]
name = "gstgopbuffer"
crate-type = ["cdylib", "rlib"]
path = "src/lib.rs"
[dev-dependencies]
gst-app = { workspace = true, features = ["v1_18"] }
gst-check = { workspace = true, features = ["v1_18"] }
[build-dependencies]
gst-plugin-version-helper = { path="../../version-helper" }
[features]
static = []
capi = []
[package.metadata.capi]
min_version = "0.8.0"
[package.metadata.capi.header]
enabled = false
[package.metadata.capi.library]
install_subdir = "gstreamer-1.0"
versioning = false
[package.metadata.capi.pkg_config]
requires_private = "gstreamer-1.0, gstreamer-base-1.0, gstreamer-audio-1.0, gstreamer-video-1.0, gobject-2.0, glib-2.0, gmodule-2.0"

@@ -1,373 +0,0 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

generic/gopbuffer/build.rs
@@ -1,3 +0,0 @@
fn main() {
gst_plugin_version_helper::info()
}

@@ -1,880 +0,0 @@
// Copyright (C) 2023 Matthew Waters <matthew@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
/**
* SECTION:element-gopbuffer
*
* #gopbuffer is an element that can be used to store a minimum duration of data delimited by
* discrete GOPs (Groups of Pictures). It does this by differentiating on the DELTA_UNIT
* flag on each input buffer.
*
* One example of the usefulness of #gopbuffer is its ability to store a backlog of data starting
* on a key frame boundary if, say, the previous 10 seconds of a stream should be recorded to
* disk.
*
* ## Example pipeline
*
* |[
* gst-launch videotestsrc ! vp8enc ! gopbuffer minimum-duration=10000000000 ! fakesink
* ]|
*
* Since: plugins-rs-0.13.0
*/
use gst::glib;
use gst::prelude::*;
use gst::subclass::prelude::*;
use std::collections::VecDeque;
use std::sync::Mutex;
use once_cell::sync::Lazy;
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"gopbuffer",
gst::DebugColorFlags::empty(),
Some("GopBuffer Element"),
)
});
const DEFAULT_MIN_TIME: gst::ClockTime = gst::ClockTime::from_seconds(1);
const DEFAULT_MAX_TIME: Option<gst::ClockTime> = None;
#[derive(Debug, Clone)]
struct Settings {
min_time: gst::ClockTime,
max_time: Option<gst::ClockTime>,
}
impl Default for Settings {
fn default() -> Self {
Settings {
min_time: DEFAULT_MIN_TIME,
max_time: DEFAULT_MAX_TIME,
}
}
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum DeltaFrames {
/// Only single completely decodable frames
IntraOnly,
/// Frames may depend on past frames
PredictiveOnly,
/// Frames may depend on past or future frames
Bidirectional,
}
impl DeltaFrames {
/// Whether dts is required to order buffers differently from presentation order
pub(crate) fn requires_dts(&self) -> bool {
matches!(self, Self::Bidirectional)
}
/// Whether this coding structure does not allow delta flags on buffers
pub(crate) fn intra_only(&self) -> bool {
matches!(self, Self::IntraOnly)
}
pub(crate) fn from_caps(caps: &gst::CapsRef) -> Option<Self> {
let s = caps.structure(0)?;
Some(match s.name().as_str() {
"video/x-h264" | "video/x-h265" => DeltaFrames::Bidirectional,
"video/x-vp8" | "video/x-vp9" | "video/x-av1" => DeltaFrames::PredictiveOnly,
"image/jpeg" | "image/png" | "video/x-raw" => DeltaFrames::IntraOnly,
_ => return None,
})
}
}
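// For example, with the mapping above, "video/x-h264" yields Bidirectional,
// so requires_dts() is true and buffers must carry DTS, while "video/x-raw"
// yields IntraOnly, so delta-flagged buffers are rejected by the element.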
// TODO: add buffer list support
#[derive(Debug)]
enum GopItem {
Buffer(gst::Buffer),
Event(gst::Event),
}
struct Gop {
// all times are in running time
start_pts: gst::ClockTime,
start_dts: Option<gst::Signed<gst::ClockTime>>,
earliest_pts: gst::ClockTime,
final_earliest_pts: bool,
end_pts: gst::ClockTime,
end_dts: Option<gst::Signed<gst::ClockTime>>,
final_end_pts: bool,
// Buffer or event
data: VecDeque<GopItem>,
}
impl Gop {
fn push_on_pad(mut self, pad: &gst::Pad) -> Result<gst::FlowSuccess, gst::FlowError> {
let mut iter = self.data.iter().filter_map(|item| match item {
GopItem::Buffer(buffer) => buffer.pts(),
_ => None,
});
let first_pts = iter.next();
let last_pts = iter.last();
gst::debug!(
CAT,
"pushing gop with start pts {} end pts {}",
first_pts.display(),
last_pts.display(),
);
for item in self.data.drain(..) {
match item {
GopItem::Buffer(buffer) => {
pad.push(buffer)?;
}
GopItem::Event(event) => {
pad.push_event(event);
}
}
}
Ok(gst::FlowSuccess::Ok)
}
}
struct Stream {
sinkpad: gst::Pad,
srcpad: gst::Pad,
sink_segment: Option<gst::FormattedSegment<gst::ClockTime>>,
delta_frames: DeltaFrames,
queued_gops: VecDeque<Gop>,
}
impl Stream {
fn queue_buffer(
&mut self,
buffer: gst::Buffer,
segment: &gst::FormattedSegment<gst::ClockTime>,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let pts_position = buffer.pts().unwrap();
let end_pts_position = pts_position
.opt_add(buffer.duration())
.unwrap_or(pts_position);
let pts = segment
.to_running_time_full(pts_position)
.ok_or_else(|| {
gst::error!(CAT, obj: self.sinkpad, "Couldn't convert PTS to running time");
gst::FlowError::Error
})?
.positive()
.unwrap_or_else(|| {
gst::warning!(CAT, obj: self.sinkpad, "Negative PTSs are not supported");
gst::ClockTime::ZERO
});
let end_pts = segment
.to_running_time_full(end_pts_position)
.ok_or_else(|| {
gst::error!(
CAT,
obj: self.sinkpad,
"Couldn't convert end PTS to running time"
);
gst::FlowError::Error
})?
.positive()
.unwrap_or_else(|| {
gst::warning!(CAT, obj: self.sinkpad, "Negative PTSs are not supported");
gst::ClockTime::ZERO
});
let (dts, end_dts) = if !self.delta_frames.requires_dts() {
(None, None)
} else {
let dts_position = buffer.dts().expect("No dts");
let end_dts_position = buffer
.duration()
.opt_add(dts_position)
.unwrap_or(dts_position);
let dts = segment.to_running_time_full(dts_position).ok_or_else(|| {
gst::error!(CAT, obj: self.sinkpad, "Couldn't convert DTS to running time");
gst::FlowError::Error
})?;
let end_dts = segment
.to_running_time_full(end_dts_position)
.ok_or_else(|| {
gst::error!(
CAT,
obj: self.sinkpad,
"Couldn't convert end DTS to running time"
);
gst::FlowError::Error
})?;
let end_dts = std::cmp::max(end_dts, dts);
(Some(dts), Some(end_dts))
};
if !buffer.flags().contains(gst::BufferFlags::DELTA_UNIT) {
gst::debug!(
CAT,
"New GOP detected with buffer pts {} dts {}",
buffer.pts().display(),
buffer.dts().display()
);
let gop = Gop {
start_pts: pts,
start_dts: dts,
earliest_pts: pts,
final_earliest_pts: false,
end_pts: pts,
end_dts,
final_end_pts: false,
data: VecDeque::from([GopItem::Buffer(buffer)]),
};
self.queued_gops.push_front(gop);
if let Some(prev_gop) = self.queued_gops.get_mut(1) {
gst::debug!(
CAT,
obj: self.sinkpad,
"Updating previous GOP starting at PTS {} to end PTS {}",
prev_gop.earliest_pts,
pts,
);
prev_gop.end_pts = std::cmp::max(prev_gop.end_pts, pts);
prev_gop.end_dts = std::cmp::max(prev_gop.end_dts, dts);
if !self.delta_frames.requires_dts() {
prev_gop.final_end_pts = true;
}
if !prev_gop.final_earliest_pts {
// Don't bother logging this for intra-only streams as it would be for every
// single buffer.
if self.delta_frames.requires_dts() {
gst::debug!(
CAT,
obj: self.sinkpad,
"Previous GOP has final earliest PTS at {}",
prev_gop.earliest_pts
);
}
prev_gop.final_earliest_pts = true;
if let Some(prev_prev_gop) = self.queued_gops.get_mut(2) {
prev_prev_gop.final_end_pts = true;
}
}
}
} else if let Some(gop) = self.queued_gops.front_mut() {
gop.end_pts = std::cmp::max(gop.end_pts, end_pts);
gop.end_dts = gop.end_dts.opt_max(end_dts);
gop.data.push_back(GopItem::Buffer(buffer));
if self.delta_frames.requires_dts() {
let dts = dts.unwrap();
if gop.earliest_pts > pts && !gop.final_earliest_pts {
gst::debug!(
CAT,
obj: self.sinkpad,
"Updating current GOP earliest PTS from {} to {}",
gop.earliest_pts,
pts
);
gop.earliest_pts = pts;
if let Some(prev_gop) = self.queued_gops.get_mut(1) {
if prev_gop.end_pts < pts {
gst::debug!(
CAT,
obj: self.sinkpad,
"Updating previous GOP starting PTS {} end time from {} to {}",
pts,
prev_gop.end_pts,
pts
);
prev_gop.end_pts = pts;
}
}
}
let gop = self.queued_gops.front_mut().unwrap();
// The earliest PTS is known when the current DTS is bigger or equal to the first
// PTS that was observed in this GOP. If there was another frame later that had a
// lower PTS then it wouldn't be possible to display it in time anymore, i.e. the
// stream would be invalid.
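// Worked example with hypothetical timestamps: a GOP starts with a
// keyframe at PTS 4 / DTS 0, followed by frames at PTS 2 / DTS 1,
// PTS 1 / DTS 2 and PTS 3 / DTS 3, so the tracked earliest PTS is 1.
// Once a buffer with DTS >= 4 (the GOP's start PTS) arrives, any later
// buffer must have PTS >= DTS >= 4, so the earliest PTS can no longer
// decrease.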
if gop.start_pts <= dts && !gop.final_earliest_pts {
gst::debug!(
CAT,
obj: self.sinkpad,
"GOP has final earliest PTS at {}",
gop.earliest_pts
);
gop.final_earliest_pts = true;
if let Some(prev_gop) = self.queued_gops.get_mut(1) {
prev_gop.final_end_pts = true;
}
}
}
} else {
gst::debug!(
CAT,
"dropping buffer before first GOP with pts {} dts {}",
buffer.pts().display(),
buffer.dts().display()
);
}
if let Some((prev_gop, first_gop)) = Option::zip(
self.queued_gops.iter().find(|gop| gop.final_end_pts),
self.queued_gops.back(),
) {
gst::debug!(
CAT,
obj: self.sinkpad,
"Queued full GOPs duration updated to {}",
prev_gop.end_pts.saturating_sub(first_gop.earliest_pts),
);
}
gst::debug!(
CAT,
obj: self.sinkpad,
"Queued duration updated to {}",
Option::zip(self.queued_gops.front(), self.queued_gops.back())
.map(|(end, start)| end.end_pts.saturating_sub(start.start_pts))
.unwrap_or(gst::ClockTime::ZERO)
);
Ok(gst::FlowSuccess::Ok)
}
fn oldest_gop(&mut self) -> Option<Gop> {
self.queued_gops.pop_back()
}
fn peek_oldest_gop(&self) -> Option<&Gop> {
self.queued_gops.back()
}
fn peek_second_oldest_gop(&self) -> Option<&Gop> {
if self.queued_gops.len() <= 1 {
return None;
}
self.queued_gops.get(self.queued_gops.len() - 2)
}
fn drain_all(&mut self) -> impl Iterator<Item = Gop> + '_ {
self.queued_gops.drain(..)
}
fn flush(&mut self) {
self.queued_gops.clear();
}
}
#[derive(Default)]
struct State {
streams: Vec<Stream>,
}
impl State {
fn stream_from_sink_pad(&self, pad: &gst::Pad) -> Option<&Stream> {
self.streams.iter().find(|stream| &stream.sinkpad == pad)
}
fn stream_from_sink_pad_mut(&mut self, pad: &gst::Pad) -> Option<&mut Stream> {
self.streams
.iter_mut()
.find(|stream| &stream.sinkpad == pad)
}
fn stream_from_src_pad(&self, pad: &gst::Pad) -> Option<&Stream> {
self.streams.iter().find(|stream| &stream.srcpad == pad)
}
}
#[derive(Default)]
pub(crate) struct GopBuffer {
state: Mutex<State>,
settings: Mutex<Settings>,
}
impl GopBuffer {
fn sink_chain(
&self,
pad: &gst::Pad,
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let obj = self.obj();
if buffer.pts().is_none() {
gst::error!(CAT, obj: obj, "Require timestamped buffers!");
return Err(gst::FlowError::Error);
}
let settings = self.settings.lock().unwrap().clone();
let mut state = self.state.lock().unwrap();
let stream = state
.stream_from_sink_pad_mut(pad)
.expect("pad without an internal Stream");
let Some(segment) = stream.sink_segment.clone() else {
gst::element_imp_error!(self, gst::CoreError::Clock, ["Got buffer before segment"]);
return Err(gst::FlowError::Error);
};
if stream.delta_frames.intra_only() && buffer.flags().contains(gst::BufferFlags::DELTA_UNIT)
{
gst::error!(CAT, obj: pad, "Intra-only stream with delta units");
return Err(gst::FlowError::Error);
}
if stream.delta_frames.requires_dts() && buffer.dts().is_none() {
gst::error!(CAT, obj: pad, "Require DTS for video streams");
return Err(gst::FlowError::Error);
}
let srcpad = stream.srcpad.clone();
stream.queue_buffer(buffer, &segment)?;
let mut gops_to_push = vec![];
let Some(newest_gop) = stream.queued_gops.front() else {
return Ok(gst::FlowSuccess::Ok);
};
// we are looking for the latest timestamp here (which should be the largest value)
let newest_ts = if stream.delta_frames.requires_dts() {
newest_gop.end_dts.unwrap()
} else {
gst::Signed::Positive(newest_gop.end_pts)
};
loop {
// check stored times as though the oldest GOP doesn't exist.
let Some(second_oldest_gop) = stream.peek_second_oldest_gop() else {
break;
};
// we are looking for the oldest timestamp of the remaining GOPs here (the
// smallest value). This is the potential new start time if the oldest GOP
// is dropped.
let oldest_ts = if stream.delta_frames.requires_dts() {
second_oldest_gop.start_dts.unwrap()
} else {
gst::Signed::Positive(second_oldest_gop.start_pts)
};
let stored_duration_without_oldest = newest_ts.saturating_sub(oldest_ts);
gst::trace!(
CAT,
obj: obj,
"newest_pts {}, second oldest_pts {}, stored_duration_without_oldest_gop {}, min-time {}",
newest_ts.display(),
oldest_ts.display(),
stored_duration_without_oldest.display(),
settings.min_time.display()
);
if stored_duration_without_oldest < settings.min_time {
break;
}
gops_to_push.push(stream.oldest_gop().unwrap());
}
if let Some(max_time) = settings.max_time {
while let Some(oldest_gop) = stream.peek_oldest_gop() {
let oldest_ts = oldest_gop.data.iter().rev().find_map(|item| match item {
GopItem::Buffer(buffer) => {
if stream.delta_frames.requires_dts() {
Some(gst::Signed::Positive(buffer.dts().unwrap()))
} else {
Some(gst::Signed::Positive(buffer.pts().unwrap()))
}
}
_ => None,
});
if newest_ts
.opt_saturating_sub(oldest_ts)
.is_some_and(|diff| diff > gst::Signed::Positive(max_time))
{
gst::warning!(CAT, obj: obj, "Stored data has overflowed the maximum allowed stored time {}, pushing oldest GOP", max_time.display());
gops_to_push.push(stream.oldest_gop().unwrap());
} else {
break;
}
}
}
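// Release the state lock before pushing: pushing buffers downstream may
// re-enter this element (e.g. via serialized queries or events) and
// taking the lock again would deadlock.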
drop(state);
for gop in gops_to_push.into_iter() {
gop.push_on_pad(&srcpad)?;
}
Ok(gst::FlowSuccess::Ok)
}
fn sink_event(&self, pad: &gst::Pad, event: gst::Event) -> bool {
let obj = self.obj();
let mut state = self.state.lock().unwrap();
let stream = state
.stream_from_sink_pad_mut(pad)
.expect("pad without an internal Stream!");
match event.view() {
gst::EventView::Caps(caps) => {
let Some(delta_frames) = DeltaFrames::from_caps(caps.caps()) else {
return false;
};
stream.delta_frames = delta_frames;
}
gst::EventView::FlushStop(_flush) => {
gst::debug!(CAT, obj: obj, "flushing stored data");
stream.flush();
}
gst::EventView::Eos(_eos) => {
gst::debug!(CAT, obj: obj, "draining data at EOS");
let gops = stream.drain_all().collect::<Vec<_>>();
let srcpad = stream.srcpad.clone();
drop(state);
for gop in gops.into_iter() {
let _ = gop.push_on_pad(&srcpad);
}
// once we've pushed all the data, we can push the corresponding eos
gst::Pad::event_default(pad, Some(&*obj), event);
return true;
}
gst::EventView::Segment(segment) => {
let Ok(segment) = segment.segment().clone().downcast::<gst::ClockTime>() else {
gst::error!(CAT, "Non TIME segments are not supported");
return false;
};
stream.sink_segment = Some(segment);
}
_ => (),
};
if event.is_serialized() {
if stream.peek_oldest_gop().is_none() {
// if there is nothing queued, the event can go straight through
gst::trace!(CAT, obj: obj, "nothing queued, event {:?} passthrough", event.structure().map(|s| s.name().as_str()));
drop(state);
return gst::Pad::event_default(pad, Some(&*obj), event);
}
let gop = stream.queued_gops.front_mut().unwrap();
gop.data.push_back(GopItem::Event(event));
true
} else {
// non-serialized events can be pushed directly
drop(state);
gst::Pad::event_default(pad, Some(&*obj), event)
}
}
fn sink_query(&self, pad: &gst::Pad, query: &mut gst::QueryRef) -> bool {
let obj = self.obj();
if query.is_serialized() {
// TODO: serialized queries somehow?
gst::warning!(CAT, obj: pad, "Serialized queries are currently not supported");
return false;
}
gst::Pad::query_default(pad, Some(&*obj), query)
}
fn src_query(&self, pad: &gst::Pad, query: &mut gst::QueryRef) -> bool {
let obj = self.obj();
match query.view_mut() {
gst::QueryViewMut::Latency(latency) => {
let mut upstream_query = gst::query::Latency::new();
let otherpad = {
let state = self.state.lock().unwrap();
let Some(stream) = state.stream_from_src_pad(pad) else {
return false;
};
stream.sinkpad.clone()
};
let ret = otherpad.peer_query(&mut upstream_query);
if ret {
let (live, mut min, mut max) = upstream_query.result();
let settings = self.settings.lock().unwrap();
min += settings.max_time.unwrap_or(settings.min_time);
max = max.opt_max(settings.max_time);
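// Hypothetical numbers: if upstream reports a minimum latency of 100ms
// and minimum-duration is 200ms (max-size-time unset), the reported
// minimum becomes 300ms, since up to min-time of data is held here.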
latency.set(live, min, max);
gst::debug!(
CAT,
obj: pad,
"Latency query response: live {} min {} max {}",
live,
min,
max.display()
);
}
ret
}
_ => gst::Pad::query_default(pad, Some(&*obj), query),
}
}
fn iterate_internal_links(&self, pad: &gst::Pad) -> gst::Iterator<gst::Pad> {
let state = self.state.lock().unwrap();
let otherpad = match pad.direction() {
gst::PadDirection::Src => state
.stream_from_src_pad(pad)
.map(|stream| stream.sinkpad.clone()),
gst::PadDirection::Sink => state
.stream_from_sink_pad(pad)
.map(|stream| stream.srcpad.clone()),
_ => unreachable!(),
};
if let Some(otherpad) = otherpad {
gst::Iterator::from_vec(vec![otherpad])
} else {
gst::Iterator::from_vec(vec![])
}
}
}
#[glib::object_subclass]
impl ObjectSubclass for GopBuffer {
const NAME: &'static str = "GstGopBuffer";
type Type = super::GopBuffer;
type ParentType = gst::Element;
}
impl ObjectImpl for GopBuffer {
fn properties() -> &'static [glib::ParamSpec] {
static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| {
vec![
glib::ParamSpecUInt64::builder("minimum-duration")
.nick("Minimum Duration")
.blurb("The minimum duration to store")
.default_value(DEFAULT_MIN_TIME.nseconds())
.mutable_ready()
.build(),
glib::ParamSpecUInt64::builder("max-size-time")
.nick("Maximum Duration")
.blurb("The maximum duration to store (0=disable)")
.default_value(0)
.mutable_ready()
.build(),
]
});
&PROPERTIES
}
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
match pspec.name() {
"minimum-duration" => {
let mut settings = self.settings.lock().unwrap();
let min_time = value.get().expect("type checked upstream");
if settings.min_time != min_time {
settings.min_time = min_time;
drop(settings);
self.post_message(gst::message::Latency::builder().src(&*self.obj()).build());
}
}
"max-size-time" => {
let mut settings = self.settings.lock().unwrap();
let max_time = value
.get::<Option<gst::ClockTime>>()
.expect("type checked upstream");
let max_time = if matches!(max_time, Some(gst::ClockTime::ZERO) | None) {
None
} else {
max_time
};
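// A max-size-time of 0 (the default) is normalized to None, meaning no
// upper bound is enforced on the amount of queued data.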
if settings.max_time != max_time {
settings.max_time = max_time;
drop(settings);
self.post_message(gst::message::Latency::builder().src(&*self.obj()).build());
}
}
_ => unimplemented!(),
}
}
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
match pspec.name() {
"minimum-duration" => {
let settings = self.settings.lock().unwrap();
settings.min_time.to_value()
}
"max-size-time" => {
let settings = self.settings.lock().unwrap();
settings.max_time.unwrap_or(gst::ClockTime::ZERO).to_value()
}
_ => unimplemented!(),
}
}
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
let class = obj.class();
let templ = class.pad_template("video_sink").unwrap();
let sinkpad = gst::Pad::builder_from_template(&templ)
.name("video_sink")
.chain_function(|pad, parent, buffer| {
GopBuffer::catch_panic_pad_function(
parent,
|| Err(gst::FlowError::Error),
|gopbuffer| gopbuffer.sink_chain(pad, buffer),
)
})
.event_function(|pad, parent, event| {
GopBuffer::catch_panic_pad_function(
parent,
|| false,
|gopbuffer| gopbuffer.sink_event(pad, event),
)
})
.query_function(|pad, parent, query| {
GopBuffer::catch_panic_pad_function(
parent,
|| false,
|gopbuffer| gopbuffer.sink_query(pad, query),
)
})
.iterate_internal_links_function(|pad, parent| {
GopBuffer::catch_panic_pad_function(
parent,
|| gst::Pad::iterate_internal_links_default(pad, parent),
|gopbuffer| gopbuffer.iterate_internal_links(pad),
)
})
.flags(gst::PadFlags::PROXY_CAPS)
.build();
obj.add_pad(&sinkpad).unwrap();
let templ = class.pad_template("video_src").unwrap();
let srcpad = gst::Pad::builder_from_template(&templ)
.name("video_src")
.query_function(|pad, parent, query| {
GopBuffer::catch_panic_pad_function(
parent,
|| false,
|gopbuffer| gopbuffer.src_query(pad, query),
)
})
.iterate_internal_links_function(|pad, parent| {
GopBuffer::catch_panic_pad_function(
parent,
|| gst::Pad::iterate_internal_links_default(pad, parent),
|gopbuffer| gopbuffer.iterate_internal_links(pad),
)
})
.build();
obj.add_pad(&srcpad).unwrap();
let mut state = self.state.lock().unwrap();
state.streams.push(Stream {
sinkpad,
srcpad,
sink_segment: None,
delta_frames: DeltaFrames::IntraOnly,
queued_gops: VecDeque::new(),
});
}
}
impl GstObjectImpl for GopBuffer {}
impl ElementImpl for GopBuffer {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"GopBuffer",
"Video",
"GOP Buffer",
"Matthew Waters <matthew@centricular.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
// This element is designed to support multiple streams, but that support
// has not been implemented yet.
//
// The things missing for multiple (audio or video) streams are:
// 1. More pad templates
// 2. Choosing a main stream to drive the timestamp logic between all input streams
// 3. Allowing either the main stream to cause other streams to push data
// regardless of its GOP state, or allow each stream to be individually
// delimited by GOP but all still within the minimum duration.
let video_caps = [
gst::Structure::builder("video/x-h264")
.field("stream-format", gst::List::new(["avc", "avc3"]))
.field("alignment", "au")
.build(),
gst::Structure::builder("video/x-h265")
.field("stream-format", gst::List::new(["hvc1", "hev1"]))
.field("alignment", "au")
.build(),
gst::Structure::builder("video/x-vp8").build(),
gst::Structure::builder("video/x-vp9").build(),
gst::Structure::builder("video/x-av1")
.field("stream-format", "obu-stream")
.field("alignment", "tu")
.build(),
]
.into_iter()
.collect::<gst::Caps>();
let src_pad_template = gst::PadTemplate::new(
"video_src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&video_caps,
)
.unwrap();
let sink_pad_template = gst::PadTemplate::new(
"video_sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&video_caps,
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
fn change_state(
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
#[allow(clippy::single_match)]
match transition {
gst::StateChange::NullToReady => {
let settings = self.settings.lock().unwrap();
if let Some(max_time) = settings.max_time {
if max_time < settings.min_time {
gst::element_imp_error!(
self,
gst::CoreError::StateChange,
["Configured maximum time is less than the minimum time"]
);
return Err(gst::StateChangeError);
}
}
}
_ => (),
}
self.parent_change_state(transition)?;
Ok(gst::StateChangeSuccess::Success)
}
}
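As a usage sketch (assuming the plugin is registered, for example via plugin_register_static() as in the tests below), the element can be created and configured like this:

use gst::prelude::*;

// Hold at least two seconds of complete GOPs before pushing downstream.
fn make_gopbuffer() -> Result<gst::Element, gst::glib::BoolError> {
    gst::ElementFactory::make("gopbuffer")
        .property("minimum-duration", gst::ClockTime::from_seconds(2))
        .build()
}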

View file

@ -1,27 +0,0 @@
// Copyright (C) 2022 Matthew Waters <matthew@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub(crate) struct GopBuffer(ObjectSubclass<imp::GopBuffer>) @extends gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"gopbuffer",
gst::Rank::PRIMARY,
GopBuffer::static_type(),
)?;
Ok(())
}

View file

@ -1,34 +0,0 @@
// Copyright (C) 2022 Matthew Waters <matthew@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(clippy::non_send_fields_in_send_ty, unused_doc_comments)]
/**
* plugin-gopbuffer:
*
* Since: plugins-rs-0.13.0
*/
use gst::glib;
mod gopbuffer;
fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gopbuffer::register(plugin)
}
gst::plugin_define!(
gopbuffer,
env!("CARGO_PKG_DESCRIPTION"),
plugin_init,
concat!(env!("CARGO_PKG_VERSION"), "-", env!("COMMIT_ID")),
// FIXME: MPL-2.0 is only allowed since 1.18.3 (as unknown) and 1.20 (as known)
"MPL",
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_REPOSITORY"),
env!("BUILD_REL_DATE")
);

View file

@ -1,128 +0,0 @@
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
//
use gst::prelude::*;
fn init() {
use std::sync::Once;
static INIT: Once = Once::new();
INIT.call_once(|| {
gst::init().unwrap();
gstgopbuffer::plugin_register_static().unwrap();
});
}
macro_rules! check_buffer {
($buf1:expr, $buf2:expr) => {
assert_eq!($buf1.pts(), $buf2.pts());
assert_eq!($buf1.dts(), $buf2.dts());
assert_eq!($buf1.flags(), $buf2.flags());
};
}
#[test]
fn test_min_one_gop_held() {
const OFFSET: gst::ClockTime = gst::ClockTime::from_seconds(10);
init();
let mut h =
gst_check::Harness::with_padnames("gopbuffer", Some("video_sink"), Some("video_src"));
// 200ms min buffer time
let element = h.element().unwrap();
element.set_property("minimum-duration", gst::ClockTime::from_mseconds(200));
h.set_src_caps(
gst::Caps::builder("video/x-h264")
.field("width", 320i32)
.field("height", 240i32)
.field("framerate", gst::Fraction::new(10, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::with_size(1).unwrap())
.build(),
);
let mut in_segment = gst::Segment::new();
in_segment.set_format(gst::Format::Time);
in_segment.set_base(10.seconds());
assert!(h.push_event(gst::event::Segment::builder(&in_segment).build()));
h.play();
// Push 6 buffers of 100ms each; the 2nd and 5th buffers are keyframes (no DELTA_UNIT flag)
let in_buffers: Vec<_> = (0..6)
.map(|i| {
let mut buffer = gst::Buffer::with_size(1).unwrap();
{
let buffer = buffer.get_mut().unwrap();
buffer.set_pts(OFFSET + gst::ClockTime::from_mseconds(i * 100));
buffer.set_dts(OFFSET + gst::ClockTime::from_mseconds(i * 100));
buffer.set_duration(gst::ClockTime::from_mseconds(100));
if i != 1 && i != 4 {
buffer.set_flags(gst::BufferFlags::DELTA_UNIT);
}
}
assert_eq!(h.push(buffer.clone()), Ok(gst::FlowSuccess::Ok));
buffer
})
.collect();
// pull mandatory events
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::StreamStart);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Caps);
// GstHarness pushes its own segment event that we need to eat
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Segment);
let ev = h.pull_event().unwrap();
let gst::event::EventView::Segment(recv_segment) = ev.view() else {
unreachable!()
};
let recv_segment = recv_segment.segment();
assert_eq!(recv_segment, &in_segment);
// check that at least the first GOP has been output already, as it exceeds
// the minimum-duration value
let mut in_iter = in_buffers.iter();
// the first buffer is dropped because it was not preceded by a keyframe
let _buffer = in_iter.next().unwrap();
// a keyframe
let out = h.pull().unwrap();
let buffer = in_iter.next().unwrap();
check_buffer!(buffer, out);
// not a keyframe
let out = h.pull().unwrap();
let buffer = in_iter.next().unwrap();
check_buffer!(buffer, out);
// not a keyframe
let out = h.pull().unwrap();
let buffer = in_iter.next().unwrap();
check_buffer!(buffer, out);
// no more buffers
assert_eq!(h.buffers_in_queue(), 0);
// push eos to drain out the rest of the data
assert!(h.push_event(gst::event::Eos::new()));
for buffer in in_iter {
let out = h.pull().unwrap();
check_buffer!(buffer, out);
}
// no more buffers
assert_eq!(h.buffers_in_queue(), 0);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Eos);
}

View file

@ -1,43 +0,0 @@
[package]
name = "gst-plugin-originalbuffer"
version.workspace = true
authors = ["Olivier Crête <olivier.crete@collabora.com>"]
repository.workspace = true
license = "MPL-2.0"
description = "GStreamer Origin buffer meta Plugin"
edition.workspace = true
rust-version.workspace = true
[dependencies]
glib.workspace = true
gst.workspace = true
gst-video.workspace = true
atomic_refcell = "0.1"
once_cell.workspace = true
[lib]
name = "gstoriginalbuffer"
crate-type = ["cdylib", "rlib"]
path = "src/lib.rs"
[build-dependencies]
gst-plugin-version-helper.workspace = true
[features]
static = []
capi = []
doc = ["gst/v1_16"]
[package.metadata.capi]
min_version = "0.9.21"
[package.metadata.capi.header]
enabled = false
[package.metadata.capi.library]
install_subdir = "gstreamer-1.0"
versioning = false
import_library = false
[package.metadata.capi.pkg_config]
requires_private = "gstreamer-1.0, gstreamer-base-1.0, gobject-2.0, glib-2.0, gmodule-2.0"

View file

@ -1,3 +0,0 @@
fn main() {
gst_plugin_version_helper::info()
}

View file

@ -1,38 +0,0 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(clippy::non_send_fields_in_send_ty, unused_doc_comments)]
/**
* plugin-originalbuffer:
*
* Since: plugins-rs-0.12 */
use gst::glib;
mod originalbuffermeta;
mod originalbufferrestore;
mod originalbuffersave;
fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
originalbuffersave::register(plugin)?;
originalbufferrestore::register(plugin)?;
Ok(())
}
gst::plugin_define!(
originalbuffer,
env!("CARGO_PKG_DESCRIPTION"),
plugin_init,
concat!(env!("CARGO_PKG_VERSION"), "-", env!("COMMIT_ID")),
"MPL",
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_REPOSITORY"),
env!("BUILD_REL_DATE")
);

View file

@ -1,199 +0,0 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::prelude::*;
use std::fmt;
use std::mem;
#[repr(transparent)]
pub struct OriginalBufferMeta(imp::OriginalBufferMeta);
unsafe impl Send for OriginalBufferMeta {}
unsafe impl Sync for OriginalBufferMeta {}
impl OriginalBufferMeta {
pub fn add(
buffer: &mut gst::BufferRef,
original: gst::Buffer,
caps: Option<gst::Caps>,
) -> gst::MetaRefMut<'_, Self, gst::meta::Standalone> {
unsafe {
// Manually dropping because gst_buffer_add_meta() takes ownership of the
// content of the struct
let mut params =
mem::ManuallyDrop::new(imp::OriginalBufferMetaParams { original, caps });
let meta = gst::ffi::gst_buffer_add_meta(
buffer.as_mut_ptr(),
imp::original_buffer_meta_get_info(),
&mut *params as *mut imp::OriginalBufferMetaParams as gst::glib::ffi::gpointer,
) as *mut imp::OriginalBufferMeta;
Self::from_mut_ptr(buffer, meta)
}
}
pub fn replace(&mut self, original: gst::Buffer, caps: Option<gst::Caps>) {
self.0.original = Some(original);
self.0.caps = caps;
}
pub fn original(&self) -> &gst::Buffer {
self.0.original.as_ref().unwrap()
}
pub fn caps(&self) -> &gst::Caps {
self.0.caps.as_ref().unwrap()
}
}
unsafe impl MetaAPI for OriginalBufferMeta {
type GstType = imp::OriginalBufferMeta;
fn meta_api() -> gst::glib::Type {
imp::original_buffer_meta_api_get_type()
}
}
impl fmt::Debug for OriginalBufferMeta {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("OriginalBufferMeta")
.field("buffer", &self.original())
.finish()
}
}
mod imp {
use gst::glib::translate::*;
use once_cell::sync::Lazy;
use std::mem;
use std::ptr;
pub(super) struct OriginalBufferMetaParams {
pub original: gst::Buffer,
pub caps: Option<gst::Caps>,
}
#[repr(C)]
pub struct OriginalBufferMeta {
parent: gst::ffi::GstMeta,
pub(super) original: Option<gst::Buffer>,
pub(super) caps: Option<gst::Caps>,
}
pub(super) fn original_buffer_meta_api_get_type() -> glib::Type {
static TYPE: Lazy<glib::Type> = Lazy::new(|| unsafe {
let t = from_glib(gst::ffi::gst_meta_api_type_register(
b"GstOriginalBufferMetaAPI\0".as_ptr() as *const _,
[ptr::null::<std::os::raw::c_char>()].as_ptr() as *mut *const _,
));
assert_ne!(t, glib::Type::INVALID);
t
});
*TYPE
}
unsafe extern "C" fn original_buffer_meta_init(
meta: *mut gst::ffi::GstMeta,
params: glib::ffi::gpointer,
_buffer: *mut gst::ffi::GstBuffer,
) -> glib::ffi::gboolean {
assert!(!params.is_null());
let meta = &mut *(meta as *mut OriginalBufferMeta);
let params = ptr::read(params as *const OriginalBufferMetaParams);
let OriginalBufferMetaParams { original, caps } = params;
ptr::write(&mut meta.original, Some(original));
ptr::write(&mut meta.caps, caps);
true.into_glib()
}
unsafe extern "C" fn original_buffer_meta_free(
meta: *mut gst::ffi::GstMeta,
_buffer: *mut gst::ffi::GstBuffer,
) {
let meta = &mut *(meta as *mut OriginalBufferMeta);
meta.original = None;
meta.caps = None;
}
unsafe extern "C" fn original_buffer_meta_transform(
dest: *mut gst::ffi::GstBuffer,
meta: *mut gst::ffi::GstMeta,
_buffer: *mut gst::ffi::GstBuffer,
_type_: glib::ffi::GQuark,
_data: glib::ffi::gpointer,
) -> glib::ffi::gboolean {
let dest = gst::BufferRef::from_mut_ptr(dest);
let meta = &*(meta as *const OriginalBufferMeta);
if dest.meta::<super::OriginalBufferMeta>().is_some() {
return true.into_glib();
}
// We don't store a ref in the meta if it's self-referencing, but we add it
// when copying the meta to another buffer.
super::OriginalBufferMeta::add(
dest,
meta.original.as_ref().unwrap().clone(),
meta.caps.clone(),
);
true.into_glib()
}
pub(super) fn original_buffer_meta_get_info() -> *const gst::ffi::GstMetaInfo {
struct MetaInfo(ptr::NonNull<gst::ffi::GstMetaInfo>);
unsafe impl Send for MetaInfo {}
unsafe impl Sync for MetaInfo {}
static META_INFO: Lazy<MetaInfo> = Lazy::new(|| unsafe {
MetaInfo(
ptr::NonNull::new(gst::ffi::gst_meta_register(
original_buffer_meta_api_get_type().into_glib(),
b"OriginalBufferMeta\0".as_ptr() as *const _,
mem::size_of::<OriginalBufferMeta>(),
Some(original_buffer_meta_init),
Some(original_buffer_meta_free),
Some(original_buffer_meta_transform),
) as *mut gst::ffi::GstMetaInfo)
.expect("Failed to register meta API"),
)
});
META_INFO.0.as_ptr()
}
}
#[test]
fn test() {
gst::init().unwrap();
let mut b = gst::Buffer::with_size(10).unwrap();
let caps = gst::Caps::new_empty_simple("video/x-raw");
let copy = b.copy();
let m = OriginalBufferMeta::add(b.make_mut(), copy, Some(caps.clone()));
assert_eq!(m.caps(), caps.as_ref());
assert_eq!(m.original().clone(), b);
let b2: gst::Buffer = b.copy_deep().unwrap();
let m = b.meta::<OriginalBufferMeta>().unwrap();
assert_eq!(m.caps(), caps.as_ref());
assert_eq!(m.original(), &b);
let m = b2.meta::<OriginalBufferMeta>().unwrap();
assert_eq!(m.caps(), caps.as_ref());
assert_eq!(m.original(), &b);
let b3: gst::Buffer = b2.copy_deep().unwrap();
drop(b2);
let m = b3.meta::<OriginalBufferMeta>().unwrap();
assert_eq!(m.caps(), caps.as_ref());
assert_eq!(m.original(), &b);
}
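As an illustrative sketch (the probe and logging are hypothetical and assume the OriginalBufferMeta type is in scope), downstream code can inspect the meta as buffers pass a pad:

use gst::prelude::*;

// Log the original buffer's PTS whenever a buffer carrying the meta
// passes this pad.
fn watch_original(pad: &gst::Pad) {
    pad.add_probe(gst::PadProbeType::BUFFER, |_pad, info| {
        if let Some(gst::PadProbeData::Buffer(ref buffer)) = info.data {
            if let Some(meta) = buffer.meta::<OriginalBufferMeta>() {
                println!("original PTS: {}", meta.original().pts().display());
            }
        }
        gst::PadProbeReturn::Ok
    });
}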

View file

@ -1,315 +0,0 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::subclass::prelude::*;
use gst_video::prelude::*;
use atomic_refcell::AtomicRefCell;
use crate::originalbuffermeta;
use crate::originalbuffermeta::OriginalBufferMeta;
struct CapsState {
caps: gst::Caps,
vinfo: Option<gst_video::VideoInfo>,
}
impl Default for CapsState {
fn default() -> Self {
CapsState {
caps: gst::Caps::new_empty(),
vinfo: None,
}
}
}
#[derive(Default)]
struct State {
sinkpad_caps: CapsState,
meta_caps: CapsState,
sinkpad_segment: Option<gst::Event>,
}
pub struct OriginalBufferRestore {
state: AtomicRefCell<State>,
src_pad: gst::Pad,
sink_pad: gst::Pad,
}
use once_cell::sync::Lazy;
#[cfg(unused_code)]
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"originalbufferrestore",
gst::DebugColorFlags::empty(),
Some("Restore Original buffer as meta"),
)
});
#[glib::object_subclass]
impl ObjectSubclass for OriginalBufferRestore {
const NAME: &'static str = "GstOriginalBufferRestore";
type Type = super::OriginalBufferRestore;
type ParentType = gst::Element;
fn with_class(klass: &Self::Class) -> Self {
let sink_templ = klass.pad_template("sink").unwrap();
let src_templ = klass.pad_template("src").unwrap();
let sink_pad = gst::Pad::builder_from_template(&sink_templ)
.chain_function(|pad, parent, buffer| {
OriginalBufferRestore::catch_panic_pad_function(
parent,
|| Err(gst::FlowError::Error),
|obj| obj.sink_chain(pad, buffer),
)
})
.event_function(|pad, parent, event| {
OriginalBufferRestore::catch_panic_pad_function(
parent,
|| false,
|obj| obj.sink_event(pad, parent, event),
)
})
.query_function(|pad, parent, query| {
OriginalBufferRestore::catch_panic_pad_function(
parent,
|| false,
|obj| obj.sink_query(pad, parent, query),
)
})
.build();
let src_pad = gst::Pad::builder_from_template(&src_templ)
.event_function(|pad, parent, event| {
OriginalBufferRestore::catch_panic_pad_function(
parent,
|| false,
|obj| obj.src_event(pad, parent, event),
)
})
.build();
Self {
src_pad,
sink_pad,
state: Default::default(),
}
}
}
impl ObjectImpl for OriginalBufferRestore {
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
obj.add_pad(&self.sink_pad).unwrap();
obj.add_pad(&self.src_pad).unwrap();
}
}
impl GstObjectImpl for OriginalBufferRestore {}
impl ElementImpl for OriginalBufferRestore {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"Original Buffer Restore",
"Generic",
"Restores a reference to the buffer in a meta",
"Olivier Crête <olivier.crete@collabora.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let caps = gst::Caps::new_any();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
)
.unwrap();
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&caps,
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
fn change_state(
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
let ret = self.parent_change_state(transition)?;
if transition == gst::StateChange::PausedToReady {
let mut state = self.state.borrow_mut();
*state = State::default();
}
Ok(ret)
}
}
impl OriginalBufferRestore {
fn sink_event(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
event: gst::Event,
) -> bool {
match event.view() {
gst::EventView::Caps(e) => {
let mut state = self.state.borrow_mut();
let caps = e.caps_owned();
let vinfo = gst_video::VideoInfo::from_caps(&caps).ok();
state.sinkpad_caps = CapsState { caps, vinfo };
true
}
gst::EventView::Segment(_) => {
let mut state = self.state.borrow_mut();
state.sinkpad_segment = Some(event);
true
}
_ => gst::Pad::event_default(pad, parent, event),
}
}
fn src_event(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
event: gst::Event,
) -> bool {
if event.type_() == gst::EventType::Reconfigure
|| event.has_name("gst-original-buffer-forward-upstream-event")
{
let s = gst::Structure::builder("gst-original-buffer-forward-upstream-event")
.field("event", event)
.build();
let event = gst::event::CustomUpstream::new(s);
self.sink_pad.push_event(event)
} else {
gst::Pad::event_default(pad, parent, event)
}
}
fn sink_query(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
query: &mut gst::QueryRef,
) -> bool {
if let gst::QueryViewMut::Custom(_) = query.view_mut() {
let s = query.structure_mut();
if s.has_name("gst-original-buffer-forward-query") {
if let Ok(mut q) = s.get::<gst::Query>("query") {
s.remove_field("query");
assert!(q.is_writable());
let res = self.src_pad.peer_query(q.get_mut().unwrap());
s.set("query", q);
s.set("result", res);
return true;
}
}
}
gst::Pad::query_default(pad, parent, query)
}
fn sink_chain(
&self,
_pad: &gst::Pad,
inbuf: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let Some(ometa) = inbuf.meta::<OriginalBufferMeta>() else {
//gst::element_warning!(self, gst::StreamError::Failed, ["Buffer {} is missing the GstOriginalBufferMeta, put originalbuffersave upstream in your pipeline", buffer]);
return Ok(gst::FlowSuccess::Ok);
};
let mut state = self.state.borrow_mut();
let meta_caps = &mut state.meta_caps;
if &meta_caps.caps != ometa.caps() {
if !self.src_pad.push_event(gst::event::Caps::new(ometa.caps())) {
return Err(gst::FlowError::NotNegotiated);
}
meta_caps.caps = ometa.caps().clone();
meta_caps.vinfo = gst_video::VideoInfo::from_caps(&meta_caps.caps).ok();
}
let mut outbuf = ometa.original().copy();
inbuf
.copy_into(
outbuf.make_mut(),
gst::BufferCopyFlags::TIMESTAMPS | gst::BufferCopyFlags::FLAGS,
..,
)
.unwrap();
for meta in inbuf.iter_meta::<gst::Meta>() {
if meta.api() == originalbuffermeta::OriginalBufferMeta::meta_api() {
continue;
}
if meta.has_tag::<gst::meta::tags::Memory>()
|| meta.has_tag::<gst::meta::tags::MemoryReference>()
{
continue;
}
if meta.has_tag::<gst_video::video_meta::tags::Size>() {
if let (Some(ref meta_vinfo), Some(ref sink_vinfo)) =
(&state.meta_caps.vinfo, &state.sinkpad_caps.vinfo)
{
if (meta_vinfo.width() != sink_vinfo.width()
|| meta_vinfo.height() != sink_vinfo.height())
&& meta
.transform(
outbuf.make_mut(),
&gst_video::video_meta::VideoMetaTransformScale::new(
sink_vinfo, meta_vinfo,
),
)
.is_ok()
{
continue;
}
}
}
let _ = meta.transform(
outbuf.make_mut(),
&gst::meta::MetaTransformCopy::new(false, ..),
);
}
if let Some(event) = state.sinkpad_segment.take() {
if !self.src_pad.push_event(event) {
return Err(gst::FlowError::Error);
}
}
self.src_pad.push(outbuf)
}
}

View file

@ -1,31 +0,0 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
/**
* SECTION:element-originalbufferrestore
*
* See originalbuffersave for details
*/
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub struct OriginalBufferRestore(ObjectSubclass<imp::OriginalBufferRestore>) @extends gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"originalbufferrestore",
gst::Rank::NONE,
OriginalBufferRestore::static_type(),
)
}

View file

@ -1,205 +0,0 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::prelude::*;
use gst::subclass::prelude::*;
use crate::originalbuffermeta::OriginalBufferMeta;
pub struct OriginalBufferSave {
src_pad: gst::Pad,
sink_pad: gst::Pad,
}
use once_cell::sync::Lazy;
#[cfg(unused_code)]
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"originalbuffersave",
gst::DebugColorFlags::empty(),
Some("Save Original buffer as meta"),
)
});
#[glib::object_subclass]
impl ObjectSubclass for OriginalBufferSave {
const NAME: &'static str = "GstOriginalBufferSave";
type Type = super::OriginalBufferSave;
type ParentType = gst::Element;
fn with_class(klass: &Self::Class) -> Self {
let sink_templ = klass.pad_template("sink").unwrap();
let src_templ = klass.pad_template("src").unwrap();
let sink_pad = gst::Pad::builder_from_template(&sink_templ)
.chain_function(|pad, parent, buffer| {
OriginalBufferSave::catch_panic_pad_function(
parent,
|| Err(gst::FlowError::Error),
|obj| obj.sink_chain(pad, buffer),
)
})
.query_function(|pad, parent, query| {
OriginalBufferSave::catch_panic_pad_function(
parent,
|| false,
|obj| obj.sink_query(pad, parent, query),
)
})
.flags(gst::PadFlags::PROXY_CAPS | gst::PadFlags::PROXY_ALLOCATION)
.build();
let src_pad = gst::Pad::builder_from_template(&src_templ)
.event_function(|pad, parent, event| {
OriginalBufferSave::catch_panic_pad_function(
parent,
|| false,
|obj| obj.src_event(pad, parent, event),
)
})
.build();
Self { src_pad, sink_pad }
}
}
impl ObjectImpl for OriginalBufferSave {
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
obj.add_pad(&self.sink_pad).unwrap();
obj.add_pad(&self.src_pad).unwrap();
}
}
impl GstObjectImpl for OriginalBufferSave {}
impl ElementImpl for OriginalBufferSave {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"Original Buffer Save",
"Generic",
"Saves a reference to the buffer in a meta",
"Olivier Crête <olivier.crete@collabora.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let caps = gst::Caps::new_any();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
)
.unwrap();
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&caps,
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
}
impl OriginalBufferSave {
fn forward_query(&self, query: gst::Query) -> Option<gst::Query> {
let mut s = gst::Structure::new_empty("gst-original-buffer-forward-query");
s.set("query", query);
let mut query = gst::query::Custom::new(s);
if self.src_pad.peer_query(&mut query) {
let s = query.structure_mut();
if let (Ok(true), Ok(q)) = (s.get("result"), s.get::<gst::Query>("query")) {
Some(q)
} else {
None
}
} else {
None
}
}
fn sink_chain(
&self,
pad: &gst::Pad,
inbuf: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let mut buf = inbuf.copy();
let caps = pad.current_caps();
if let Some(mut meta) = buf.make_mut().meta_mut::<OriginalBufferMeta>() {
meta.replace(inbuf, caps);
} else {
OriginalBufferMeta::add(buf.make_mut(), inbuf, caps);
}
self.src_pad.push(buf)
}
fn sink_query(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
query: &mut gst::QueryRef,
) -> bool {
let ret = gst::Pad::query_default(pad, parent, query);
if !ret {
return ret;
}
if let gst::QueryViewMut::Caps(q) = query.view_mut() {
if let Some(caps) = q.result_owned() {
let forwarding_q = gst::query::Caps::new(Some(&caps)).into();
if let Some(forwarding_q) = self.forward_query(forwarding_q) {
if let gst::QueryView::Caps(c) = forwarding_q.view() {
let res = c
.result_owned()
.map(|c| c.intersect_with_mode(&caps, gst::CapsIntersectMode::First));
q.set_result(&res);
}
}
}
}
// We should also do allocation queries, but that requires supporting the same
// intersection semantics as gsttee, which should be in a helper function.
true
}
fn src_event(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
event: gst::Event,
) -> bool {
let event = if event.has_name("gst-original-buffer-forward-upstream-event") {
event.structure().unwrap().get("event").unwrap()
} else {
event
};
gst::Pad::event_default(pad, parent, event)
}
}

View file

@ -1,41 +0,0 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
/**
* SECTION:element-originalbuffersave
*
* GStreamer elements to store the original buffer and restore it later
*
* In many analysis scenarios (for example machine learning), it is desirable to
* run on a pre-processed buffer, for example one with a lowered resolution, while
* applying the output of the analysis to the original buffer.
*
* These elements do just this, the typical usage would be a pipeline like:
*
* `... ! originalbuffersave ! videoconvertscale ! video/x-raw, width=100, height=100 ! analysiselement ! originalbufferrestore ! ...`
*
* The originalbufferrestore element "restores" the buffer that entered the "save" element, but keeps any metadata that was added in between.
*/
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub struct OriginalBufferSave(ObjectSubclass<imp::OriginalBufferSave>) @extends gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"originalbuffersave",
gst::Rank::NONE,
OriginalBufferSave::static_type(),
)
}
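A minimal sketch of the pipeline documented above; identity stands in for the analysis element and videotestsrc/fakesink for the surrounding pipeline, all placeholders:

// Build the documented save/restore chain.
fn build_pipeline() -> Result<gst::Element, gst::glib::Error> {
    gst::parse::launch(
        "videotestsrc ! originalbuffersave \
         ! videoconvertscale ! video/x-raw,width=100,height=100 \
         ! identity ! originalbufferrestore ! fakesink",
    )
}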

View file

@ -500,7 +500,7 @@ impl Decrypter {
gst::debug!(CAT, obj: pad, "Stream Block index: {}", chunk_index);
let pull_offset = offset - (chunk_index * block_size as u64);
assert!(pull_offset <= u32::MAX as u64);
assert!(pull_offset <= std::u32::MAX as u64);
let pull_offset = pull_offset as u32;
let pulled_buffer =

View file

@ -30,6 +30,7 @@ use once_cell::sync::Lazy;
use std::sync::Mutex;
use std::time::Duration;
use std::u32;
use crate::runtime::prelude::*;
use crate::runtime::{Context, PadSrc, Task, TaskState};

View file

@ -26,6 +26,7 @@ use once_cell::sync::Lazy;
use std::collections::VecDeque;
use std::sync::Arc;
use std::sync::Mutex as StdMutex;
use std::u32;
static DATA_QUEUE_CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(

View file

@ -30,6 +30,7 @@ use once_cell::sync::Lazy;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use std::time::Duration;
use std::u32;
use crate::runtime::prelude::*;
use crate::runtime::{self, PadSink, PadSrc};
@ -416,8 +417,8 @@ impl ObjectImpl for InputSelector {
let pads = self.pads.lock().unwrap();
let mut old_pad = None;
if let Some(ref pad) = pad {
if pads.sink_pads.contains_key(pad) {
old_pad.clone_from(&state.active_sinkpad);
if pads.sink_pads.get(pad).is_some() {
old_pad = state.active_sinkpad.clone();
state.active_sinkpad = Some(pad.clone());
state.switched_pad = true;
}

View file

@ -266,7 +266,7 @@ impl SinkHandler {
inner.gap_packets.insert(GapPacket::new(buffer));
if gap_packets_length > 0 {
let mut prev_gap_seq = u32::MAX;
let mut prev_gap_seq = std::u32::MAX;
let mut all_consecutive = true;
for gap_packet in inner.gap_packets.iter() {
@ -279,7 +279,7 @@ impl SinkHandler {
all_consecutive = gap_packet.pt == pt;
if prev_gap_seq == u32::MAX {
if prev_gap_seq == std::u32::MAX {
prev_gap_seq = gap_packet.seq as u32;
} else if gst_rtp::compare_seqnum(gap_packet.seq, prev_gap_seq as u16) != -1 {
all_consecutive = false;
@ -472,7 +472,7 @@ impl SinkHandler {
(Some(earliest_pts), Some(pts)) if pts < earliest_pts => true,
(Some(earliest_pts), Some(pts)) if pts == earliest_pts => state
.earliest_seqnum
.is_some_and(|earliest_seqnum| seq > earliest_seqnum),
.map_or(false, |earliest_seqnum| seq > earliest_seqnum),
_ => false,
};
@ -527,7 +527,7 @@ impl SinkHandler {
if let Some((next_wakeup, _)) = next_wakeup {
if let Some((previous_next_wakeup, ref abort_handle)) = state.wait_handle {
if previous_next_wakeup.is_none()
|| next_wakeup.is_some_and(|next| previous_next_wakeup.unwrap() > next)
|| next_wakeup.map_or(false, |next| previous_next_wakeup.unwrap() > next)
{
gst::debug!(
CAT,
@ -1166,7 +1166,7 @@ impl TaskImpl for JitterBufferTask {
context_wait,
);
if let Some((Some(next_wakeup), _)) = next_wakeup {
if now.is_some_and(|now| next_wakeup > now) {
if now.map_or(false, |now| next_wakeup > now) {
// Reschedule and wait a bit longer in the next iteration
return Ok(());
}

View file

@ -91,7 +91,7 @@ impl RTPJitterBufferItem {
r#type: 0,
dts: dts.into().into_glib(),
pts: pts.into().into_glib(),
seqnum: seqnum.map(|s| s as u32).unwrap_or(u32::MAX),
seqnum: seqnum.map(|s| s as u32).unwrap_or(std::u32::MAX),
count: 1,
rtptime,
},
@ -138,7 +138,7 @@ impl RTPJitterBufferItem {
pub fn seqnum(&self) -> Option<u16> {
unsafe {
let item = self.0.as_ref().expect("Invalid wrapper");
if item.as_ref().seqnum == u32::MAX {
if item.as_ref().seqnum == std::u32::MAX {
None
} else {
Some(item.as_ref().seqnum as u16)
@ -306,7 +306,7 @@ impl RTPJitterBuffer {
let pts = from_glib(pts.assume_init());
let seqnum = seqnum.assume_init();
let seqnum = if seqnum == u32::MAX {
let seqnum = if seqnum == std::u32::MAX {
None
} else {
Some(seqnum as u16)
@ -339,7 +339,7 @@ impl RTPJitterBuffer {
(None, None)
} else {
let seqnum = (*item).seqnum;
let seqnum = if seqnum == u32::MAX {
let seqnum = if seqnum == std::u32::MAX {
None
} else {
Some(seqnum as u16)

View file

@ -31,6 +31,7 @@ use std::collections::{HashMap, VecDeque};
use std::sync::{Arc, Weak};
use std::sync::{Mutex, MutexGuard};
use std::time::Duration;
use std::{u32, u64};
use crate::runtime::prelude::*;
use crate::runtime::{Context, PadSink, PadSinkWeak, PadSrc, PadSrcWeak, Task};

View file

@ -30,6 +30,7 @@ use once_cell::sync::Lazy;
use std::collections::VecDeque;
use std::sync::Mutex;
use std::time::Duration;
use std::{u32, u64};
use crate::runtime::prelude::*;
use crate::runtime::{Context, PadSink, PadSrc, Task};

View file

@ -57,7 +57,7 @@ const READ: usize = 0;
const WRITE: usize = 1;
thread_local! {
static CURRENT_REACTOR: RefCell<Option<Reactor>> = const { RefCell::new(None) };
static CURRENT_REACTOR: RefCell<Option<Reactor>> = RefCell::new(None);
}
#[derive(Debug)]

View file

@ -27,7 +27,7 @@ use super::{CallOnDrop, JoinHandle, Reactor};
use crate::runtime::RUNTIME_CAT;
thread_local! {
static CURRENT_SCHEDULER: RefCell<Option<HandleWeak>> = const { RefCell::new(None) };
static CURRENT_SCHEDULER: RefCell<Option<HandleWeak>> = RefCell::new(None);
}
#[derive(Debug)]
@ -301,7 +301,9 @@ impl Scheduler {
.borrow()
.as_ref()
.and_then(HandleWeak::upgrade)
.is_some_and(|cur| std::ptr::eq(self, Arc::as_ptr(&cur.0.scheduler)))
.map_or(false, |cur| {
std::ptr::eq(self, Arc::as_ptr(&cur.0.scheduler))
})
})
}
}

View file

@ -24,7 +24,7 @@ use super::CallOnDrop;
use crate::runtime::RUNTIME_CAT;
thread_local! {
static CURRENT_TASK_ID: Cell<Option<TaskId>> = const { Cell::new(None) };
static CURRENT_TASK_ID: Cell<Option<TaskId>> = Cell::new(None);
}
#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)]

View file

@ -31,6 +31,8 @@ use std::io;
use std::net::{IpAddr, SocketAddr, TcpStream};
use std::sync::Mutex;
use std::time::Duration;
use std::u16;
use std::u32;
use crate::runtime::prelude::*;
use crate::runtime::task;
@ -38,8 +40,6 @@ use crate::runtime::{Context, PadSrc, Task, TaskState};
use crate::runtime::Async;
use crate::socket::{Socket, SocketError, SocketRead};
use futures::channel::mpsc::{channel, Receiver, Sender};
use futures::pin_mut;
const DEFAULT_HOST: Option<&str> = Some("127.0.0.1");
const DEFAULT_PORT: i32 = 4953;
@ -48,11 +48,6 @@ const DEFAULT_BLOCKSIZE: u32 = 4096;
const DEFAULT_CONTEXT: &str = "";
const DEFAULT_CONTEXT_WAIT: Duration = Duration::ZERO;
#[derive(Debug, Default)]
struct State {
event_sender: Option<Sender<gst::Event>>,
}
#[derive(Debug, Clone)]
struct Settings {
host: Option<String>,
@ -171,16 +166,10 @@ struct TcpClientSrcTask {
socket: Option<Socket<TcpClientReader>>,
need_initial_events: bool,
need_segment: bool,
event_receiver: Receiver<gst::Event>,
}
impl TcpClientSrcTask {
fn new(
element: super::TcpClientSrc,
saddr: SocketAddr,
buffer_pool: gst::BufferPool,
event_receiver: Receiver<gst::Event>,
) -> Self {
fn new(element: super::TcpClientSrc, saddr: SocketAddr, buffer_pool: gst::BufferPool) -> Self {
TcpClientSrcTask {
element,
saddr,
@ -188,7 +177,6 @@ impl TcpClientSrcTask {
socket: None,
need_initial_events: true,
need_segment: true,
event_receiver,
}
}
@ -325,58 +313,34 @@ impl TaskImpl for TcpClientSrcTask {
fn try_next(&mut self) -> BoxFuture<'_, Result<gst::Buffer, gst::FlowError>> {
async move {
let event_fut = self.event_receiver.next().fuse();
let socket_fut = self.socket.as_mut().unwrap().try_next().fuse();
pin_mut!(event_fut);
pin_mut!(socket_fut);
futures::select! {
event_res = event_fut => match event_res {
Some(event) => {
gst::debug!(CAT, obj: self.element, "Handling element level event {event:?}");
match event.view() {
gst::EventView::Eos(_) => Err(gst::FlowError::Eos),
ev => {
gst::error!(CAT, obj: self.element, "Unexpected event {ev:?} on channel");
Err(gst::FlowError::Error)
}
self.socket
.as_mut()
.unwrap()
.try_next()
.await
.map(|(buffer, _saddr)| buffer)
.map_err(|err| {
gst::error!(CAT, obj: self.element, "Got error {:?}", err);
match err {
SocketError::Gst(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {}", err]
);
}
SocketError::Io(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("I/O error"),
["streaming stopped, I/O error {}", err]
);
}
}
None => {
gst::error!(CAT, obj: self.element, "Unexpected return on event channel");
Err(gst::FlowError::Error)
}
},
socket_res = socket_fut => match socket_res {
Ok((buffer, _saddr)) => Ok(buffer),
Err(err) => {
gst::error!(CAT, obj: self.element, "Got error {err:#}");
match err {
SocketError::Gst(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {err}"]
);
}
SocketError::Io(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("I/O error"),
["streaming stopped, I/O error {err}"]
);
}
}
Err(gst::FlowError::Error)
}
},
}
gst::FlowError::Error
})
}
.boxed()
}
@ -404,40 +368,6 @@ impl TaskImpl for TcpClientSrcTask {
}
.boxed()
}
fn handle_loop_error(&mut self, err: gst::FlowError) -> BoxFuture<'_, task::Trigger> {
async move {
match err {
gst::FlowError::Flushing => {
gst::debug!(CAT, obj: self.element, "Flushing");
task::Trigger::FlushStart
}
gst::FlowError::Eos => {
gst::debug!(CAT, obj: self.element, "EOS");
self.element
.imp()
.src_pad
.push_event(gst::event::Eos::new())
.await;
task::Trigger::Stop
}
err => {
gst::error!(CAT, obj: self.element, "Got error {err}");
gst::element_error!(
&self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {}", err]
);
task::Trigger::Error
}
}
}
.boxed()
}
}
pub struct TcpClientSrc {
@ -445,7 +375,6 @@ pub struct TcpClientSrc {
task: Task,
configured_caps: Mutex<Option<gst::Caps>>,
settings: Mutex<Settings>,
state: Mutex<State>,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
@ -502,24 +431,18 @@ impl TcpClientSrc {
let saddr = SocketAddr::new(host, port as u16);
let (sender, receiver) = channel(1);
// Don't block on `prepare` as the socket connection takes time.
// This will be performed in the background and we'll block on
// `start` which will also ensure `prepare` completed successfully.
let fut = self
.task
.prepare(
TcpClientSrcTask::new(self.obj().clone(), saddr, buffer_pool, receiver),
TcpClientSrcTask::new(self.obj().clone(), saddr, buffer_pool),
context,
)
.check()?;
drop(fut);
let mut state = self.state.lock().unwrap();
state.event_sender = Some(sender);
drop(state);
gst::debug!(CAT, imp: self, "Preparing asynchronously");
Ok(())
@ -551,10 +474,6 @@ impl TcpClientSrc {
gst::debug!(CAT, imp: self, "Paused");
Ok(())
}
fn state(&self) -> TaskState {
self.task.state()
}
}
#[glib::object_subclass]
@ -572,7 +491,6 @@ impl ObjectSubclass for TcpClientSrc {
task: Task::default(),
configured_caps: Default::default(),
settings: Default::default(),
state: Default::default(),
}
}
}
@ -746,31 +664,4 @@ impl ElementImpl for TcpClientSrc {
Ok(success)
}
fn send_event(&self, event: gst::Event) -> bool {
use gst::EventView;
gst::debug!(CAT, imp: self, "Handling element level event {event:?}");
match event.view() {
EventView::Eos(_) => {
if self.state() != TaskState::Started {
if let Err(err) = self.start() {
gst::error!(CAT, imp: self, "Failed to start task thread {err:?}");
}
}
if self.state() == TaskState::Started {
let mut state = self.state.lock().unwrap();
if let Some(event_tx) = state.event_sender.as_mut() {
return event_tx.try_send(event.clone()).is_ok();
}
}
false
}
_ => self.parent_send_event(event),
}
}
}

View file

@ -37,6 +37,8 @@ use std::collections::BTreeSet;
use std::net::{IpAddr, Ipv4Addr, SocketAddr, UdpSocket};
use std::sync::{Arc, Mutex};
use std::time::Duration;
use std::u16;
use std::u8;
const DEFAULT_HOST: Option<&str> = Some("127.0.0.1");
const DEFAULT_PORT: i32 = 5004;

View file

@ -27,17 +27,17 @@ use gst_net::*;
use once_cell::sync::Lazy;
use std::i32;
use std::io;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, UdpSocket};
use std::sync::Mutex;
use std::time::Duration;
use std::u16;
use crate::runtime::prelude::*;
use crate::runtime::{task, Async, Context, PadSrc, Task, TaskState};
use crate::runtime::{Async, Context, PadSrc, Task};
use crate::socket::{wrap_socket, GioSocketWrapper, Socket, SocketError, SocketRead};
use futures::channel::mpsc::{channel, Receiver, Sender};
use futures::pin_mut;
const DEFAULT_ADDRESS: Option<&str> = Some("0.0.0.0");
const DEFAULT_PORT: i32 = 5004;
@ -50,11 +50,6 @@ const DEFAULT_CONTEXT: &str = "";
const DEFAULT_CONTEXT_WAIT: Duration = Duration::ZERO;
const DEFAULT_RETRIEVE_SENDER_ADDRESS: bool = true;
#[derive(Debug, Default)]
struct State {
event_sender: Option<Sender<gst::Event>>,
}
#[derive(Debug, Clone)]
struct Settings {
address: Option<String>,
@ -187,18 +182,16 @@ struct UdpSrcTask {
retrieve_sender_address: bool,
need_initial_events: bool,
need_segment: bool,
event_receiver: Receiver<gst::Event>,
}
impl UdpSrcTask {
fn new(element: super::UdpSrc, event_receiver: Receiver<gst::Event>) -> Self {
fn new(element: super::UdpSrc) -> Self {
UdpSrcTask {
element,
socket: None,
retrieve_sender_address: DEFAULT_RETRIEVE_SENDER_ADDRESS,
need_initial_events: true,
need_segment: true,
event_receiver,
}
}
}
@ -430,69 +423,44 @@ impl TaskImpl for UdpSrcTask {
fn try_next(&mut self) -> BoxFuture<'_, Result<gst::Buffer, gst::FlowError>> {
async move {
let event_fut = self.event_receiver.next().fuse();
let socket_fut = self.socket.as_mut().unwrap().try_next().fuse();
pin_mut!(event_fut);
pin_mut!(socket_fut);
futures::select! {
event_res = event_fut => match event_res {
Some(event) => {
gst::debug!(CAT, obj: self.element, "Handling element level event {event:?}");
match event.view() {
gst::EventView::Eos(_) => Err(gst::FlowError::Eos),
ev => {
gst::error!(CAT, obj: self.element, "Unexpected event {ev:?} on channel");
Err(gst::FlowError::Error)
}
self.socket
.as_mut()
.unwrap()
.try_next()
.await
.map(|(mut buffer, saddr)| {
if let Some(saddr) = saddr {
if self.retrieve_sender_address {
NetAddressMeta::add(
buffer.get_mut().unwrap(),
&gio::InetSocketAddress::from(saddr),
);
}
}
None => {
gst::error!(CAT, obj: self.element, "Unexpected return on event channel");
Err(gst::FlowError::Error)
}
},
socket_res = socket_fut => match socket_res {
Ok((mut buffer, saddr)) => {
if let Some(saddr) = saddr {
if self.retrieve_sender_address {
NetAddressMeta::add(
buffer.get_mut().unwrap(),
&gio::InetSocketAddress::from(saddr),
);
}
buffer
})
.map_err(|err| {
gst::error!(CAT, obj: self.element, "Got error {:?}", err);
match err {
SocketError::Gst(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {}", err]
);
}
Ok(buffer)
},
Err(err) => {
gst::error!(CAT, obj: self.element, "Got error {err:#}");
match err {
SocketError::Gst(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {err}"]
);
}
SocketError::Io(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("I/O error"),
["streaming stopped, I/O error {err}"]
);
}
SocketError::Io(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("I/O error"),
["streaming stopped, I/O error {}", err]
);
}
Err(gst::FlowError::Error)
}
},
}
gst::FlowError::Error
})
}
.boxed()
}
@ -576,40 +544,6 @@ impl TaskImpl for UdpSrcTask {
}
.boxed()
}
fn handle_loop_error(&mut self, err: gst::FlowError) -> BoxFuture<'_, task::Trigger> {
async move {
match err {
gst::FlowError::Flushing => {
gst::debug!(CAT, obj: self.element, "Flushing");
task::Trigger::FlushStart
}
gst::FlowError::Eos => {
gst::debug!(CAT, obj: self.element, "EOS");
self.element
.imp()
.src_pad
.push_event(gst::event::Eos::new())
.await;
task::Trigger::Stop
}
err => {
gst::error!(CAT, obj: self.element, "Got error {err}");
gst::element_error!(
&self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {}", err]
);
task::Trigger::Error
}
}
}
.boxed()
}
}
pub struct UdpSrc {
@ -617,7 +551,6 @@ pub struct UdpSrc {
task: Task,
configured_caps: Mutex<Option<gst::Caps>>,
settings: Mutex<Settings>,
state: Mutex<State>,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
@ -642,17 +575,11 @@ impl UdpSrc {
})?;
drop(settings);
let (sender, receiver) = channel(1);
*self.configured_caps.lock().unwrap() = None;
self.task
.prepare(UdpSrcTask::new(self.obj().clone(), receiver), context)
.prepare(UdpSrcTask::new(self.obj().clone()), context)
.block_on()?;
let mut state = self.state.lock().unwrap();
state.event_sender = Some(sender);
drop(state);
gst::debug!(CAT, imp: self, "Prepared");
Ok(())
@ -684,10 +611,6 @@ impl UdpSrc {
gst::debug!(CAT, imp: self, "Paused");
Ok(())
}
fn state(&self) -> TaskState {
self.task.state()
}
}
#[glib::object_subclass]
@ -705,7 +628,6 @@ impl ObjectSubclass for UdpSrc {
task: Task::default(),
configured_caps: Default::default(),
settings: Default::default(),
state: Default::default(),
}
}
}
@ -936,31 +858,4 @@ impl ElementImpl for UdpSrc {
Ok(success)
}
fn send_event(&self, event: gst::Event) -> bool {
use gst::EventView;
gst::debug!(CAT, imp: self, "Handling element level event {event:?}");
match event.view() {
EventView::Eos(_) => {
if self.state() != TaskState::Started {
if let Err(err) = self.start() {
gst::error!(CAT, imp: self, "Failed to start task thread {err:?}");
}
}
if self.state() == TaskState::Started {
let mut state = self.state.lock().unwrap();
if let Some(event_tx) = state.event_sender.as_mut() {
return event_tx.try_send(event.clone()).is_ok();
}
}
false
}
_ => self.parent_send_event(event),
}
}
}
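The send_event override above hands an element-level EOS to the streaming task over a bounded channel. A stripped-down sketch of that forwarding idea (names hypothetical, not the element's API):

use futures::channel::mpsc;

struct EventForwarder {
    event_tx: Option<mpsc::Sender<String>>,
}

impl EventForwarder {
    // Returns true if the event was queued for the streaming task to handle.
    fn send_event(&mut self, event: String) -> bool {
        match self.event_tx.as_mut() {
            Some(tx) => tx.try_send(event).is_ok(),
            None => false,
        }
    }
}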

View file

@ -609,8 +609,6 @@ fn premature_shutdown() {
}
#[test]
// FIXME: racy: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/250
#[ignore]
fn socket_play_null_play() {
use gio::{
prelude::SocketExt, InetAddress, InetSocketAddress, SocketFamily, SocketProtocol,

View file

@ -76,8 +76,6 @@ fn test_client_management() {
}
#[test]
// FIXME: racy: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/250
#[ignore]
fn test_chain() {
init();

View file

@ -1,7 +1,7 @@
project('gst-plugins-rs',
'rust',
'c',
version: '0.13.0-alpha.1',
version: '0.12.1',
meson_version : '>= 1.1')
# dependencies.py needs a toml parsing module
@ -86,7 +86,7 @@ if get_option('tests').allowed()
deps += [['gstreamer-check-1.0', 'gstreamer', 'gst_check_dep', 'gst_check']]
endif
if get_option('gtk4').allowed()
deps += [['gstreamer-gl-1.0', 'gst-plugins-base', 'gstgl_dep', 'gstgl', get_option('gtk4')]]
deps += [['gstreamer-gl-1.0', 'gst-plugins-base', 'gst_gl_dep', 'gstgl']]
endif
if get_option('threadshare').allowed() or get_option('rtsp').allowed()
deps += [['gstreamer-net-1.0', 'gstreamer', 'gst_net_dep', 'gst_net']]
@ -97,7 +97,7 @@ deps_cache += {'glib-2.0': glib_dep}
foreach d: deps
dep = dependency(d[0], version: gst_req,
fallback : [d[1], d[2]], required: d.get(4, true))
fallback : [d[1], d[2]])
set_variable(d[2], dep)
deps_cache += {d[0]: dep}
if dep.type_name() == 'internal'
@ -118,7 +118,6 @@ plugins = {
'spotify': {'library': 'libgstspotify'},
'file': {'library': 'libgstrsfile'},
'originalbuffer': {'library': 'libgstoriginalbuffer'},
# sodium can have an external dependency, see below
'threadshare': {
'library': 'libgstthreadshare',
@ -171,7 +170,6 @@ plugins = {
'library': 'libgsturiplaylistbin',
'examples': ['playlist'],
'features': ['clap'],
'gst-version': '>=1.23.90',
},
'cdg': {'library': 'libgstcdg'},
@ -185,7 +183,7 @@ plugins = {
},
'dav1d': {
'library': 'libgstdav1d',
'extra-deps': {'dav1d': ['>=1.3']},
'extra-deps': {'dav1d': ['>=1.0', '<1.3']},
},
'ffv1': {'library': 'libgstffv1'},
'flavors': {'library': 'libgstrsflv'},
@ -204,10 +202,34 @@ plugins = {
'library': 'libgstrsvideofx',
'extra-deps': {'cairo-gobject': []},
},
'gopbuffer': {'library': 'libgstgopbuffer'},
'quinn': {'library': 'libgstquinn'},
}
if get_option('examples').allowed()
plugins += {
'fallbackswitch': {
'library': 'libgstfallbackswitch',
'examples': ['gtk-fallbackswitch'],
'features': ['gtk', 'gio', 'gst-plugin-gtk4'],
},
'livesync': {
'library': 'libgstlivesync',
'examples': ['gtk-livesync'],
'features': ['gtk', 'gio', 'gst-plugin-gtk4'],
},
'togglerecord': {
'library': 'libgsttogglerecord',
'examples': ['gtk-recording'],
'features': ['gtk', 'gio', 'gst-plugin-gtk4'],
},
}
else
plugins += {
'fallbackswitch': { 'library': 'libgstfallbackswitch'},
'livesync': { 'library': 'libgstlivesync'},
'togglerecord': { 'library': 'libgsttogglerecord'},
}
endif
# Won't build on platforms where it bundles the sources because of:
# https://github.com/qnighy/libwebp-sys2-rs/issues/12
# the fix is:
@ -262,8 +284,8 @@ endif
if get_option('gtk4').allowed()
gtk4_features = []
gl_winsys = gstgl_dep.get_variable('gl_winsys').split()
gl_platforms = gstgl_dep.get_variable('gl_platforms').split()
gl_winsys = gst_gl_dep.get_variable('gl_winsys').split()
gl_platforms = gst_gl_dep.get_variable('gl_platforms').split()
if 'wayland' in gl_winsys
gtk4_features += 'wayland'
endif
@ -279,61 +301,13 @@ if get_option('gtk4').allowed()
gtk4_features += 'winegl'
endif
endif
gst_allocators_dep = dependency('gstreamer-allocators-1.0', version: '>=1.24', required: false)
gtk_dep = dependency('gtk4', version: '>=4.6', required: get_option('gtk4'))
if gtk_dep.found()
if host_system == 'linux' and gtk_dep.version().version_compare('>=4.14') and \
gst_allocators_dep.found() and 'wayland' in gtk4_features
gtk4_features += 'dmabuf'
endif
if gtk_dep.version().version_compare('>=4.14')
gtk4_features += 'gtk_v4_14'
elif gtk_dep.version().version_compare('>=4.12')
gtk4_features += 'gtk_v4_12'
elif gtk_dep.version().version_compare('>=4.10')
gtk4_features += 'gtk_v4_10'
endif
plugins += {
'gtk4': {
'library': 'libgstgtk4',
'examples': ['gtksink'],
'extra-deps': {'gtk4': ['>=4.6']},
'features': gtk4_features,
},
}
endif
endif
examples_opt = get_option('examples')
if examples_opt.allowed() and 'gtk4' in plugins
plugins += {
'fallbackswitch': {
'library': 'libgstfallbackswitch',
'examples_features': {
'gtk-fallbackswitch': ['gtk', 'gio', 'gst-plugin-gtk4'],
},
'gtk4': {
'library': 'libgstgtk4',
'examples': ['gtksink'],
'extra-deps': {'gtk4': ['>=4.6']},
'features': gtk4_features,
},
'livesync': {
'library': 'libgstlivesync',
'examples_features': {
'gtk-livesync': ['gtk', 'gio', 'gst-plugin-gtk4'],
}
},
'togglerecord': {
'library': 'libgsttogglerecord',
'examples_features': {
'gtk-recording': ['gtk', 'gio', 'gst-plugin-gtk4'],
}
},
}
else
plugins += {
'fallbackswitch': { 'library': 'libgstfallbackswitch'},
'livesync': { 'library': 'libgstlivesync'},
'togglerecord': { 'library': 'libgsttogglerecord'},
}
endif
@ -399,107 +373,51 @@ endif
foreach plugin_name, details: plugins
plugin_opt = get_variable(f'@plugin_name@_option', get_option(plugin_name))
if not plugin_opt.allowed()
debug(f'@plugin_name@ is disabled')
continue
endif
plugin_deps_found = true
# Check whether we have all needed deps
foreach dep_name, dep_ver: details.get('extra-deps', {})
if dep_ver.length() != 0
dep = dependency(dep_name, version: dep_ver, required: plugin_opt)
else
dep = dependency(dep_name, required: plugin_opt)
endif
deps_cache += {dep_name: dep}
if not dep.found()
if plugin_opt.allowed()
plugin_deps_found = true
foreach dep_name, dep_ver: details.get('extra-deps', {})
if dep_ver.length() != 0
dep_ver_msg = ' '.join(dep_ver)
debug(f'@plugin_name@ dependency @dep_name@ @dep_ver_msg@ not found, skipping')
dep = dependency(dep_name, version: dep_ver, required: plugin_opt)
else
debug(f'@plugin_name@ dependency @dep_name@ not found, skipping')
dep = dependency(dep_name, required: plugin_opt)
endif
plugin_deps_found = false
break
endif
endforeach
if not plugin_deps_found
continue
endif
# Validate gst-plugin features
plugin_features = details.get('features', [])
foreach feature: plugin_features
if feature.startswith('gst-plugin') and not packages.contains(feature)
msg = f'@plugin_name@ required feature @feature@ not found'
if plugin_opt.enabled()
error(msg)
endif
message(msg + ', skipping')
plugin_deps_found = false
break
endif
endforeach
if not plugin_deps_found
continue
endif
# Check if we have the required GStreamer version
if details.has_key('gst-version') and not \
deps_cache['gstreamer-1.0'].version().version_compare(details['gst-version'])
msg = '@0@ requires gstreamer version @1@'.format(plugin_name, details['gst-version'])
if plugin_opt.enabled()
error(msg)
endif
message(msg + ', skipping')
continue
endif
# Parse and enable examples
plugin_examples = details.get('examples', [])
foreach example: plugin_examples
examples += example
endforeach
plugin_examples_features = details.get('examples_features', {})
foreach example, examples_features: plugin_examples_features
example_deps_found = true
foreach feature: examples_features
if feature.startswith('gst-plugin') and not packages.contains(feature)
msg = f'@plugin_name@ example @example@ required feature @feature@ not found'
if plugin_opt.enabled() and examples_opt.enabled()
error(msg)
endif
message(msg + ', skipping')
example_deps_found = false
deps_cache += {dep_name: dep}
if not dep.found()
plugin_deps_found = false
break
endif
endforeach
features += examples_features
if example_deps_found
examples += example
plugin_features = details.get('features', [])
if plugin_deps_found
# Validate gst-plugin features
foreach feature: plugin_features
if feature.startswith('gst-plugin') and not packages.contains(feature)
plugin_deps_found = false
break
endif
endforeach
endif
endforeach
if plugin_deps_found
packages += f'gst-plugin-@plugin_name@'
features += plugin_features
extra_features = run_command('dependencies.py', meson.current_source_dir(), plugin_name,
'--feature', '--gst-version', gst_dep.version(), capture: true, check: true).stdout().strip()
if extra_features != ''
features += extra_features.split(',')
endif
packages += f'gst-plugin-@plugin_name@'
features += plugin_features
extra_features = run_command('dependencies.py', meson.current_source_dir(), plugin_name,
'--feature', '--gst-version', gst_dep.version(), capture: true, check: true).stdout().strip()
if extra_features != ''
features += extra_features.split(',')
endif
lib = details.get('library')
# No 'lib' suffix with MSVC
if cc.get_argument_syntax() == 'msvc'
lib = lib.substring(3)
endif
if default_library in ['shared', 'both']
output += [lib + '.' + ext_dynamic]
endif
if default_library in ['static', 'both']
output += [lib + '.' + ext_static]
lib = details.get('library')
# No 'lib' suffix with MSVC
if cc.get_argument_syntax() == 'msvc'
lib = lib.substring(3)
endif
if default_library in ['shared', 'both']
output += [lib + '.' + ext_dynamic]
endif
if default_library in ['static', 'both']
output += [lib + '.' + ext_static]
endif
endif
endif
endforeach
@ -571,16 +489,6 @@ foreach plugin : plugins
plugin_name = plugin_name.substring(3)
endif
plugin_display_name = plugin_name
if plugin_name.startswith('gst')
plugin_display_name = plugin_name.substring(3)
endif
if plugin_display_name in plugin_names
# When default_library=both plugins are duplicated.
continue
endif
plugin_names += plugin_display_name
option_name = plugin_name.substring(3)
if option_name.startswith('rs')
option_name = option_name.substring(2)
@ -625,7 +533,13 @@ foreach plugin : plugins
warning('Static plugin @0@ is known to fail. It will not be included in libgstreamer-full.'.format(plugin_name))
else
gst_plugins += dep
pc_files += [plugin_name + '.pc']
if plugin_name.startswith('gst')
plugin_names += [plugin_name.substring(3)]
else
plugin_names += [plugin_name]
endif
endif
endforeach

View file

@ -9,8 +9,6 @@ option('spotify', type: 'feature', value: 'auto', description: 'Build spotify pl
# generic
option('file', type: 'feature', value: 'auto', description: 'Build file plugin')
option('originalbuffer', type: 'feature', value: 'auto', description: 'Build originalbuffer plugin')
option('gopbuffer', type: 'feature', value: 'auto', description: 'Build gopbuffer plugin')
option('sodium', type: 'feature', value: 'auto', description: 'Build sodium plugin')
option('sodium-source', type: 'combo',
choices: ['system', 'built-in'], value: 'built-in',
@ -34,7 +32,6 @@ option('rtsp', type: 'feature', value: 'auto', description: 'Build rtsp plugin')
option('rtp', type: 'feature', value: 'auto', description: 'Build rtp plugin')
option('webrtc', type: 'feature', value: 'auto', yield: true, description: 'Build webrtc plugin')
option('webrtchttp', type: 'feature', value: 'auto', description: 'Build webrtchttp plugin')
option('quinn', type: 'feature', value: 'auto', description: 'Build quinn plugin')
# text
option('textahead', type: 'feature', value: 'auto', description: 'Build textahead plugin')

View file

@ -11,7 +11,6 @@
pub use byteorder::{BigEndian, LittleEndian, ReadBytesExt, WriteBytesExt};
use std::io;
#[allow(unused)]
pub trait ReadBytesExtShort: io::Read {
fn read_u16le(&mut self) -> io::Result<u16> {
self.read_u16::<LittleEndian>()
@ -77,7 +76,6 @@ pub trait ReadBytesExtShort: io::Read {
impl<T> ReadBytesExtShort for T where T: ReadBytesExt {}
#[allow(unused)]
pub trait WriteBytesExtShort: WriteBytesExt {
fn write_u16le(&mut self, n: u16) -> io::Result<()> {
self.write_u16::<LittleEndian>(n)

View file

@ -14,9 +14,8 @@ gst = { workspace = true, features = ["v1_18"] }
gst-base = { workspace = true, features = ["v1_18"] }
gst-audio = { workspace = true, features = ["v1_18"] }
gst-video = { workspace = true, features = ["v1_18"] }
gst-pbutils = { workspace = true, features = ["v1_20"] }
gst-pbutils = { workspace = true, features = ["v1_18"] }
once_cell.workspace = true
bitstream-io = "2.3"
[lib]
name = "gstfmp4"
@ -26,10 +25,9 @@ path = "src/lib.rs"
[dev-dependencies]
gst-app = { workspace = true, features = ["v1_18"] }
gst-check = { workspace = true, features = ["v1_18"] }
gst-pbutils = { workspace = true, features = ["v1_20"] }
m3u8-rs = "5.0"
chrono = "0.4.35"
dash-mpd = { version = "0.16", default-features = false }
chrono = "0.4"
dash-mpd = { version = "0.14", default-features = false }
quick-xml = { version = "0.31", features = ["serialize"] }
serde = "1"

View file

@ -86,7 +86,7 @@ fn main() -> Result<(), Error> {
drop(map);
// Remove the header from the buffer list
buffer_list.make_mut().remove(0..1);
buffer_list.make_mut().remove(0, 1);
// If the list is now empty then it only contained the media header and nothing
// else.
@ -179,18 +179,19 @@ fn main() -> Result<(), Error> {
// Write the whole segment timeline out here, compressing multiple segments with
// the same duration to a repeated segment.
let mut segments = vec![];
let mut write_segment = |start: gst::ClockTime, duration: u64, repeat: usize| {
let mut s = dash_mpd::S {
t: Some(start.mseconds()),
d: duration,
..Default::default()
};
if repeat > 0 {
s.r = Some(repeat as i64);
}
let mut write_segment =
|start: gst::ClockTime, duration: gst::ClockTime, repeat: usize| {
let mut s = dash_mpd::S {
t: Some(start.mseconds() as i64),
d: duration.mseconds() as i64,
..Default::default()
};
if repeat > 0 {
s.r = Some(repeat as i64);
}
segments.push(s);
};
segments.push(s);
};
let mut start = None;
let mut num_segments = 0;
@ -200,15 +201,15 @@ fn main() -> Result<(), Error> {
start = Some(segment.start_time);
}
if last_duration.is_none() {
last_duration = Some(segment.duration.mseconds());
last_duration = Some(segment.duration);
}
// If the duration of this segment is different from the previous one then we
// have to write out the segment now.
if last_duration != Some(segment.duration.mseconds()) {
if last_duration != Some(segment.duration) {
write_segment(start.unwrap(), last_duration.unwrap(), num_segments - 1);
start = Some(segment.start_time);
last_duration = Some(segment.duration.mseconds());
last_duration = Some(segment.duration);
num_segments = 1;
} else {
num_segments += 1;
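The write_segment closure run-length compresses the SegmentTimeline: consecutive segments of equal duration collapse into a single S element whose r attribute counts the extra repeats. The same idea over bare durations (hypothetical helper, for illustration only):

fn compress(durations: &[u64]) -> Vec<(u64, usize)> {
    // (duration, additional repeats) pairs, mirroring DASH's S@d / S@r.
    let mut out: Vec<(u64, usize)> = Vec::new();
    for &d in durations {
        match out.last_mut() {
            Some((prev, r)) if *prev == d => *r += 1,
            _ => out.push((d, 0)),
        }
    }
    out
}

// compress(&[3000, 3000, 3000, 2800]) == vec![(3000, 2), (2800, 0)]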

View file

@ -153,7 +153,7 @@ fn trim_segments(state: &mut StreamState) {
// safe side
removal_time: segment
.date_time
.checked_add_signed(Duration::try_seconds(20).unwrap())
.checked_add_signed(Duration::seconds(20))
.unwrap(),
path: segment.path.clone(),
});
@ -267,7 +267,7 @@ fn setup_appsink(appsink: &gst_app::AppSink, name: &str, path: &Path, is_video:
drop(map);
// Remove the header from the buffer list
buffer_list.make_mut().remove(0..1);
buffer_list.make_mut().remove(0, 1);
// If the list is now empty then it only contained the media header and nothing
// else.

View file

@ -170,7 +170,7 @@ fn setup_appsink(appsink: &gst_app::AppSink, name: &str, path: &Path, is_video:
drop(map);
// Remove the header from the buffer list
buffer_list.make_mut().remove(0..1);
buffer_list.make_mut().remove(0, 1);
// If the list is now empty then it only contained the media header and nothing
// else.
@ -360,10 +360,6 @@ impl AudioStream {
.property("samplesperbuffer", 4410)
.property_from_str("wave", &self.wave)
.build()?;
let taginject = gst::ElementFactory::make("taginject")
.property_from_str("tags", &format!("language-code={}", self.lang))
.property_from_str("scope", "stream")
.build()?;
let raw_capsfilter = gst::ElementFactory::make("capsfilter")
.property(
"caps",
@ -378,23 +374,9 @@ impl AudioStream {
.build()?;
let appsink = gst_app::AppSink::builder().buffer_list(true).build();
pipeline.add_many([
&src,
&taginject,
&raw_capsfilter,
&enc,
&mux,
appsink.upcast_ref(),
])?;
pipeline.add_many([&src, &raw_capsfilter, &enc, &mux, appsink.upcast_ref()])?;
gst::Element::link_many([
&src,
&taginject,
&raw_capsfilter,
&enc,
&mux,
appsink.upcast_ref(),
])?;
gst::Element::link_many([&src, &raw_capsfilter, &enc, &mux, appsink.upcast_ref()])?;
probe_encoder(state, enc);
@ -434,7 +416,7 @@ fn main() -> Result<(), Error> {
},
AudioStream {
name: "audio_1".to_string(),
lang: "fra".to_string(),
lang: "fre".to_string(),
default: false,
wave: "white-noise".to_string(),
},

View file

@ -9,7 +9,6 @@
use gst::prelude::*;
use anyhow::{anyhow, bail, Context, Error};
use std::convert::TryFrom;
use super::Buffer;
@ -161,10 +160,6 @@ fn cmaf_brands_from_caps(caps: &gst::CapsRef, compatible_brands: &mut Vec<&'stat
"audio/mpeg" => {
compatible_brands.push(b"caac");
}
"video/x-av1" => {
compatible_brands.push(b"av01");
compatible_brands.push(b"cmf2");
}
"video/x-h265" => {
let width = s.get::<i32>("width").ok();
let height = s.get::<i32>("height").ok();
@ -609,8 +604,9 @@ fn write_tkhd(
// Volume
let s = stream.caps.structure(0).unwrap();
match s.name().as_str() {
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => v.extend((1u16 << 8).to_be_bytes()),
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
v.extend((1u16 << 8).to_be_bytes())
}
_ => v.extend(0u16.to_be_bytes()),
}
@ -704,6 +700,7 @@ fn write_tref(
fn language_code(lang: impl std::borrow::Borrow<[u8; 3]>) -> u16 {
let lang = lang.borrow();
// TODO: Need to relax this once we get the language code from tags
assert!(lang.iter().all(u8::is_ascii_lowercase));
(((lang[0] as u16 - 0x60) & 0x1F) << 10)
@ -713,7 +710,7 @@ fn language_code(lang: impl std::borrow::Borrow<[u8; 3]>) -> u16 {
fn write_mdhd(
v: &mut Vec<u8>,
cfg: &super::HeaderConfiguration,
_cfg: &super::HeaderConfiguration,
stream: &super::HeaderStream,
creation_time: u64,
) -> Result<(), Error> {
@ -727,11 +724,8 @@ fn write_mdhd(
v.extend(0u64.to_be_bytes());
// Language as ISO-639-2/T
if let Some(lang) = cfg.language_code {
v.extend(language_code(lang).to_be_bytes());
} else {
v.extend(language_code(b"und").to_be_bytes());
}
// TODO: get actual language from the tags
v.extend(language_code(b"und").to_be_bytes());
// Pre-defined
v.extend([0u8; 2]);
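Both branches feed language_code(), which packs each lowercase letter as (byte - 0x60) into 5 bits, first letter in the highest bits. A hand-check for "und", which yields the familiar 0x55C4 seen in mdhd boxes:

let lang = b"und";
let packed = (((lang[0] as u16 - 0x60) & 0x1f) << 10)
    | (((lang[1] as u16 - 0x60) & 0x1f) << 5)
    | ((lang[2] as u16 - 0x60) & 0x1f);
assert_eq!(packed, 0x55C4); // 'u' = 21, 'n' = 14, 'd' = 4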
@ -751,8 +745,9 @@ fn write_hdlr(
let (handler_type, name) = match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => (b"vide", b"VideoHandler\0".as_slice()),
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => (b"soun", b"SoundHandler\0".as_slice()),
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
(b"soun", b"SoundHandler\0".as_slice())
}
"application/x-onvif-metadata" => (b"meta", b"MetadataHandler\0".as_slice()),
_ => unreachable!(),
};
@ -782,8 +777,7 @@ fn write_minf(
// Flags are always 1 for unspecified reasons
write_full_box(v, b"vmhd", FULL_BOX_VERSION_0, 1, |v| write_vmhd(v, cfg))?
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => {
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
write_full_box(v, b"smhd", FULL_BOX_VERSION_0, FULL_BOX_FLAGS_NONE, |v| {
write_smhd(v, cfg)
})?
@ -892,8 +886,9 @@ fn write_stsd(
match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => write_visual_sample_entry(v, cfg, stream)?,
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => write_audio_sample_entry(v, cfg, stream)?,
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
write_audio_sample_entry(v, cfg, stream)?
}
"application/x-onvif-metadata" => write_xml_meta_data_sample_entry(v, cfg, stream)?,
_ => unreachable!(),
}
@ -1103,9 +1098,9 @@ fn write_visual_sample_entry(
"professional" => 2,
_ => unreachable!(),
};
// TODO: Use `gst_codec_utils_av1_get_seq_level_idx` when exposed in bindings
let level = av1_seq_level_idx(s.get::<&str>("level").ok());
let tier = av1_tier(s.get::<&str>("tier").ok());
let level = 1; // FIXME
let tier = 0; // FIXME
let (high_bitdepth, twelve_bit) =
match s.get::<u32>("bit-depth-luma").unwrap() {
8 => (false, false),
@ -1150,10 +1145,6 @@ fn write_visual_sample_entry(
v.extend_from_slice(&codec_data);
}
if let Some(extra_data) = &stream.extra_header_data {
// configOBUs
v.extend_from_slice(extra_data.as_slice());
}
Ok(())
})?;
}
@ -1262,44 +1253,6 @@ fn write_visual_sample_entry(
Ok(())
}
fn av1_seq_level_idx(level: Option<&str>) -> u8 {
match level {
Some("2.0") => 0,
Some("2.1") => 1,
Some("2.2") => 2,
Some("2.3") => 3,
Some("3.0") => 4,
Some("3.1") => 5,
Some("3.2") => 6,
Some("3.3") => 7,
Some("4.0") => 8,
Some("4.1") => 9,
Some("4.2") => 10,
Some("4.3") => 11,
Some("5.0") => 12,
Some("5.1") => 13,
Some("5.2") => 14,
Some("5.3") => 15,
Some("6.0") => 16,
Some("6.1") => 17,
Some("6.2") => 18,
Some("6.3") => 19,
Some("7.0") => 20,
Some("7.1") => 21,
Some("7.2") => 22,
Some("7.3") => 23,
_ => 1,
}
}
fn av1_tier(tier: Option<&str>) -> u8 {
match tier {
Some("main") => 0,
Some("high") => 1,
_ => 0,
}
}
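The av1_seq_level_idx table admits a closed form, assuming (as AV1's level numbering suggests) that levels 2.0 through 7.3 map to seq_level_idx as (major - 2) * 4 + minor; a sketch under that assumption:

fn av1_seq_level_idx_closed(level: &str) -> Option<u8> {
    let (major, minor) = level.split_once('.')?;
    let (major, minor): (u8, u8) = (major.parse().ok()?, minor.parse().ok()?);
    ((2..=7).contains(&major) && minor <= 3).then(|| (major - 2) * 4 + minor)
}

// av1_seq_level_idx_closed("4.1") == Some(9), matching the table above.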
fn write_audio_sample_entry(
v: &mut Vec<u8>,
_cfg: &super::HeaderConfiguration,
@ -1309,7 +1262,6 @@ fn write_audio_sample_entry(
let fourcc = match s.name().as_str() {
"audio/mpeg" => b"mp4a",
"audio/x-opus" => b"Opus",
"audio/x-flac" => b"fLaC",
"audio/x-alaw" => b"alaw",
"audio/x-mulaw" => b"ulaw",
"audio/x-adpcm" => {
@ -1328,10 +1280,6 @@ fn write_audio_sample_entry(
let bitrate = s.get::<i32>("bitrate").context("no ADPCM bitrate field")?;
(bitrate / 8000) as u16
}
"audio/x-flac" => with_flac_metadata(&stream.caps, |streaminfo, _| {
1 + (u16::from_be_bytes([streaminfo[16], streaminfo[17]]) >> 4 & 0b11111)
})
.context("FLAC metadata error")?,
_ => 16u16,
};
@ -1374,9 +1322,6 @@ fn write_audio_sample_entry(
"audio/x-opus" => {
write_dops(v, &stream.caps)?;
}
"audio/x-flac" => {
write_dfla(v, &stream.caps)?;
}
"audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
// Nothing to do here
}
@ -1571,35 +1516,6 @@ fn write_dops(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
})
}
fn with_flac_metadata<R>(
caps: &gst::Caps,
cb: impl FnOnce(&[u8], &[gst::glib::SendValue]) -> R,
) -> Result<R, Error> {
let caps = caps.structure(0).unwrap();
let header = caps.get::<gst::ArrayRef>("streamheader").unwrap();
let (streaminfo, remainder) = header.as_ref().split_first().unwrap();
let streaminfo = streaminfo.get::<&gst::BufferRef>().unwrap();
let streaminfo = streaminfo.map_readable().unwrap();
// 13 bytes for the Ogg/FLAC prefix and 38 for the streaminfo itself.
match <&[_; 13 + 38]>::try_from(streaminfo.as_slice()) {
Ok(i) if i.starts_with(b"\x7FFLAC\x01\x00") => Ok(cb(&i[13..], remainder)),
Ok(_) | Err(_) => bail!("Unknown streamheader format"),
}
}
fn write_dfla(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
write_full_box(v, b"dfLa", 0, 0, move |v| {
with_flac_metadata(caps, |streaminfo, remainder| {
v.extend(streaminfo);
for metadata in remainder {
let metadata = metadata.get::<&gst::BufferRef>().unwrap();
let metadata = metadata.map_readable().unwrap();
v.extend(&metadata[..]);
}
})
})
}
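For orientation, the layout with_flac_metadata relies on: the 13/38 split is from the check above, while the field breakdown follows the FLAC-in-Ogg mapping and is an assumption here. 0x7F, "FLAC", two version bytes, a two-byte header count, and "fLaC" form the 13-byte prefix; a 4-byte metadata block header plus the 34-byte STREAMINFO body gives the 38:

const OGG_FLAC_PREFIX: usize = 1 + 4 + 2 + 2 + 4; // 0x7F "FLAC" version header-count "fLaC"
const STREAMINFO_BLOCK: usize = 4 + 34;           // block header + STREAMINFO body
// OGG_FLAC_PREFIX == 13 and STREAMINFO_BLOCK == 38, the sizes checked above.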
fn write_xml_meta_data_sample_entry(
v: &mut Vec<u8>,
_cfg: &super::HeaderConfiguration,

View file

@ -16,7 +16,6 @@ use std::collections::VecDeque;
use std::mem;
use std::sync::Mutex;
use crate::fmp4mux::obu::read_seq_header_obu_bytes;
use once_cell::sync::Lazy;
use super::boxes;
@ -206,8 +205,6 @@ struct Stream {
caps: gst::Caps,
/// Whether this stream is intra-only and has frame reordering.
delta_frames: DeltaFrames,
/// Whether this stream might have header frames without timestamps that should be ignored.
discard_header_buffers: bool,
/// Currently queued GOPs, including incomplete ones.
queued_gops: VecDeque<Gop>,
@ -225,8 +222,6 @@ struct Stream {
/// Mapping between running time and UTC time in ONVIF mode.
running_time_utc_time_mapping: Option<(gst::Signed<gst::ClockTime>, gst::ClockTime)>,
extra_header_data: Option<Vec<u8>>,
}
#[derive(Default)]
@ -253,8 +248,6 @@ struct State {
end_pts: Option<gst::ClockTime>,
/// Start DTS of the whole stream
start_dts: Option<gst::ClockTime>,
/// Language code from tags
language_code: Option<[u8; 3]>,
/// Start PTS of the current fragment
fragment_start_pts: Option<gst::ClockTime>,
@ -278,17 +271,11 @@ pub(crate) struct FMP4Mux {
impl FMP4Mux {
/// Checks if a buffer is valid according to the stream configuration.
fn check_buffer(buffer: &gst::BufferRef, stream: &Stream) -> Result<(), gst::FlowError> {
let Stream {
sinkpad,
delta_frames,
discard_header_buffers,
..
} = stream;
if *discard_header_buffers && buffer.flags().contains(gst::BufferFlags::HEADER) {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
fn check_buffer(
buffer: &gst::BufferRef,
sinkpad: &super::FMP4MuxPad,
delta_frames: super::DeltaFrames,
) -> Result<(), gst::FlowError> {
if delta_frames.requires_dts() && buffer.dts().is_none() {
gst::error!(CAT, obj: sinkpad, "Require DTS for video streams");
return Err(gst::FlowError::Error);
@ -327,10 +314,12 @@ impl FMP4Mux {
}
// Pop the buffer here; it will be stored in the pre-queue after its timestamps are calculated
let Some(mut buffer) = stream.sinkpad.pop_buffer() else {
return Ok(None);
let mut buffer = match stream.sinkpad.pop_buffer() {
None => return Ok(None),
Some(buffer) => buffer,
};
Self::check_buffer(&buffer, stream)?;
Self::check_buffer(&buffer, &stream.sinkpad, stream.delta_frames)?;
let segment = match stream.sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
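This hunk trades a let-else binding for an explicit match, presumably to keep the crate building on toolchains older than Rust 1.65; the two forms behave identically (illustration only):

fn demo(maybe: Option<u32>) -> Option<u32> {
    // let Some(x) = maybe else { return None; };   // let-else, Rust >= 1.65
    let x = match maybe {                           // equivalent match form
        None => return None,
        Some(x) => x,
    };
    Some(x)
}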
@ -803,22 +792,6 @@ impl FMP4Mux {
stream.dts_offset.display(),
);
// If the stream is AV1, we need to parse the SequenceHeader OBU to include in the
// extra data of the 'av1C' box. It makes the stream playable in some browsers.
let s = stream.caps.structure(0).unwrap();
if !buffer.flags().contains(gst::BufferFlags::DELTA_UNIT)
&& s.name().as_str() == "video/x-av1"
{
let buf_map = buffer.map_readable().map_err(|_| {
gst::error!(CAT, obj: stream.sinkpad, "Failed to map buffer");
gst::FlowError::Error
})?;
stream.extra_header_data = read_seq_header_obu_bytes(buf_map.as_slice()).map_err(|_| {
gst::error!(CAT, obj: stream.sinkpad, "Failed to parse AV1 SequenceHeader OBU");
gst::FlowError::Error
})?;
}
let gop = Gop {
start_pts: pts,
start_dts: dts,
@ -1063,7 +1036,7 @@ impl FMP4Mux {
// previously drained a partial GOP because the GOP is ending too far after the
// planned fragment end.
if gop.start_pts > fragment_end_pts
&& !gop.buffers.first().is_some_and(|b| {
&& !gop.buffers.first().map_or(false, |b| {
b.buffer.flags().contains(gst::BufferFlags::DELTA_UNIT)
})
{
@ -1144,7 +1117,7 @@ impl FMP4Mux {
if gop.end_pts >= chunk_end_pts
// only if there's another GOP or at least one further buffer
&& (gop_idx > 0
|| last_pts.is_some_and(|last_pts| last_pts.saturating_sub(chunk_start_pts) > chunk_duration))
|| last_pts.map_or(false, |last_pts| last_pts.saturating_sub(chunk_start_pts) > chunk_duration))
{
gst::debug!(CAT, obj: stream.sinkpad, "Stream queued enough data for this chunk");
stream.chunk_filled = true;
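Several hunks in this file swap Option::is_some_and for Option::map_or(false, …); the two are interchangeable, is_some_and simply requires Rust 1.70 or newer:

let v: Option<u32> = Some(3);
assert_eq!(v.is_some_and(|x| x > 2), v.map_or(false, |x| x > 2));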
@ -1303,7 +1276,7 @@ impl FMP4Mux {
};
let fku_time =
if current_position.is_some_and(|current_position| current_position > fku_time) {
if current_position.map_or(false, |current_position| current_position > fku_time) {
gst::warning!(
CAT,
obj: stream.sinkpad,
@ -1856,14 +1829,14 @@ impl FMP4Mux {
.iter()
.find(|s| {
!s.sinkpad.is_eos()
&& s.queued_gops.back().is_some_and(|gop| {
&& s.queued_gops.back().map_or(false, |gop| {
gop.start_pts <= fragment_start_pts + settings.fragment_duration
// In chunk mode we might've drained a partial GOP as a chunk after
// the fragment end if the keyframe came too late. The GOP now
// starts with a non-keyframe after the fragment end but is part of
// the fragment: the fragment is extended after the end. Allow this
// situation here.
|| gop.buffers.first().is_some_and(|b| {
|| gop.buffers.first().map_or(false, |b| {
b.buffer.flags().contains(gst::BufferFlags::DELTA_UNIT)
})
})
@ -1934,7 +1907,7 @@ impl FMP4Mux {
// If nothing was dequeued for the first stream then this is OK if we're at
// EOS or this stream simply has only buffers after this chunk: we just
// consider the next stream as first stream then.
let stream_after_chunk = stream.queued_gops.back().is_some_and(|gop| {
let stream_after_chunk = stream.queued_gops.back().map_or(false, |gop| {
gop.start_pts
>= if fragment_filled {
fragment_start_pts + settings.fragment_duration
@ -2214,7 +2187,7 @@ impl FMP4Mux {
};
let fku_time =
if current_position.is_some_and(|current_position| current_position > fku_time) {
if current_position.map_or(false, |current_position| current_position > fku_time) {
gst::warning!(
CAT,
obj: stream.sinkpad,
@ -2582,7 +2555,6 @@ impl FMP4Mux {
let s = caps.structure(0).unwrap();
let mut delta_frames = DeltaFrames::IntraOnly;
let mut discard_header_buffers = false;
match s.name().as_str() {
"video/x-h264" | "video/x-h265" => {
if !s.has_field_with_type("codec_data", gst::Buffer::static_type()) {
@ -2626,13 +2598,6 @@ impl FMP4Mux {
return Err(gst::FlowError::NotNegotiated);
}
}
"audio/x-flac" => {
discard_header_buffers = true;
if let Err(e) = s.get::<gst::ArrayRef>("streamheader") {
gst::error!(CAT, obj: pad, "Muxing FLAC into MP4 needs streamheader: {}", e);
return Err(gst::FlowError::NotNegotiated);
};
}
"audio/x-alaw" | "audio/x-mulaw" => (),
"audio/x-adpcm" => (),
"application/x-onvif-metadata" => (),
@ -2643,7 +2608,6 @@ impl FMP4Mux {
sinkpad: pad,
caps,
delta_frames,
discard_header_buffers,
pre_queue: VecDeque::new(),
queued_gops: VecDeque::new(),
fragment_filled: false,
@ -2651,7 +2615,6 @@ impl FMP4Mux {
dts_offset: None,
current_position: gst::ClockTime::ZERO,
running_time_utc_time_mapping: None,
extra_header_data: None,
});
}
@ -2719,7 +2682,6 @@ impl FMP4Mux {
trak_timescale: s.sinkpad.imp().settings.lock().unwrap().trak_timescale,
delta_frames: s.delta_frames,
caps: s.caps.clone(),
extra_header_data: s.extra_header_data.clone(),
})
.collect::<Vec<_>>();
@ -2730,7 +2692,6 @@ impl FMP4Mux {
streams,
write_mehd: settings.write_mehd,
duration: if at_eos { duration } else { None },
language_code: state.language_code,
start_utc_time: if variant == super::Variant::ONVIF {
state
.earliest_pts
@ -3164,22 +3125,8 @@ impl AggregatorImpl for FMP4Mux {
self.parent_sink_event(aggregator_pad, event)
}
EventView::Tag(ev) => {
if let Some(tag_value) = ev.tag().get::<gst::tags::LanguageCode>() {
let lang = tag_value.get();
gst::trace!(CAT, imp: self, "Received language code from tags: {:?}", lang);
// Language as ISO-639-2/T
if lang.len() == 3 && lang.chars().all(|c| c.is_ascii_lowercase()) {
let mut state = self.state.lock().unwrap();
let mut language_code: [u8; 3] = [0; 3];
for (out, c) in Iterator::zip(language_code.iter_mut(), lang.chars()) {
*out = c as u8;
}
state.language_code = Some(language_code);
}
}
EventView::Tag(_ev) => {
// TODO: Maybe store for putting into the headers of the next fragment?
self.parent_sink_event(aggregator_pad, event)
}
@ -3518,11 +3465,6 @@ impl ElementImpl for ISOFMP4Mux {
.field("channels", gst::IntRange::new(1i32, 8))
.field("rate", gst::IntRange::new(1, i32::MAX))
.build(),
gst::Structure::builder("audio/x-flac")
.field("framed", true)
.field("channels", gst::IntRange::<i32>::new(1, 8))
.field("rate", gst::IntRange::<i32>::new(1, 10 * u16::MAX as i32))
.build(),
]
.into_iter()
.collect::<gst::Caps>(),
@ -3594,19 +3536,6 @@ impl ElementImpl for CMAFMux {
.field("width", gst::IntRange::new(1, u16::MAX as i32))
.field("height", gst::IntRange::new(1, u16::MAX as i32))
.build(),
gst::Structure::builder("video/x-av1")
.field("stream-format", "obu-stream")
.field("alignment", "tu")
.field("profile", gst::List::new(["main", "high", "professional"]))
.field(
"chroma-format",
gst::List::new(["4:0:0", "4:2:0", "4:2:2", "4:4:4"]),
)
.field("bit-depth-luma", gst::List::new([8u32, 10u32, 12u32]))
.field("bit-depth-chroma", gst::List::new([8u32, 10u32, 12u32]))
.field("width", gst::IntRange::new(1, u16::MAX as i32))
.field("height", gst::IntRange::new(1, u16::MAX as i32))
.build(),
gst::Structure::builder("video/x-h265")
.field("stream-format", gst::List::new(["hvc1", "hev1"]))
.field("alignment", "au")

View file

@ -12,8 +12,6 @@ use gst::prelude::*;
mod boxes;
mod imp;
mod obu;
glib::wrapper! {
pub(crate) struct FMP4MuxPad(ObjectSubclass<imp::FMP4MuxPad>) @extends gst_base::AggregatorPad, gst::Pad, gst::Object;
}
@ -87,7 +85,6 @@ pub(crate) struct HeaderConfiguration {
write_mehd: bool,
duration: Option<gst::ClockTime>,
language_code: Option<[u8; 3]>,
/// Start UTC time in ONVIF mode.
/// Since Jan 1 1601 in 100ns units.
@ -104,9 +101,6 @@ pub(crate) struct HeaderStream {
/// Pre-defined trak timescale if not 0.
trak_timescale: u32,
// More data to be included in the fragmented stream header
extra_header_data: Option<Vec<u8>>,
}
#[derive(Debug)]

View file

@ -1,303 +0,0 @@
//
// Copyright (C) 2022 Vivienne Watermeier <vwatermeier@igalia.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(non_camel_case_types)]
use bitstream_io::{BigEndian, BitRead, BitReader, Endianness};
use std::io::{self, Cursor, Read, Seek, SeekFrom};
pub fn parse_leb128<R, E>(reader: &mut BitReader<R, E>) -> io::Result<(u32, u32)>
where
R: Read + Seek,
E: Endianness,
{
let mut value = 0;
let mut num_bytes = 0;
for i in 0..8 {
let byte = reader.read::<u32>(8)?;
value |= (byte & 0x7f) << (i * 7);
num_bytes += 1;
if byte & 0x80 == 0 {
break;
}
}
reader.byte_align();
Ok((value, num_bytes))
}
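A hand-worked decode of the leb128 loop above (example bytes, not from the source): 0x96 0x01 encodes 150, since 0x96 contributes 0x16 in the low 7 bits and sets the continuation bit, and 0x01 adds 1 << 7:

let bytes = [0x96u8, 0x01];
let mut value: u32 = 0;
for (i, b) in bytes.iter().enumerate() {
    value |= ((*b as u32) & 0x7f) << (i * 7);
    if b & 0x80 == 0 {
        break;
    }
}
assert_eq!(value, 150);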
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct SizedObu {
pub obu_type: ObuType,
pub has_extension: bool,
/// If the OBU header is followed by a leb128 size field.
pub has_size_field: bool,
pub temporal_id: u8,
pub spatial_id: u8,
/// size of the OBU payload in bytes.
/// This may refer to different sizes in different contexts, not always
/// to the entire OBU payload as it is in the AV1 bitstream.
pub size: u32,
/// the number of bytes the leb128 size field will take up
/// when written with write_leb128().
/// This does not imply `has_size_field`, and does not necessarily match with
/// the length of the internal size field if present.
pub leb_size: u32,
pub header_len: u32,
/// indicates that only part of this OBU has been processed so far
pub is_fragment: bool,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ObuType {
Reserved,
SequenceHeader,
TemporalDelimiter,
FrameHeader,
TileGroup,
Metadata,
Frame,
RedundantFrameHeader,
TileList,
Padding,
}
impl Default for ObuType {
fn default() -> Self {
Self::Reserved
}
}
impl SizedObu {
/// Parse an OBU header and size field. If the OBU is not expected to contain
/// a size field, but the size is known from external information,
/// parse as an `UnsizedObu` and use `to_sized`.
pub fn parse<R, E>(reader: &mut BitReader<R, E>) -> io::Result<Self>
where
R: Read + Seek,
E: Endianness,
{
// check the forbidden bit
if reader.read_bit()? {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"forbidden bit in OBU header is set",
));
}
let obu_type = reader.read::<u8>(4)?.into();
let has_extension = reader.read_bit()?;
// require a size field
if !reader.read_bit()? {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"expected a size field",
));
}
// ignore the reserved bit
let _ = reader.read_bit()?;
let (temporal_id, spatial_id) = if has_extension {
(reader.read::<u8>(3)?, reader.read::<u8>(2)?)
} else {
(0, 0)
};
reader.byte_align();
let (size, leb_size) = parse_leb128(reader)?;
Ok(Self {
obu_type,
has_extension,
has_size_field: true,
temporal_id,
spatial_id,
size,
leb_size,
header_len: has_extension as u32 + 1,
is_fragment: false,
})
}
/// The amount of bytes this OBU will take up, including the space needed for
/// its leb128 size field.
pub fn full_size(&self) -> u32 {
self.size + self.leb_size + self.header_len
}
}
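Worked decode of the SequenceHeader test vector further below, following SizedObu::parse bit by bit:

// Header byte 0b0000_1110:
//   bit 7 forbidden = 0, bits 6..3 obu_type = 0b0001 (SequenceHeader),
//   bit 2 has_extension = 1, bit 1 has_size_field = 1, bit 0 reserved = 0.
// Extension byte 0b1001_1000: temporal_id = 0b100 (4), spatial_id = 0b11 (3).
// leb128 byte 0b0000_0101: size = 5, leb_size = 1,
// so full_size() = 5 + 1 + 2 (header incl. extension) = 8 bytes.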
pub fn read_seq_header_obu_bytes(data: &[u8]) -> io::Result<Option<Vec<u8>>> {
let mut cursor = Cursor::new(data);
while cursor.position() < data.len() as u64 {
let obu_start = cursor.position();
let Ok(obu) = SizedObu::parse(&mut BitReader::endian(&mut cursor, BigEndian)) else {
break;
};
// set reader to the beginning of the OBU
cursor.seek(SeekFrom::Start(obu_start))?;
if obu.obu_type != ObuType::SequenceHeader {
// Skip the full OBU
cursor.seek(SeekFrom::Current(obu.full_size() as i64))?;
continue;
};
// read the full OBU
let mut bytes = vec![0; obu.full_size() as usize];
cursor.read_exact(&mut bytes)?;
return Ok(Some(bytes));
}
Ok(None)
}
impl From<u8> for ObuType {
fn from(n: u8) -> Self {
assert!(n < 16);
match n {
1 => Self::SequenceHeader,
2 => Self::TemporalDelimiter,
3 => Self::FrameHeader,
4 => Self::TileGroup,
5 => Self::Metadata,
6 => Self::Frame,
7 => Self::RedundantFrameHeader,
8 => Self::TileList,
15 => Self::Padding,
_ => Self::Reserved,
}
}
}
impl From<ObuType> for u8 {
fn from(ty: ObuType) -> Self {
match ty {
ObuType::Reserved => 0,
ObuType::SequenceHeader => 1,
ObuType::TemporalDelimiter => 2,
ObuType::FrameHeader => 3,
ObuType::TileGroup => 4,
ObuType::Metadata => 5,
ObuType::Frame => 6,
ObuType::RedundantFrameHeader => 7,
ObuType::TileList => 8,
ObuType::Padding => 15,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use bitstream_io::{BigEndian, BitReader};
use once_cell::sync::Lazy;
use std::io::Cursor;
#[allow(clippy::type_complexity)]
static OBUS: Lazy<Vec<(SizedObu, Vec<u8>)>> = Lazy::new(|| {
vec![
(
SizedObu {
obu_type: ObuType::TemporalDelimiter,
has_extension: false,
has_size_field: true,
temporal_id: 0,
spatial_id: 0,
size: 0,
leb_size: 1,
header_len: 1,
is_fragment: false,
},
vec![0b0001_0010, 0b0000_0000],
),
(
SizedObu {
obu_type: ObuType::Padding,
has_extension: false,
has_size_field: true,
temporal_id: 0,
spatial_id: 0,
size: 10,
leb_size: 1,
header_len: 1,
is_fragment: false,
},
vec![0b0111_1010, 0b0000_1010, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
),
(
SizedObu {
obu_type: ObuType::SequenceHeader,
has_extension: true,
has_size_field: true,
temporal_id: 4,
spatial_id: 3,
size: 5,
leb_size: 1,
header_len: 2,
is_fragment: false,
},
vec![0b0000_1110, 0b1001_1000, 0b0000_0101, 1, 2, 3, 4, 5],
),
(
SizedObu {
obu_type: ObuType::Frame,
has_extension: true,
has_size_field: true,
temporal_id: 4,
spatial_id: 3,
size: 5,
leb_size: 1,
header_len: 2,
is_fragment: false,
},
vec![0b0011_0110, 0b1001_1000, 0b0000_0101, 1, 2, 3, 4, 5],
),
]
});
#[test]
fn test_parse_rtp_obu() {
for (idx, (sized_obu, raw_bytes)) in (*OBUS).iter().enumerate() {
println!("running test {idx}...");
let mut reader = BitReader::endian(Cursor::new(&raw_bytes), BigEndian);
let obu_parsed = SizedObu::parse(&mut reader).unwrap();
assert_eq!(&obu_parsed, sized_obu);
if let Some(seq_header_obu_bytes) = read_seq_header_obu_bytes(raw_bytes).unwrap() {
println!("validation of sequence header obu read/write...");
assert_eq!(&seq_header_obu_bytes, raw_bytes);
}
}
}
#[test]
fn test_read_seq_header_from_bitstream() {
let mut bitstream = Vec::new();
let mut seq_header_bytes_raw = None;
for (obu, raw_bytes) in (*OBUS).iter() {
bitstream.extend(raw_bytes);
if obu.obu_type == ObuType::SequenceHeader {
seq_header_bytes_raw = Some(raw_bytes.clone());
}
}
let seq_header_obu_bytes = read_seq_header_obu_bytes(&bitstream).unwrap().unwrap();
assert_eq!(seq_header_obu_bytes, seq_header_bytes_raw.unwrap());
}
}

View file

@ -19,33 +19,6 @@ fn init() {
});
}
fn to_completion(pipeline: &gst::Pipeline) {
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in pipeline.bus().unwrap().iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
panic!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
fn test_buffer_flags_single_stream(cmaf: bool, set_dts: bool, caps: gst::Caps) {
let mut h = if cmaf {
gst_check::Harness::new("cmafmux")
@ -236,26 +209,6 @@ fn test_buffer_flags_single_vp9_stream_iso() {
test_buffer_flags_single_stream(false, false, caps);
}
#[test]
fn test_buffer_flags_single_av1_stream_cmaf() {
init();
let caps = gst::Caps::builder("video/x-av1")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("profile", "main")
.field("tier", "main")
.field("level", "4.1")
.field("chroma-format", "4:2:0")
.field("bit-depth-luma", 8u32)
.field("bit-depth-chroma", 8u32)
.field("colorimetry", "bt709")
.build();
test_buffer_flags_single_stream(true, false, caps);
}
#[test]
fn test_buffer_flags_multi_stream() {
init();
@ -2040,21 +1993,3 @@ fn test_chunking_single_stream_gops_after_fragment_end_after_next_chunk_end() {
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Eos);
}
#[test]
fn test_roundtrip_vp9_flac() {
init();
let pipeline = gst::parse::launch(
r#"
videotestsrc num-buffers=99 ! vp9enc ! vp9parse ! mux.
audiotestsrc num-buffers=149 ! flacenc ! flacparse ! mux.
isofmp4mux name=mux ! qtdemux name=demux
demux.audio_0 ! queue ! flacdec ! fakesink
demux.video_0 ! queue ! vp9dec ! fakesink
"#,
)
.unwrap();
let pipeline = pipeline.downcast().unwrap();
to_completion(&pipeline);
}

View file

@ -16,7 +16,6 @@ gst-audio = { workspace = true, features = ["v1_18"] }
gst-video = { workspace = true, features = ["v1_18"] }
gst-pbutils = { workspace = true, features = ["v1_18"] }
once_cell.workspace = true
bitstream-io = "2.3"
[lib]
name = "gstmp4"

View file

@ -9,7 +9,7 @@
use gst::prelude::*;
use anyhow::{anyhow, bail, Context, Error};
use std::convert::TryFrom;
use std::str::FromStr;
fn write_box<T, F: FnOnce(&mut Vec<u8>) -> Result<T, Error>>(
@ -56,31 +56,18 @@ fn write_full_box<T, F: FnOnce(&mut Vec<u8>) -> Result<T, Error>>(
}
/// Creates `ftyp` box
pub(super) fn create_ftyp(
variant: super::Variant,
content_caps: &[&gst::CapsRef],
) -> Result<gst::Buffer, Error> {
pub(super) fn create_ftyp(variant: super::Variant) -> Result<gst::Buffer, Error> {
let mut v = vec![];
let mut minor_version = 0u32;
let (brand, mut compatible_brands) = match variant {
let (brand, compatible_brands) = match variant {
super::Variant::ISO | super::Variant::ONVIF => (b"iso4", vec![b"mp41", b"mp42", b"isom"]),
};
for caps in content_caps {
let s = caps.structure(0).unwrap();
if let (super::Variant::ISO, "video/x-av1") = (variant, s.name().as_str()) {
minor_version = 1;
compatible_brands = vec![b"iso4", b"av01"];
break;
}
}
write_box(&mut v, b"ftyp", |v| {
// major brand
v.extend(brand);
// minor version
v.extend(minor_version.to_be_bytes());
v.extend(0u32.to_be_bytes());
// compatible brands
v.extend(compatible_brands.into_iter().flatten());
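For reference, the default iso4 ftyp produced by this closure is 28 bytes: a 4-byte size and 4-byte type, then major brand, minor version, and three compatible brands. A hand-assembled sketch, assuming the write_box helper prepends the size and type fields:

let mut v = Vec::new();
v.extend(28u32.to_be_bytes()); // box size: 8 header + 8 brand/version + 3 * 4 brands
v.extend(b"ftyp");
v.extend(b"iso4");             // major brand
v.extend(0u32.to_be_bytes());  // minor version
for brand in [b"mp41", b"mp42", b"isom"] {
    v.extend(brand);           // compatible brands
}
assert_eq!(v.len(), 28);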
@ -395,8 +382,9 @@ fn write_tkhd(
// Volume
let s = stream.caps.structure(0).unwrap();
match s.name().as_str() {
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => v.extend((1u16 << 8).to_be_bytes()),
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
v.extend((1u16 << 8).to_be_bytes())
}
_ => v.extend(0u16.to_be_bytes()),
}
@ -472,6 +460,7 @@ fn write_mdia(
fn language_code(lang: impl std::borrow::Borrow<[u8; 3]>) -> u16 {
let lang = lang.borrow();
// TODO: Need to relax this once we get the language code from tags
assert!(lang.iter().all(u8::is_ascii_lowercase));
(((lang[0] as u16 - 0x60) & 0x1F) << 10)
@ -481,7 +470,7 @@ fn language_code(lang: impl std::borrow::Borrow<[u8; 3]>) -> u16 {
fn write_mdhd(
v: &mut Vec<u8>,
header: &super::Header,
_header: &super::Header,
stream: &super::Stream,
creation_time: u64,
) -> Result<(), Error> {
@ -504,11 +493,8 @@ fn write_mdhd(
v.extend(duration.to_be_bytes());
// Language as ISO-639-2/T
if let Some(lang) = header.language_code {
v.extend(language_code(lang).to_be_bytes());
} else {
v.extend(language_code(b"und").to_be_bytes());
}
// TODO: get actual language from the tags
v.extend(language_code(b"und").to_be_bytes());
// Pre-defined
v.extend([0u8; 2]);
@ -528,8 +514,9 @@ fn write_hdlr(
let (handler_type, name) = match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => (b"vide", b"VideoHandler\0".as_slice()),
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => (b"soun", b"SoundHandler\0".as_slice()),
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
(b"soun", b"SoundHandler\0".as_slice())
}
"application/x-onvif-metadata" => (b"meta", b"MetadataHandler\0".as_slice()),
_ => unreachable!(),
};
@ -559,8 +546,7 @@ fn write_minf(
// Flags are always 1 for unspecified reasons
write_full_box(v, b"vmhd", FULL_BOX_VERSION_0, 1, |v| write_vmhd(v, header))?
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => {
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
write_full_box(v, b"smhd", FULL_BOX_VERSION_0, FULL_BOX_FLAGS_NONE, |v| {
write_smhd(v, header)
})?
@ -717,8 +703,9 @@ fn write_stsd(
match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => write_visual_sample_entry(v, header, stream)?,
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => write_audio_sample_entry(v, header, stream)?,
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
write_audio_sample_entry(v, header, stream)?
}
"application/x-onvif-metadata" => write_xml_meta_data_sample_entry(v, header, stream)?,
_ => unreachable!(),
}
@ -929,9 +916,8 @@ fn write_visual_sample_entry(
_ => unreachable!(),
};
// TODO: Use `gst_codec_utils_av1_get_seq_level_idx` when exposed in bindings
let level = av1_seq_level_idx(s.get::<&str>("level").ok());
let tier = av1_tier(s.get::<&str>("tier").ok());
let level = 1; // FIXME
let tier = 0; // FIXME
let (high_bitdepth, twelve_bit) =
match s.get::<u32>("bit-depth-luma").unwrap() {
8 => (false, false),
@ -976,10 +962,6 @@ fn write_visual_sample_entry(
v.extend_from_slice(&codec_data);
}
if let Some(extra_data) = &stream.extra_header_data {
// unsigned int(8) configOBUs[];
v.extend_from_slice(extra_data.as_slice());
}
Ok(())
})?;
}
@ -1088,44 +1070,6 @@ fn write_visual_sample_entry(
Ok(())
}
fn av1_seq_level_idx(level: Option<&str>) -> u8 {
match level {
Some("2.0") => 0,
Some("2.1") => 1,
Some("2.2") => 2,
Some("2.3") => 3,
Some("3.0") => 4,
Some("3.1") => 5,
Some("3.2") => 6,
Some("3.3") => 7,
Some("4.0") => 8,
Some("4.1") => 9,
Some("4.2") => 10,
Some("4.3") => 11,
Some("5.0") => 12,
Some("5.1") => 13,
Some("5.2") => 14,
Some("5.3") => 15,
Some("6.0") => 16,
Some("6.1") => 17,
Some("6.2") => 18,
Some("6.3") => 19,
Some("7.0") => 20,
Some("7.1") => 21,
Some("7.2") => 22,
Some("7.3") => 23,
_ => 1,
}
}
fn av1_tier(tier: Option<&str>) -> u8 {
match tier {
Some("main") => 0,
Some("high") => 1,
_ => 0,
}
}
fn write_audio_sample_entry(
v: &mut Vec<u8>,
_header: &super::Header,
@ -1135,7 +1079,6 @@ fn write_audio_sample_entry(
let fourcc = match s.name().as_str() {
"audio/mpeg" => b"mp4a",
"audio/x-opus" => b"Opus",
"audio/x-flac" => b"fLaC",
"audio/x-alaw" => b"alaw",
"audio/x-mulaw" => b"ulaw",
"audio/x-adpcm" => {
@ -1154,10 +1097,6 @@ fn write_audio_sample_entry(
let bitrate = s.get::<i32>("bitrate").context("no ADPCM bitrate field")?;
(bitrate / 8000) as u16
}
"audio/x-flac" => with_flac_metadata(&stream.caps, |streaminfo, _| {
1 + (u16::from_be_bytes([streaminfo[16], streaminfo[17]]) >> 4 & 0b11111)
})
.context("FLAC metadata error")?,
_ => 16u16,
};
@ -1200,9 +1139,6 @@ fn write_audio_sample_entry(
"audio/x-opus" => {
write_dops(v, &stream.caps)?;
}
"audio/x-flac" => {
write_dfla(v, &stream.caps)?;
}
"audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
// Nothing to do here
}
@ -1397,35 +1333,6 @@ fn write_dops(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
})
}
fn with_flac_metadata<R>(
caps: &gst::Caps,
cb: impl FnOnce(&[u8], &[gst::glib::SendValue]) -> R,
) -> Result<R, Error> {
let caps = caps.structure(0).unwrap();
let header = caps.get::<gst::ArrayRef>("streamheader").unwrap();
let (streaminfo, remainder) = header.as_ref().split_first().unwrap();
let streaminfo = streaminfo.get::<&gst::BufferRef>().unwrap();
let streaminfo = streaminfo.map_readable().unwrap();
// 13 bytes for the Ogg/FLAC prefix and 38 for the streaminfo itself.
match <&[_; 13 + 38]>::try_from(streaminfo.as_slice()) {
Ok(i) if i.starts_with(b"\x7FFLAC\x01\x00") => Ok(cb(&i[13..], remainder)),
Ok(_) | Err(_) => bail!("Unknown streamheader format"),
}
}
fn write_dfla(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
write_full_box(v, b"dfLa", 0, 0, move |v| {
with_flac_metadata(caps, |streaminfo, remainder| {
v.extend(streaminfo);
for metadata in remainder {
let metadata = metadata.get::<&gst::BufferRef>().unwrap();
let metadata = metadata.map_readable().unwrap();
v.extend(&metadata[..]);
}
})
})
}
fn write_xml_meta_data_sample_entry(
v: &mut Vec<u8>,
_header: &super::Header,

View file

@ -15,7 +15,6 @@ use gst_base::subclass::prelude::*;
use std::collections::VecDeque;
use std::sync::Mutex;
use crate::mp4mux::obu::read_seq_header_obu_bytes;
use once_cell::sync::Lazy;
use super::boxes;
@ -109,8 +108,6 @@ struct Stream {
caps: gst::Caps,
/// Whether this stream is intra-only and has frame reordering.
delta_frames: super::DeltaFrames,
/// Whether this stream might have header frames without timestamps that should be ignored.
discard_header_buffers: bool,
/// Already written out chunks with their samples for this stream
chunks: Vec<super::Chunk>,
@ -136,8 +133,6 @@ struct Stream {
/// In ONVIF mode, the mapping between running time and UTC time (UNIX)
running_time_utc_time_mapping: Option<(gst::Signed<gst::ClockTime>, gst::ClockTime)>,
extra_header_data: Option<Vec<u8>>,
}
#[derive(Default)]
@ -156,9 +151,6 @@ struct State {
/// Size of the `mdat` as written so far.
mdat_size: u64,
/// Language code from tags
language_code: Option<[u8; 3]>,
}
#[derive(Default)]
@ -173,12 +165,7 @@ impl MP4Mux {
buffer: &gst::BufferRef,
sinkpad: &super::MP4MuxPad,
delta_frames: super::DeltaFrames,
discard_headers: bool,
) -> Result<(), gst::FlowError> {
if discard_headers && buffer.flags().contains(gst::BufferFlags::HEADER) {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
if delta_frames.requires_dts() && buffer.dts().is_none() {
gst::error!(CAT, obj: sinkpad, "Require DTS for video streams");
return Err(gst::FlowError::Error);
@ -201,7 +188,6 @@ impl MP4Mux {
&self,
sinkpad: &super::MP4MuxPad,
delta_frames: super::DeltaFrames,
discard_headers: bool,
pre_queue: &mut VecDeque<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>,
running_time_utc_time_mapping: &Option<(gst::Signed<gst::ClockTime>, gst::ClockTime)>,
) -> Result<Option<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>, gst::FlowError> {
@ -209,10 +195,13 @@ impl MP4Mux {
return Ok(Some((segment.clone(), buffer.clone())));
}
let Some(mut buffer) = sinkpad.peek_buffer() else {
return Ok(None);
let mut buffer = match sinkpad.peek_buffer() {
None => return Ok(None),
Some(buffer) => buffer,
};
Self::check_buffer(&buffer, sinkpad, delta_frames, discard_headers)?;
Self::check_buffer(&buffer, sinkpad, delta_frames)?;
let mut segment = match sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
None => {
@ -287,20 +276,19 @@ impl MP4Mux {
fn pop_buffer(
&self,
stream: &mut Stream,
sinkpad: &super::MP4MuxPad,
delta_frames: super::DeltaFrames,
pre_queue: &mut VecDeque<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>,
running_time_utc_time_mapping: &mut Option<(gst::Signed<gst::ClockTime>, gst::ClockTime)>,
) -> Result<Option<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>, gst::FlowError> {
let Stream {
sinkpad, pre_queue, ..
} = stream;
// In ONVIF mode we need to get UTC times for each buffer and synchronize based on that.
// Queue up to 6s of data to get the first UTC time and then backdate.
if self.obj().class().as_ref().variant == super::Variant::ONVIF
&& stream.running_time_utc_time_mapping.is_none()
&& running_time_utc_time_mapping.is_none()
{
if let Some((last, first)) = Option::zip(pre_queue.back(), pre_queue.front()) {
// Existence of PTS/DTS checked below
let (last, first) = if stream.delta_frames.requires_dts() {
let (last, first) = if delta_frames.requires_dts() {
(
last.0.to_running_time_full(last.1.dts()).unwrap(),
first.0.to_running_time_full(first.1.dts()).unwrap(),
@ -324,20 +312,19 @@ impl MP4Mux {
}
}
let Some(buffer) = sinkpad.pop_buffer() else {
if sinkpad.is_eos() {
gst::error!(CAT, obj: sinkpad, "Got no UTC time before EOS");
return Err(gst::FlowError::Error);
} else {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
let buffer = match sinkpad.pop_buffer() {
None => {
if sinkpad.is_eos() {
gst::error!(CAT, obj: sinkpad, "Got no UTC time before EOS");
return Err(gst::FlowError::Error);
} else {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
}
Some(buffer) => buffer,
};
Self::check_buffer(
&buffer,
sinkpad,
stream.delta_frames,
stream.discard_header_buffers,
)?;
Self::check_buffer(&buffer, sinkpad, delta_frames)?;
let segment = match sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
@ -363,7 +350,7 @@ impl MP4Mux {
);
let mapping = (running_time, utc_time);
stream.running_time_utc_time_mapping = Some(mapping);
*running_time_utc_time_mapping = Some(mapping);
// Push the buffer onto the pre-queue and re-timestamp it and all other buffers
// based on the mapping above.
@ -404,7 +391,7 @@ impl MP4Mux {
// Fall through below and pop the first buffer finally
}
if let Some((segment, buffer)) = stream.pre_queue.pop_front() {
if let Some((segment, buffer)) = pre_queue.pop_front() {
return Ok(Some((segment, buffer)));
}
@ -413,26 +400,23 @@ impl MP4Mux {
// for calculating the duration to the previous buffer, and then put into the pre-queue
// - or this is the very first buffer and we just put it into the queue ourselves above
if self.obj().class().as_ref().variant == super::Variant::ONVIF {
if stream.sinkpad.is_eos() {
if sinkpad.is_eos() {
return Ok(None);
}
unreachable!();
}
let Some(buffer) = stream.sinkpad.pop_buffer() else {
return Ok(None);
let buffer = match sinkpad.pop_buffer() {
None => return Ok(None),
Some(buffer) => buffer,
};
Self::check_buffer(
&buffer,
&stream.sinkpad,
stream.delta_frames,
stream.discard_header_buffers,
)?;
let segment = match stream.sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Self::check_buffer(&buffer, sinkpad, delta_frames)?;
let segment = match sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
None => {
gst::error!(CAT, obj: stream.sinkpad, "Got buffer before segment");
gst::error!(CAT, obj: sinkpad, "Got buffer before segment");
return Err(gst::FlowError::Error);
}
};
@ -458,12 +442,6 @@ impl MP4Mux {
Some(PendingBuffer {
duration: Some(_), ..
}) => return Ok(()),
Some(PendingBuffer { ref buffer, .. })
if stream.discard_header_buffers
&& buffer.flags().contains(gst::BufferFlags::HEADER) =>
{
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
Some(PendingBuffer {
timestamp,
pts,
@ -471,15 +449,13 @@ impl MP4Mux {
ref mut duration,
..
}) => {
let peek_outcome = self.peek_buffer(
// Already have a pending buffer but no duration, so try to get that now
let (segment, buffer) = match self.peek_buffer(
&stream.sinkpad,
stream.delta_frames,
stream.discard_header_buffers,
&mut stream.pre_queue,
&stream.running_time_utc_time_mapping,
)?;
// Already have a pending buffer but no duration, so try to get that now
let (segment, buffer) = match peek_outcome {
)? {
Some(res) => res,
None => {
if stream.sinkpad.is_eos() {
@ -551,28 +527,17 @@ impl MP4Mux {
*duration = Some(dur);
// If the stream is AV1, we need to parse the SequenceHeader OBU to include in the
// extra data of the 'av1C' box. It makes the stream playable in some browsers.
let s = stream.caps.structure(0).unwrap();
if !buffer.flags().contains(gst::BufferFlags::DELTA_UNIT)
&& s.name().as_str() == "video/x-av1"
{
let buf_map = buffer.map_readable().map_err(|_| {
gst::error!(CAT, obj: stream.sinkpad, "Failed to map buffer");
gst::FlowError::Error
})?;
stream.extra_header_data = read_seq_header_obu_bytes(buf_map.as_slice()).map_err(|_| {
gst::error!(CAT, obj: stream.sinkpad, "Failed to parse AV1 SequenceHeader OBU");
gst::FlowError::Error
})?;
}
return Ok(());
}
None => {
// Have no buffer queued at all yet
let (segment, buffer) = match self.pop_buffer(stream)? {
let (segment, buffer) = match self.pop_buffer(
&stream.sinkpad,
stream.delta_frames,
&mut stream.pre_queue,
&mut stream.running_time_utc_time_mapping,
)? {
Some(res) => res,
None => {
if stream.sinkpad.is_eos() {
@ -905,7 +870,6 @@ impl MP4Mux {
let s = caps.structure(0).unwrap();
let mut delta_frames = super::DeltaFrames::IntraOnly;
let mut discard_header_buffers = false;
match s.name().as_str() {
"video/x-h264" | "video/x-h265" => {
if !s.has_field_with_type("codec_data", gst::Buffer::static_type()) {
@ -949,13 +913,6 @@ impl MP4Mux {
return Err(gst::FlowError::NotNegotiated);
}
}
"audio/x-flac" => {
discard_header_buffers = true;
if let Err(e) = s.get::<gst::ArrayRef>("streamheader") {
gst::error!(CAT, obj: pad, "Muxing FLAC into MP4 needs streamheader: {}", e);
return Err(gst::FlowError::NotNegotiated);
};
}
"audio/x-alaw" | "audio/x-mulaw" => (),
"audio/x-adpcm" => (),
"application/x-onvif-metadata" => (),
@ -967,7 +924,6 @@ impl MP4Mux {
pre_queue: VecDeque::new(),
caps,
delta_frames,
discard_header_buffers,
chunks: Vec::new(),
pending_buffer: None,
queued_chunk_time: gst::ClockTime::ZERO,
@ -976,7 +932,6 @@ impl MP4Mux {
earliest_pts: None,
end_pts: None,
running_time_utc_time_mapping: None,
extra_header_data: None,
});
}
@ -1189,25 +1144,6 @@ impl AggregatorImpl for MP4Mux {
}
self.parent_sink_event_pre_queue(aggregator_pad, event)
}
EventView::Tag(ev) => {
if let Some(tag_value) = ev.tag().get::<gst::tags::LanguageCode>() {
let lang = tag_value.get();
gst::trace!(CAT, imp: self, "Received language code from tags: {:?}", lang);
// Language as ISO-639-2/T
if lang.len() == 3 && lang.chars().all(|c| c.is_ascii_lowercase()) {
let mut state = self.state.lock().unwrap();
let mut language_code: [u8; 3] = [0; 3];
for (out, c) in Iterator::zip(language_code.iter_mut(), lang.chars()) {
*out = c as u8;
}
state.language_code = Some(language_code);
}
}
self.parent_sink_event_pre_queue(aggregator_pad, event)
}
_ => self.parent_sink_event_pre_queue(aggregator_pad, event),
}
}
@ -1341,15 +1277,7 @@ impl AggregatorImpl for MP4Mux {
// ... and then create the ftyp box plus mdat box header so we can start outputting
// actual data
let ftyp = boxes::create_ftyp(
self.obj().class().as_ref().variant,
&state
.streams
.iter()
.map(|s| s.caps.as_ref())
.collect::<Vec<_>>(),
)
.map_err(|err| {
let ftyp = boxes::create_ftyp(self.obj().class().as_ref().variant).map_err(|err| {
gst::error!(CAT, imp: self, "Failed to create ftyp box: {err}");
gst::FlowError::Error
})?;
@ -1408,7 +1336,6 @@ impl AggregatorImpl for MP4Mux {
earliest_pts,
end_pts,
chunks: stream.chunks,
extra_header_data: stream.extra_header_data.clone(),
});
}
@ -1416,7 +1343,6 @@ impl AggregatorImpl for MP4Mux {
variant: self.obj().class().as_ref().variant,
movie_timescale: settings.movie_timescale,
streams,
language_code: state.language_code,
})
.map_err(|err| {
gst::error!(CAT, imp: self, "Failed to create moov box: {err}");
@ -1597,11 +1523,6 @@ impl ElementImpl for ISOMP4Mux {
.field("channels", gst::IntRange::new(1i32, 8))
.field("rate", gst::IntRange::new(1, i32::MAX))
.build(),
gst::Structure::builder("audio/x-flac")
.field("framed", true)
.field("channels", gst::IntRange::<i32>::new(1, 8))
.field("rate", gst::IntRange::<i32>::new(1, 10 * u16::MAX as i32))
.build(),
]
.into_iter()
.collect::<gst::Caps>(),

View file

@ -11,7 +11,6 @@ use gst::prelude::*;
mod boxes;
mod imp;
mod obu;
glib::wrapper! {
pub(crate) struct MP4MuxPad(ObjectSubclass<imp::MP4MuxPad>) @extends gst_base::AggregatorPad, gst::Pad, gst::Object;
@ -127,9 +126,6 @@ pub(crate) struct Stream {
/// All the chunks stored for this stream
chunks: Vec<Chunk>,
// More data to be included in the fragmented stream header
extra_header_data: Option<Vec<u8>>,
}
#[derive(Debug)]
@ -139,7 +135,6 @@ pub(crate) struct Header {
/// Pre-defined movie timescale if not 0.
movie_timescale: u32,
streams: Vec<Stream>,
language_code: Option<[u8; 3]>,
}
#[allow(clippy::upper_case_acronyms)]

View file

@ -1,303 +0,0 @@
//
// Copyright (C) 2022 Vivienne Watermeier <vwatermeier@igalia.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(non_camel_case_types)]
use bitstream_io::{BigEndian, BitRead, BitReader, Endianness};
use std::io::{self, Cursor, Read, Seek, SeekFrom};
pub fn parse_leb128<R, E>(reader: &mut BitReader<R, E>) -> io::Result<(u32, u32)>
where
R: Read + Seek,
E: Endianness,
{
let mut value = 0;
let mut num_bytes = 0;
for i in 0..8 {
let byte = reader.read::<u32>(8)?;
value |= (byte & 0x7f) << (i * 7);
num_bytes += 1;
if byte & 0x80 == 0 {
break;
}
}
reader.byte_align();
Ok((value, num_bytes))
}
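// Editor's note — a minimal decode sketch (hypothetical input bytes, reusing
// the imports at the top of this file): leb128 stores 7 value bits per byte,
// low bits first, with the top bit as a continuation flag, so 300
// (0b1_0010_1100) encodes as [0xAC, 0x02] because 0x2C | (0x02 << 7) == 300.
//
//     let data = [0xACu8, 0x02];
//     let mut reader = BitReader::endian(Cursor::new(&data), BigEndian);
//     assert_eq!(parse_leb128(&mut reader).unwrap(), (300, 2));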
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct SizedObu {
pub obu_type: ObuType,
pub has_extension: bool,
/// If the OBU header is followed by a leb128 size field.
pub has_size_field: bool,
pub temporal_id: u8,
pub spatial_id: u8,
/// size of the OBU payload in bytes.
/// This may refer to different sizes in different contexts, not always
/// to the entire OBU payload as it is in the AV1 bitstream.
pub size: u32,
/// the number of bytes the leb128 size field will take up
/// when written with write_leb128().
/// This does not imply `has_size_field`, and does not necessarily match with
/// the length of the internal size field if present.
pub leb_size: u32,
pub header_len: u32,
/// indicates that only part of this OBU has been processed so far
pub is_fragment: bool,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ObuType {
Reserved,
SequenceHeader,
TemporalDelimiter,
FrameHeader,
TileGroup,
Metadata,
Frame,
RedundantFrameHeader,
TileList,
Padding,
}
impl Default for ObuType {
fn default() -> Self {
Self::Reserved
}
}
impl SizedObu {
/// Parse an OBU header and size field. If the OBU is not expected to contain
/// a size field, but the size is known from external information,
/// parse as an `UnsizedObu` and use `to_sized`.
pub fn parse<R, E>(reader: &mut BitReader<R, E>) -> io::Result<Self>
where
R: Read + Seek,
E: Endianness,
{
// check the forbidden bit
if reader.read_bit()? {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"forbidden bit in OBU header is set",
));
}
let obu_type = reader.read::<u8>(4)?.into();
let has_extension = reader.read_bit()?;
// require a size field
if !reader.read_bit()? {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"expected a size field",
));
}
// ignore the reserved bit
let _ = reader.read_bit()?;
let (temporal_id, spatial_id) = if has_extension {
(reader.read::<u8>(3)?, reader.read::<u8>(2)?)
} else {
(0, 0)
};
reader.byte_align();
let (size, leb_size) = parse_leb128(reader)?;
Ok(Self {
obu_type,
has_extension,
has_size_field: true,
temporal_id,
spatial_id,
size,
leb_size,
header_len: has_extension as u32 + 1,
is_fragment: false,
})
}
/// The amount of bytes this OBU will take up, including the space needed for
/// its leb128 size field.
pub fn full_size(&self) -> u32 {
self.size + self.leb_size + self.header_len
}
}
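// Quick arithmetic check against the sequence-header test vector below:
// size (5) + leb_size (1) + header_len (2, header byte plus extension byte)
// gives a full_size() of 8, matching that vector's 8-byte raw buffer.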
pub fn read_seq_header_obu_bytes(data: &[u8]) -> io::Result<Option<Vec<u8>>> {
let mut cursor = Cursor::new(data);
while cursor.position() < data.len() as u64 {
let obu_start = cursor.position();
let Ok(obu) = SizedObu::parse(&mut BitReader::endian(&mut cursor, BigEndian)) else {
break;
};
// set reader to the beginning of the OBU
cursor.seek(SeekFrom::Start(obu_start))?;
if obu.obu_type != ObuType::SequenceHeader {
// Skip the full OBU
cursor.seek(SeekFrom::Current(obu.full_size() as i64))?;
continue;
};
// read the full OBU
let mut bytes = vec![0; obu.full_size() as usize];
cursor.read_exact(&mut bytes)?;
return Ok(Some(bytes));
}
Ok(None)
}
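// Call-site sketch (mirroring how the muxer above feeds 'av1C'): map a
// keyframe buffer readable and scan it for the sequence header.
//
//     let map = buffer.map_readable().unwrap();
//     if let Some(seq_header) = read_seq_header_obu_bytes(map.as_slice())? {
//         // hand seq_header to the av1C box writer
//     }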
impl From<u8> for ObuType {
fn from(n: u8) -> Self {
assert!(n < 16);
match n {
1 => Self::SequenceHeader,
2 => Self::TemporalDelimiter,
3 => Self::FrameHeader,
4 => Self::TileGroup,
5 => Self::Metadata,
6 => Self::Frame,
7 => Self::RedundantFrameHeader,
8 => Self::TileList,
15 => Self::Padding,
_ => Self::Reserved,
}
}
}
impl From<ObuType> for u8 {
fn from(ty: ObuType) -> Self {
match ty {
ObuType::Reserved => 0,
ObuType::SequenceHeader => 1,
ObuType::TemporalDelimiter => 2,
ObuType::FrameHeader => 3,
ObuType::TileGroup => 4,
ObuType::Metadata => 5,
ObuType::Frame => 6,
ObuType::RedundantFrameHeader => 7,
ObuType::TileList => 8,
ObuType::Padding => 15,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use bitstream_io::{BigEndian, BitReader};
use once_cell::sync::Lazy;
use std::io::Cursor;
#[allow(clippy::type_complexity)]
static OBUS: Lazy<Vec<(SizedObu, Vec<u8>)>> = Lazy::new(|| {
vec![
(
SizedObu {
obu_type: ObuType::TemporalDelimiter,
has_extension: false,
has_size_field: true,
temporal_id: 0,
spatial_id: 0,
size: 0,
leb_size: 1,
header_len: 1,
is_fragment: false,
},
vec![0b0001_0010, 0b0000_0000],
),
(
SizedObu {
obu_type: ObuType::Padding,
has_extension: false,
has_size_field: true,
temporal_id: 0,
spatial_id: 0,
size: 10,
leb_size: 1,
header_len: 1,
is_fragment: false,
},
vec![0b0111_1010, 0b0000_1010, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
),
(
SizedObu {
obu_type: ObuType::SequenceHeader,
has_extension: true,
has_size_field: true,
temporal_id: 4,
spatial_id: 3,
size: 5,
leb_size: 1,
header_len: 2,
is_fragment: false,
},
vec![0b0000_1110, 0b1001_1000, 0b0000_0101, 1, 2, 3, 4, 5],
),
(
SizedObu {
obu_type: ObuType::Frame,
has_extension: true,
has_size_field: true,
temporal_id: 4,
spatial_id: 3,
size: 5,
leb_size: 1,
header_len: 2,
is_fragment: false,
},
vec![0b0011_0110, 0b1001_1000, 0b0000_0101, 1, 2, 3, 4, 5],
),
]
});
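// Bit layout of the first vector's header byte 0b0001_0010, for reference:
// forbidden bit 0 | obu_type 0010 (TemporalDelimiter) | extension flag 0 |
// has_size_field 1 | reserved 0, followed by the leb128 size byte 0x00.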
#[test]
fn test_parse_rtp_obu() {
for (idx, (sized_obu, raw_bytes)) in (*OBUS).iter().enumerate() {
println!("running test {idx}...");
let mut reader = BitReader::endian(Cursor::new(&raw_bytes), BigEndian);
let obu_parsed = SizedObu::parse(&mut reader).unwrap();
assert_eq!(&obu_parsed, sized_obu);
if let Some(seq_header_obu_bytes) = read_seq_header_obu_bytes(raw_bytes).unwrap() {
println!("validation of sequence header obu read/write...");
assert_eq!(&seq_header_obu_bytes, raw_bytes);
}
}
}
#[test]
fn test_read_seq_header_from_bitstream() {
let mut bitstream = Vec::new();
let mut seq_header_bytes_raw = None;
for (obu, raw_bytes) in (*OBUS).iter() {
bitstream.extend(raw_bytes);
if obu.obu_type == ObuType::SequenceHeader {
seq_header_bytes_raw = Some(raw_bytes.clone());
}
}
let seq_header_obu_bytes = read_seq_header_obu_bytes(&bitstream).unwrap().unwrap();
assert_eq!(seq_header_obu_bytes, seq_header_bytes_raw.unwrap());
}
}

View file

@ -7,8 +7,6 @@
// SPDX-License-Identifier: MPL-2.0
//
use std::path::Path;
use gst::prelude::*;
use gst_pbutils::prelude::*;
@ -22,57 +20,33 @@ fn init() {
});
}
struct Pipeline(gst::Pipeline);
impl std::ops::Deref for Pipeline {
type Target = gst::Pipeline;
#[test]
fn test_basic() {
init();
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Drop for Pipeline {
fn drop(&mut self) {
let _ = self.0.set_state(gst::State::Null);
}
}
struct Pipeline(gst::Pipeline);
impl std::ops::Deref for Pipeline {
type Target = gst::Pipeline;
impl Pipeline {
fn into_completion(self) {
self.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in self.bus().unwrap().iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
panic!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
_ => (),
}
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Drop for Pipeline {
fn drop(&mut self) {
let _ = self.0.set_state(gst::State::Null);
}
self.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
}
fn test_basic_with(video_enc: &str, audio_enc: &str, cb: impl FnOnce(&Path)) {
let Ok(pipeline) = gst::parse::launch(&format!(
"videotestsrc num-buffers=99 ! {video_enc} ! mux. \
audiotestsrc num-buffers=140 ! {audio_enc} ! mux. \
isomp4mux name=mux ! filesink name=sink"
)) else {
println!("could not build encoding pipeline");
return;
let pipeline = match gst::parse::launch(
"videotestsrc num-buffers=99 ! x264enc ! mux. \
audiotestsrc num-buffers=140 ! fdkaacenc ! mux. \
isomp4mux name=mux ! filesink name=sink \
",
) {
Ok(pipeline) => Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap()),
Err(_) => return,
};
let pipeline = Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap());
let dir = tempfile::TempDir::new().unwrap();
let mut location = dir.path().to_owned();
@ -80,95 +54,73 @@ fn test_basic_with(video_enc: &str, audio_enc: &str, cb: impl FnOnce(&Path)) {
let sink = pipeline.by_name("sink").unwrap();
sink.set_property("location", location.to_str().expect("Non-UTF8 filename"));
pipeline.into_completion();
cb(&location)
}
#[test]
fn test_basic_x264_aac() {
init();
test_basic_with("x264enc", "fdkaacenc", |location| {
let discoverer = gst_pbutils::Discoverer::new(gst::ClockTime::from_seconds(5))
.expect("Failed to create discoverer");
let info = discoverer
.discover_uri(
url::Url::from_file_path(location)
.expect("Failed to convert filename to URL")
.as_str(),
)
.expect("Failed to discover MP4 file");
assert_eq!(info.duration(), Some(gst::ClockTime::from_mseconds(3_300)));
let audio_streams = info.audio_streams();
assert_eq!(audio_streams.len(), 1);
let audio_stream = &audio_streams[0];
assert_eq!(audio_stream.channels(), 1);
assert_eq!(audio_stream.sample_rate(), 44_100);
let caps = audio_stream.caps().unwrap();
assert!(
caps.can_intersect(
&gst::Caps::builder("audio/mpeg")
.any_features()
.field("mpegversion", 4i32)
.build()
),
"Unexpected audio caps {caps:?}"
);
let video_streams = info.video_streams();
assert_eq!(video_streams.len(), 1);
let video_stream = &video_streams[0];
assert_eq!(video_stream.width(), 320);
assert_eq!(video_stream.height(), 240);
assert_eq!(video_stream.framerate(), gst::Fraction::new(30, 1));
assert_eq!(video_stream.par(), gst::Fraction::new(1, 1));
assert!(!video_stream.is_interlaced());
let caps = video_stream.caps().unwrap();
assert!(
caps.can_intersect(&gst::Caps::builder("video/x-h264").any_features().build()),
"Unexpected video caps {caps:?}"
);
})
}
#[test]
fn test_roundtrip_vp9_flac() {
init();
test_basic_with("vp9enc ! vp9parse", "flacenc ! flacparse", |location| {
let Ok(pipeline) = gst::parse::launch(
"filesrc name=src ! qtdemux name=demux \
demux.audio_0 ! queue ! flacdec ! fakesink \
demux.video_0 ! queue ! vp9dec ! fakesink",
) else {
panic!("could not build decoding pipeline")
};
let pipeline = Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap());
pipeline
.by_name("src")
.unwrap()
.set_property("location", location.display().to_string());
pipeline.into_completion();
})
}
#[test]
fn test_roundtrip_av1_aac() {
init();
test_basic_with("av1enc ! av1parse", "avenc_aac ! aacparse", |location| {
let Ok(pipeline) = gst::parse::launch(
"filesrc name=src ! qtdemux name=demux \
demux.audio_0 ! queue ! avdec_aac ! fakesink \
demux.video_0 ! queue ! av1dec ! fakesink",
) else {
panic!("could not build decoding pipeline")
};
let pipeline = Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap());
pipeline
.by_name("src")
.unwrap()
.set_property("location", location.display().to_string());
pipeline.into_completion();
})
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in pipeline.bus().unwrap().iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
panic!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
drop(pipeline);
let discoverer = gst_pbutils::Discoverer::new(gst::ClockTime::from_seconds(5))
.expect("Failed to create discoverer");
let info = discoverer
.discover_uri(
url::Url::from_file_path(&location)
.expect("Failed to convert filename to URL")
.as_str(),
)
.expect("Failed to discover MP4 file");
assert_eq!(info.duration(), Some(gst::ClockTime::from_mseconds(3_300)));
let audio_streams = info.audio_streams();
assert_eq!(audio_streams.len(), 1);
let audio_stream = &audio_streams[0];
assert_eq!(audio_stream.channels(), 1);
assert_eq!(audio_stream.sample_rate(), 44_100);
let caps = audio_stream.caps().unwrap();
assert!(
caps.can_intersect(
&gst::Caps::builder("audio/mpeg")
.any_features()
.field("mpegversion", 4i32)
.build()
),
"Unexpected audio caps {caps:?}"
);
let video_streams = info.video_streams();
assert_eq!(video_streams.len(), 1);
let video_stream = &video_streams[0];
assert_eq!(video_stream.width(), 320);
assert_eq!(video_stream.height(), 240);
assert_eq!(video_stream.framerate(), gst::Fraction::new(30, 1));
assert_eq!(video_stream.par(), gst::Fraction::new(1, 1));
assert!(!video_stream.is_interlaced());
let caps = video_stream.caps().unwrap();
assert!(
caps.can_intersect(&gst::Caps::builder("video/x-h264").any_features().build()),
"Unexpected video caps {caps:?}"
);
}

View file

@ -12,7 +12,7 @@ rust-version.workspace = true
[dependencies]
async-stream = "0.3.4"
base32 = "0.5"
base32 = "0.4"
aws-config = "1.0"
aws-sdk-s3 = "1.0"
aws-sdk-transcribestreaming = "1.0"
@ -35,7 +35,7 @@ once_cell.workspace = true
[dev-dependencies]
chrono = { version = "0.4", features = [ "alloc" ] }
env_logger = "0.11"
env_logger = "0.10"
gst-check = { workspace = true, features = ["v1_18"] }
rand = "0.8"
test-with = { version = "0.12", default-features = false }

View file

@ -18,7 +18,7 @@ mod s3hlssink;
mod s3sink;
mod s3src;
mod s3url;
pub mod s3utils;
mod s3utils;
mod transcribe_parse;
mod transcriber;

View file

@ -8,6 +8,7 @@
//
// SPDX-License-Identifier: MPL-2.0
use futures::future;
use once_cell::sync::Lazy;
use std::io::Write;
use std::str::FromStr;
@ -38,7 +39,6 @@ const S3_CHANNEL_SIZE: usize = 32;
const S3_ACL_DEFAULT: ObjectCannedAcl = ObjectCannedAcl::Private;
const DEFAULT_RETRY_ATTEMPTS: u32 = 5;
const DEFAULT_TIMEOUT_IN_MSECS: u64 = 15000;
const DEFAULT_FORCE_PATH_STYLE: bool = false;
struct Settings {
access_key: Option<String>,
@ -57,7 +57,6 @@ struct Settings {
video_sink: bool,
config: Option<SdkConfig>,
endpoint_uri: Option<String>,
force_path_style: bool,
}
impl Default for Settings {
@ -80,7 +79,6 @@ impl Default for Settings {
video_sink: false,
config: None,
endpoint_uri: None,
force_path_style: DEFAULT_FORCE_PATH_STYLE,
}
}
}
@ -89,7 +87,7 @@ pub struct S3HlsSink {
settings: Mutex<Settings>,
state: Mutex<State>,
hlssink: gst::Element,
canceller: Mutex<s3utils::Canceller>,
canceller: Mutex<Option<future::AbortHandle>>,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
@ -278,9 +276,10 @@ impl S3HlsSink {
gst::error!(
CAT,
imp: self,
"Put object request for S3 key {} of data length {} failed with error {err}",
"Put object request for S3 key {} of data length {} failed with error {:?}",
s3_key,
s3_data_len,
err,
);
element_imp_error!(
self,
@ -321,8 +320,9 @@ impl S3HlsSink {
gst::error!(
CAT,
imp: self,
"Delete object request for S3 key {} failed with error {err}",
"Delete object request for S3 key {} failed with error {:?}",
s3_key,
err
);
element_imp_error!(
self,
@ -378,7 +378,6 @@ impl S3HlsSink {
let sdk_config = settings.config.as_ref().expect("SDK config must be set");
let config_builder = config::Builder::from(sdk_config)
.force_path_style(settings.force_path_style)
.region(settings.s3_region.clone())
.retry_config(RetryConfig::standard().with_max_attempts(settings.retry_attempts));
@ -458,7 +457,7 @@ impl ObjectSubclass for S3HlsSink {
settings: Mutex::new(Settings::default()),
state: Mutex::new(State::Stopped),
hlssink,
canceller: Mutex::new(s3utils::Canceller::default()),
canceller: Mutex::new(None),
}
}
}
@ -532,11 +531,6 @@ impl ObjectImpl for S3HlsSink {
.blurb("The S3 endpoint URI to use")
.mutable_ready()
.build(),
glib::ParamSpecBoolean::builder("force-path-style")
.nick("Force path style")
.blurb("Force client to use path-style addressing for buckets")
.default_value(DEFAULT_FORCE_PATH_STYLE)
.build(),
]
});
@ -594,9 +588,6 @@ impl ObjectImpl for S3HlsSink {
.get::<Option<String>>()
.expect("type checked upstream");
}
"force-path-style" => {
settings.force_path_style = value.get::<bool>().expect("type checked upstream");
}
_ => unimplemented!(),
}
}
@ -617,7 +608,6 @@ impl ObjectImpl for S3HlsSink {
"request-timeout" => (settings.request_timeout.as_millis() as u64).to_value(),
"stats" => self.create_stats().to_value(),
"endpoint-uri" => settings.endpoint_uri.to_value(),
"force-path-style" => settings.force_path_style.to_value(),
_ => unimplemented!(),
}
}
@ -652,7 +642,10 @@ impl ObjectImpl for S3HlsSink {
self.hlssink.connect("get-playlist-stream", false, {
let self_weak = self.downgrade();
move |args| -> Option<glib::Value> {
let self_ = self_weak.upgrade()?;
let Some(self_) = self_weak.upgrade() else {
return None;
};
let s3client = self_.s3client_from_settings();
let settings = self_.settings.lock().unwrap();
let mut state = self_.state.lock().unwrap();
@ -683,7 +676,10 @@ impl ObjectImpl for S3HlsSink {
self.hlssink.connect("get-fragment-stream", false, {
let self_weak = self.downgrade();
move |args| -> Option<glib::Value> {
let self_ = self_weak.upgrade()?;
let Some(self_) = self_weak.upgrade() else {
return None;
};
let s3client = self_.s3client_from_settings();
let settings = self_.settings.lock().unwrap();
let mut state = self_.state.lock().unwrap();
@ -714,7 +710,10 @@ impl ObjectImpl for S3HlsSink {
self.hlssink.connect("delete-fragment", false, {
let self_weak = self.downgrade();
move |args| -> Option<glib::Value> {
let self_ = self_weak.upgrade()?;
let Some(self_) = self_weak.upgrade() else {
return None;
};
let s3_client = self_.s3client_from_settings();
let settings = self_.settings.lock().unwrap();
@ -740,11 +739,7 @@ impl ObjectImpl for S3HlsSink {
// The signature on delete-fragment signal is different for
// hlssink2 and hlssink3.
if self_
.hlssink
.factory()
.is_some_and(|factory| factory.name() == "hlssink3")
{
if self_.hlssink.factory().unwrap().name().contains("hlssink3") {
if res.is_ok() {
Some(true.to_value())
} else {
@ -806,19 +801,10 @@ impl ElementImpl for S3HlsSink {
PAD_TEMPLATES.as_ref()
}
#[allow(clippy::single_match)]
fn change_state(
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
match transition {
gst::StateChange::PausedToReady => {
let mut canceller = self.canceller.lock().unwrap();
canceller.abort();
}
_ => (),
}
let ret = self.parent_change_state(transition)?;
/*
* The settings lock must not be taken before the parent state change.
@ -862,11 +848,6 @@ impl ElementImpl for S3HlsSink {
}
}
gst::StateChange::PausedToReady => {
let mut canceller = self.canceller.lock().unwrap();
*canceller = s3utils::Canceller::None;
}
gst::StateChange::ReadyToNull => {
drop(settings);
/*

View file

@ -14,7 +14,6 @@ use gst_base::subclass::prelude::*;
use aws_sdk_s3::{
config::{self, retry::RetryConfig, Credentials, Region},
error::ProvideErrorMetadata,
operation::{
abort_multipart_upload::builders::AbortMultipartUploadFluentBuilder,
complete_multipart_upload::builders::CompleteMultipartUploadFluentBuilder,
@ -26,6 +25,7 @@ use aws_sdk_s3::{
Client,
};
use futures::future;
use once_cell::sync::Lazy;
use std::collections::HashMap;
use std::convert::From;
@ -37,7 +37,6 @@ use crate::s3utils::{self, duration_from_millis, duration_to_millis, WaitError};
use super::OnError;
const DEFAULT_FORCE_PATH_STYLE: bool = false;
const DEFAULT_RETRY_ATTEMPTS: u32 = 5;
const DEFAULT_BUFFER_SIZE: u64 = 5 * 1024 * 1024;
const DEFAULT_MULTIPART_UPLOAD_ON_ERROR: OnError = OnError::DoNothing;
@ -103,11 +102,8 @@ struct Settings {
region: Region,
bucket: Option<String>,
key: Option<String>,
cache_control: Option<String>,
content_type: Option<String>,
content_disposition: Option<String>,
content_encoding: Option<String>,
content_language: Option<String>,
buffer_size: u64,
access_key: Option<String>,
secret_access_key: Option<String>,
@ -117,7 +113,6 @@ struct Settings {
multipart_upload_on_error: OnError,
request_timeout: Duration,
endpoint_uri: Option<String>,
force_path_style: bool,
}
impl Settings {
@ -161,11 +156,8 @@ impl Default for Settings {
region: Region::new("us-west-2"),
bucket: None,
key: None,
cache_control: None,
content_type: None,
content_disposition: None,
content_encoding: None,
content_language: None,
access_key: None,
secret_access_key: None,
session_token: None,
@ -175,7 +167,6 @@ impl Default for Settings {
multipart_upload_on_error: DEFAULT_MULTIPART_UPLOAD_ON_ERROR,
request_timeout: Duration::from_millis(DEFAULT_REQUEST_TIMEOUT_MSEC),
endpoint_uri: None,
force_path_style: DEFAULT_FORCE_PATH_STYLE,
}
}
}
@ -185,8 +176,8 @@ pub struct S3Sink {
url: Mutex<Option<GstS3Url>>,
settings: Mutex<Settings>,
state: Mutex<State>,
canceller: Mutex<s3utils::Canceller>,
abort_multipart_canceller: Mutex<s3utils::Canceller>,
canceller: Mutex<Option<future::AbortHandle>>,
abort_multipart_canceller: Mutex<Option<future::AbortHandle>>,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
@ -274,7 +265,7 @@ impl S3Sink {
self.flush_multipart_upload(state);
Some(gst::error_msg!(
gst::ResourceError::OpenWrite,
["Failed to upload part: {err}: {}", err.meta()]
["Failed to upload part: {}", err]
))
}
WaitError::Cancelled => None,
@ -363,22 +354,16 @@ impl S3Sink {
) -> CreateMultipartUploadFluentBuilder {
let bucket = Some(url.bucket.clone());
let key = Some(url.object.clone());
let cache_control = settings.cache_control.clone();
let content_type = settings.content_type.clone();
let content_disposition = settings.content_disposition.clone();
let content_encoding = settings.content_encoding.clone();
let content_language = settings.content_language.clone();
let metadata = settings.to_metadata(self);
client
.create_multipart_upload()
.set_bucket(bucket)
.set_key(key)
.set_cache_control(cache_control)
.set_content_type(content_type)
.set_content_disposition(content_disposition)
.set_content_encoding(content_encoding)
.set_content_language(content_language)
.set_metadata(metadata)
}
@ -422,7 +407,7 @@ impl S3Sink {
WaitError::FutureError(err) => {
gst::error_msg!(
gst::ResourceError::Write,
["Failed to abort multipart upload: {err}: {}", err.meta()]
["Failed to abort multipart upload: {}.", err.to_string()]
)
}
WaitError::Cancelled => {
@ -446,7 +431,7 @@ impl S3Sink {
.map_err(|err| match err {
WaitError::FutureError(err) => gst::error_msg!(
gst::ResourceError::Write,
["Failed to complete multipart upload: {err}: {}", err.meta()]
["Failed to complete multipart upload: {}.", err.to_string()]
),
WaitError::Cancelled => {
gst::error_msg!(
@ -527,7 +512,7 @@ impl S3Sink {
.map_err(|err| match err {
WaitError::FutureError(err) => gst::error_msg!(
gst::ResourceError::OpenWrite,
["Failed to create SDK config: {err}"]
["Failed to create SDK config: {}", err]
),
WaitError::Cancelled => {
gst::error_msg!(
@ -538,7 +523,6 @@ impl S3Sink {
})?;
let config_builder = config::Builder::from(&sdk_config)
.force_path_style(settings.force_path_style)
.retry_config(RetryConfig::standard().with_max_attempts(settings.retry_attempts));
let config = if let Some(ref uri) = settings.endpoint_uri {
@ -557,7 +541,7 @@ impl S3Sink {
|err| match err {
WaitError::FutureError(err) => gst::error_msg!(
gst::ResourceError::OpenWrite,
["Failed to create multipart upload: {err}: {}", err.meta()]
["Failed to create multipart upload: {}", err]
),
WaitError::Cancelled => {
gst::error_msg!(
@ -617,6 +601,19 @@ impl S3Sink {
Ok(())
}
fn cancel(&self) {
let mut canceller = self.canceller.lock().unwrap();
let mut abort_canceller = self.abort_multipart_canceller.lock().unwrap();
if let Some(c) = abort_canceller.take() {
c.abort()
};
if let Some(c) = canceller.take() {
c.abort()
};
}
fn set_uri(self: &S3Sink, url_str: Option<&str>) -> Result<(), glib::Error> {
let state = self.state.lock().unwrap();
@ -769,10 +766,6 @@ impl ObjectImpl for S3Sink {
.nick("S3 endpoint URI")
.blurb("The S3 endpoint URI to use")
.build(),
glib::ParamSpecString::builder("cache-control")
.nick("cache-control")
.blurb("Cache-Control header to set for uploaded object")
.build(),
glib::ParamSpecString::builder("content-type")
.nick("content-type")
.blurb("Content-Type header to set for uploaded object")
@ -781,19 +774,6 @@ impl ObjectImpl for S3Sink {
.nick("content-disposition")
.blurb("Content-Disposition header to set for uploaded object")
.build(),
glib::ParamSpecString::builder("content-encoding")
.nick("content-encoding")
.blurb("Content-Encoding header to set for uploaded object")
.build(),
glib::ParamSpecString::builder("content-language")
.nick("content-language")
.blurb("Content-Language header to set for uploaded object")
.build(),
glib::ParamSpecBoolean::builder("force-path-style")
.nick("Force path style")
.blurb("Force client to use path-style addressing for buckets")
.default_value(DEFAULT_FORCE_PATH_STYLE)
.build(),
]
});
@ -897,11 +877,6 @@ impl ObjectImpl for S3Sink {
let _ = self.set_uri(Some(&settings.to_uri()));
}
}
"cache-control" => {
settings.cache_control = value
.get::<Option<String>>()
.expect("type checked upstream");
}
"content-type" => {
settings.content_type = value
.get::<Option<String>>()
@ -912,19 +887,6 @@ impl ObjectImpl for S3Sink {
.get::<Option<String>>()
.expect("type checked upstream");
}
"content-encoding" => {
settings.content_encoding = value
.get::<Option<String>>()
.expect("type checked upstream");
}
"content-language" => {
settings.content_language = value
.get::<Option<String>>()
.expect("type checked upstream");
}
"force-path-style" => {
settings.force_path_style = value.get::<bool>().expect("type checked upstream");
}
_ => unimplemented!(),
}
}
@ -964,12 +926,8 @@ impl ObjectImpl for S3Sink {
(settings.retry_attempts as i64 * request_timeout).to_value()
}
"endpoint-uri" => settings.endpoint_uri.to_value(),
"cache-control" => settings.cache_control.to_value(),
"content-type" => settings.content_type.to_value(),
"content-disposition" => settings.content_disposition.to_value(),
"content-encoding" => settings.content_encoding.to_value(),
"content-language" => settings.content_language.to_value(),
"force-path-style" => settings.force_path_style.to_value(),
_ => unimplemented!(),
}
}
@ -1089,18 +1047,8 @@ impl BaseSinkImpl for S3Sink {
}
fn unlock(&self) -> Result<(), gst::ErrorMessage> {
let mut canceller = self.canceller.lock().unwrap();
let mut abort_canceller = self.abort_multipart_canceller.lock().unwrap();
canceller.abort();
abort_canceller.abort();
Ok(())
}
self.cancel();
fn unlock_stop(&self) -> Result<(), gst::ErrorMessage> {
let mut canceller = self.canceller.lock().unwrap();
let mut abort_canceller = self.abort_multipart_canceller.lock().unwrap();
*canceller = s3utils::Canceller::None;
*abort_canceller = s3utils::Canceller::None;
Ok(())
}

View file

@ -15,12 +15,12 @@ use gst_base::subclass::prelude::*;
use aws_sdk_s3::{
config::{self, retry::RetryConfig, Credentials, Region},
error::ProvideErrorMetadata,
operation::put_object::builders::PutObjectFluentBuilder,
primitives::ByteStream,
Client,
};
use futures::future;
use once_cell::sync::Lazy;
use std::collections::HashMap;
use std::convert::From;
@ -35,7 +35,6 @@ const DEFAULT_FLUSH_INTERVAL_BUFFERS: u64 = 1;
const DEFAULT_FLUSH_INTERVAL_BYTES: u64 = 0;
const DEFAULT_FLUSH_INTERVAL_TIME: gst::ClockTime = gst::ClockTime::from_nseconds(0);
const DEFAULT_FLUSH_ON_ERROR: bool = false;
const DEFAULT_FORCE_PATH_STYLE: bool = false;
// General setting for create / abort requests
const DEFAULT_REQUEST_TIMEOUT_MSEC: u64 = 15_000;
@ -71,11 +70,8 @@ struct Settings {
region: Region,
bucket: Option<String>,
key: Option<String>,
cache_control: Option<String>,
content_type: Option<String>,
content_disposition: Option<String>,
content_encoding: Option<String>,
content_language: Option<String>,
access_key: Option<String>,
secret_access_key: Option<String>,
session_token: Option<String>,
@ -83,7 +79,6 @@ struct Settings {
retry_attempts: u32,
request_timeout: Duration,
endpoint_uri: Option<String>,
force_path_style: bool,
flush_interval_buffers: u64,
flush_interval_bytes: u64,
flush_interval_time: Option<gst::ClockTime>,
@ -131,11 +126,8 @@ impl Default for Settings {
region: Region::new("us-west-2"),
bucket: None,
key: None,
cache_control: None,
content_type: None,
content_disposition: None,
content_encoding: None,
content_language: None,
access_key: None,
secret_access_key: None,
session_token: None,
@ -143,7 +135,6 @@ impl Default for Settings {
retry_attempts: DEFAULT_RETRY_ATTEMPTS,
request_timeout: Duration::from_millis(DEFAULT_REQUEST_TIMEOUT_MSEC),
endpoint_uri: None,
force_path_style: DEFAULT_FORCE_PATH_STYLE,
flush_interval_buffers: DEFAULT_FLUSH_INTERVAL_BUFFERS,
flush_interval_bytes: DEFAULT_FLUSH_INTERVAL_BYTES,
flush_interval_time: Some(DEFAULT_FLUSH_INTERVAL_TIME),
@ -157,7 +148,7 @@ pub struct S3PutObjectSink {
url: Mutex<Option<GstS3Url>>,
settings: Mutex<Settings>,
state: Mutex<State>,
canceller: Mutex<s3utils::Canceller>,
canceller: Mutex<Option<future::AbortHandle>>,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
@ -211,7 +202,7 @@ impl S3PutObjectSink {
s3utils::wait(&self.canceller, put_object_req_future).map_err(|err| match err {
WaitError::FutureError(err) => Some(gst::error_msg!(
gst::ResourceError::OpenWrite,
["Failed to upload object: {err}: {}", err.meta()]
["Failed to upload object: {}", err]
)),
WaitError::Cancelled => None,
})?;
@ -236,11 +227,6 @@ impl S3PutObjectSink {
let bucket = Some(url.as_ref().unwrap().bucket.to_owned());
let key = Some(url.as_ref().unwrap().object.to_owned());
let cache_control = settings.cache_control.clone();
let content_type = settings.content_type.clone();
let content_disposition = settings.content_disposition.clone();
let content_encoding = settings.content_encoding.clone();
let content_language = settings.content_language.clone();
let metadata = settings.to_metadata(self);
let client = &state.client;
@ -249,11 +235,6 @@ impl S3PutObjectSink {
.put_object()
.set_body(body)
.set_bucket(bucket)
.set_cache_control(cache_control)
.set_content_disposition(content_disposition)
.set_content_encoding(content_encoding)
.set_content_type(content_type)
.set_content_language(content_language)
.set_key(key)
.set_metadata(metadata)
}
@ -311,7 +292,6 @@ impl S3PutObjectSink {
})?;
let config_builder = config::Builder::from(&sdk_config)
.force_path_style(settings.force_path_style)
.retry_config(RetryConfig::standard().with_max_attempts(settings.retry_attempts));
let config = if let Some(ref uri) = settings.endpoint_uri {
@ -327,6 +307,14 @@ impl S3PutObjectSink {
Ok(())
}
fn cancel(&self) {
let mut canceller = self.canceller.lock().unwrap();
if let Some(c) = canceller.take() {
c.abort()
};
}
fn set_uri(self: &S3PutObjectSink, url_str: Option<&str>) -> Result<(), glib::Error> {
let state = self.state.lock().unwrap();
@ -429,10 +417,6 @@ impl ObjectImpl for S3PutObjectSink {
.nick("S3 endpoint URI")
.blurb("The S3 endpoint URI to use")
.build(),
glib::ParamSpecString::builder("cache-control")
.nick("cache-control")
.blurb("Cache-Control header to set for uploaded object")
.build(),
glib::ParamSpecString::builder("content-type")
.nick("content-type")
.blurb("Content-Type header to set for uploaded object")
@ -441,14 +425,6 @@ impl ObjectImpl for S3PutObjectSink {
.nick("content-disposition")
.blurb("Content-Disposition header to set for uploaded object")
.build(),
glib::ParamSpecString::builder("content-encoding")
.nick("content-encoding")
.blurb("Content-Encoding header to set for uploaded object")
.build(),
glib::ParamSpecString::builder("content-language")
.nick("content-language")
.blurb("Content-Language header to set for uploaded object")
.build(),
glib::ParamSpecUInt64::builder("flush-interval-buffers")
.nick("Flush interval in buffers")
.blurb("Number of buffers to accumulate before doing a write (0 => disable)")
@ -469,11 +445,6 @@ impl ObjectImpl for S3PutObjectSink {
.blurb("Whether to write out the data on error (like stopping without an EOS)")
.default_value(DEFAULT_FLUSH_ON_ERROR)
.build(),
glib::ParamSpecBoolean::builder("force-path-style")
.nick("Force path style")
.blurb("Force client to use path-style addressing for buckets")
.default_value(DEFAULT_FORCE_PATH_STYLE)
.build(),
]
});
@ -545,11 +516,6 @@ impl ObjectImpl for S3PutObjectSink {
let _ = self.set_uri(Some(&settings.to_uri()));
}
}
"cache-control" => {
settings.cache_control = value
.get::<Option<String>>()
.expect("type checked upstream");
}
"content-type" => {
settings.content_type = value
.get::<Option<String>>()
@ -560,16 +526,6 @@ impl ObjectImpl for S3PutObjectSink {
.get::<Option<String>>()
.expect("type checked upstream");
}
"content-encoding" => {
settings.content_encoding = value
.get::<Option<String>>()
.expect("type checked upstream");
}
"content-language" => {
settings.content_language = value
.get::<Option<String>>()
.expect("type checked upstream");
}
"flush-interval-buffers" => {
settings.flush_interval_buffers =
value.get::<u64>().expect("type checked upstream");
@ -585,9 +541,6 @@ impl ObjectImpl for S3PutObjectSink {
"flush-on-error" => {
settings.flush_on_error = value.get::<bool>().expect("type checked upstream");
}
"force-path-style" => {
settings.force_path_style = value.get::<bool>().expect("type checked upstream");
}
_ => unimplemented!(),
}
}
@ -615,16 +568,12 @@ impl ObjectImpl for S3PutObjectSink {
"retry-attempts" => settings.retry_attempts.to_value(),
"request-timeout" => duration_to_millis(Some(settings.request_timeout)).to_value(),
"endpoint-uri" => settings.endpoint_uri.to_value(),
"cache-control" => settings.cache_control.to_value(),
"content-type" => settings.content_type.to_value(),
"content-disposition" => settings.content_disposition.to_value(),
"content-encoding" => settings.content_encoding.to_value(),
"content-language" => settings.content_language.to_value(),
"flush-interval-buffers" => settings.flush_interval_buffers.to_value(),
"flush-interval-bytes" => settings.flush_interval_bytes.to_value(),
"flush-interval-time" => settings.flush_interval_time.to_value(),
"flush-on-error" => settings.flush_on_error.to_value(),
"force-path-style" => settings.force_path_style.to_value(),
_ => unimplemented!(),
}
}
@ -747,14 +696,8 @@ impl BaseSinkImpl for S3PutObjectSink {
}
fn unlock(&self) -> Result<(), gst::ErrorMessage> {
let mut canceller = self.canceller.lock().unwrap();
canceller.abort();
Ok(())
}
self.cancel();
fn unlock_stop(&self) -> Result<(), gst::ErrorMessage> {
let mut canceller = self.canceller.lock().unwrap();
*canceller = s3utils::Canceller::None;
Ok(())
}

View file

@ -7,13 +7,13 @@
// SPDX-License-Identifier: MPL-2.0
use bytes::Bytes;
use futures::future;
use once_cell::sync::Lazy;
use std::sync::Mutex;
use std::time::Duration;
use aws_sdk_s3::{
config::{self, retry::RetryConfig, Credentials},
error::ProvideErrorMetadata,
Client,
};
@ -28,7 +28,6 @@ use gst_base::subclass::prelude::*;
use crate::s3url::*;
use crate::s3utils::{self, duration_from_millis, duration_to_millis, WaitError};
const DEFAULT_FORCE_PATH_STYLE: bool = false;
const DEFAULT_RETRY_ATTEMPTS: u32 = 5;
const DEFAULT_REQUEST_TIMEOUT_MSEC: u64 = 15000;
const DEFAULT_RETRY_DURATION_MSEC: u64 = 60_000;
@ -53,7 +52,6 @@ struct Settings {
retry_attempts: u32,
request_timeout: Duration,
endpoint_uri: Option<String>,
force_path_style: bool,
}
impl Default for Settings {
@ -67,7 +65,6 @@ impl Default for Settings {
retry_attempts: DEFAULT_RETRY_ATTEMPTS,
request_timeout: duration,
endpoint_uri: None,
force_path_style: DEFAULT_FORCE_PATH_STYLE,
}
}
}
@ -76,7 +73,7 @@ impl Default for Settings {
pub struct S3Src {
settings: Mutex<Settings>,
state: Mutex<StreamingState>,
canceller: Mutex<s3utils::Canceller>,
canceller: Mutex<Option<future::AbortHandle>>,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
@ -88,6 +85,14 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
});
impl S3Src {
fn cancel(&self) {
let mut canceller = self.canceller.lock().unwrap();
if let Some(c) = canceller.take() {
c.abort()
};
}
fn connect(self: &S3Src, url: &GstS3Url) -> Result<Client, gst::ErrorMessage> {
let settings = self.settings.lock().unwrap();
let timeout_config = s3utils::timeout_config(settings.request_timeout);
@ -122,7 +127,6 @@ impl S3Src {
})?;
let config_builder = config::Builder::from(&sdk_config)
.force_path_style(settings.force_path_style)
.retry_config(RetryConfig::standard().with_max_attempts(settings.retry_attempts));
let config = if let Some(ref uri) = settings.endpoint_uri {
@ -180,7 +184,7 @@ impl S3Src {
s3utils::wait(&self.canceller, head_object_future).map_err(|err| match err {
WaitError::FutureError(err) => gst::error_msg!(
gst::ResourceError::NotFound,
["Failed to get HEAD object: {err}: {}", err.meta()]
["Failed to get HEAD object: {:?}", err]
),
WaitError::Cancelled => {
gst::error_msg!(
@ -239,7 +243,7 @@ impl S3Src {
s3utils::wait(&self.canceller, get_object_future).map_err(|err| match err {
WaitError::FutureError(err) => Some(gst::error_msg!(
gst::ResourceError::Read,
["Could not read: {err}: {}", err.meta()]
["Could not read: {}", err]
)),
WaitError::Cancelled => None,
})?;
@ -249,7 +253,7 @@ impl S3Src {
s3utils::wait_stream(&self.canceller, &mut output.body).map_err(|err| match err {
WaitError::FutureError(err) => Some(gst::error_msg!(
gst::ResourceError::Read,
["Could not read: {err}"]
["Could not read: {}", err]
)),
WaitError::Cancelled => None,
})
@ -311,11 +315,6 @@ impl ObjectImpl for S3Src {
.nick("S3 endpoint URI")
.blurb("The S3 endpoint URI to use")
.build(),
glib::ParamSpecBoolean::builder("force-path-style")
.nick("Force path style")
.blurb("Force client to use path-style addressing for buckets")
.default_value(DEFAULT_FORCE_PATH_STYLE)
.build(),
]
});
@ -365,9 +364,6 @@ impl ObjectImpl for S3Src {
.get::<Option<String>>()
.expect("type checked upstream");
}
"force-path-style" => {
settings.force_path_style = value.get::<bool>().expect("type checked upstream");
}
_ => unimplemented!(),
}
}
@ -394,7 +390,6 @@ impl ObjectImpl for S3Src {
}
"retry-attempts" => settings.retry_attempts.to_value(),
"endpoint-uri" => settings.endpoint_uri.to_value(),
"force-path-style" => settings.force_path_style.to_value(),
_ => unimplemented!(),
}
}
@ -512,6 +507,9 @@ impl BaseSrcImpl for S3Src {
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
// First, stop any asynchronous tasks if we're running, as they will have the state lock
self.cancel();
let mut state = self.state.lock().unwrap();
if let StreamingState::Stopped = *state {
@ -575,14 +573,7 @@ impl BaseSrcImpl for S3Src {
}
fn unlock(&self) -> Result<(), gst::ErrorMessage> {
let mut canceller = self.canceller.lock().unwrap();
canceller.abort();
Ok(())
}
fn unlock_stop(&self) -> Result<(), gst::ErrorMessage> {
let mut canceller = self.canceller.lock().unwrap();
*canceller = s3utils::Canceller::None;
self.cancel();
Ok(())
}
}

View file

@ -25,10 +25,9 @@ const FRAGMENT: &AsciiSet = &CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').ad
const PATH: &AsciiSet = &FRAGMENT.add(b'#').add(b'?').add(b'{').add(b'}');
const PATH_SEGMENT: &AsciiSet = &PATH.add(b'/').add(b'%');
impl std::fmt::Display for GstS3Url {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
impl ToString for GstS3Url {
fn to_string(&self) -> String {
format!(
"s3://{}/{}/{}{}",
self.region,
self.bucket,
@ -60,9 +59,9 @@ pub fn parse_s3_url(url_str: &str) -> Result<GstS3Url, String> {
.or_else(|_| {
let (name, endpoint) = host.split_once('+').ok_or(())?;
let name =
base32::decode(base32::Alphabet::Rfc4648 { padding: true }, name).ok_or(())?;
base32::decode(base32::Alphabet::RFC4648 { padding: true }, name).ok_or(())?;
let endpoint =
base32::decode(base32::Alphabet::Rfc4648 { padding: true }, endpoint).ok_or(())?;
base32::decode(base32::Alphabet::RFC4648 { padding: true }, endpoint).ok_or(())?;
let name = String::from_utf8(name).map_err(|_| ())?;
let endpoint = String::from_utf8(endpoint).map_err(|_| ())?;
Ok(format!("{name}{endpoint}"))
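// Editor's note: replacing the manual `impl ToString` with `impl Display`
// (per the hunk above) keeps `to_string()` working through std's blanket
// `impl<T: fmt::Display> ToString for T`, e.g. (hypothetical URL):
//
//     let url = parse_s3_url("s3://us-west-2/my-bucket/my-key").unwrap();
//     let s = url.to_string(); // routed through the Display impl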

View file

@ -9,7 +9,6 @@
use aws_config::meta::region::RegionProviderChain;
use aws_sdk_s3::{
config::{timeout::TimeoutConfig, Credentials, Region},
error::ProvideErrorMetadata,
primitives::{ByteStream, ByteStreamError},
};
use aws_types::sdk_config::SdkConfig;
@ -17,15 +16,11 @@ use aws_types::sdk_config::SdkConfig;
use bytes::{buf::BufMut, Bytes, BytesMut};
use futures::{future, Future};
use once_cell::sync::Lazy;
use std::fmt;
use std::sync::Mutex;
use std::time::Duration;
use tokio::runtime;
pub const DEFAULT_S3_REGION: &str = "us-west-2";
pub static AWS_BEHAVIOR_VERSION: Lazy<aws_config::BehaviorVersion> =
Lazy::new(aws_config::BehaviorVersion::v2023_11_09);
const DEFAULT_S3_REGION: &str = "us-west-2";
static RUNTIME: Lazy<runtime::Runtime> = Lazy::new(|| {
runtime::Builder::new_multi_thread()
@ -42,47 +37,21 @@ pub enum WaitError<E> {
FutureError(E),
}
impl<E: ProvideErrorMetadata + std::error::Error> fmt::Display for WaitError<E> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
WaitError::Cancelled => f.write_str("Cancelled"),
WaitError::FutureError(err) => write!(f, "{err}: {}", err.meta()),
}
}
}
#[derive(Default)]
pub enum Canceller {
#[default]
None,
Handle(future::AbortHandle),
Cancelled,
}
impl Canceller {
pub fn abort(&mut self) {
if let Canceller::Handle(ref canceller) = *self {
canceller.abort();
}
*self = Canceller::Cancelled;
}
}
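// Usage sketch — the element-side pattern this enum enables (it matches the
// unlock()/unlock_stop() changes earlier in this comparison): unlock() aborts
// any in-flight wait() and latches Cancelled, unlock_stop() re-arms it.
//
//     fn unlock(&self) -> Result<(), gst::ErrorMessage> {
//         self.canceller.lock().unwrap().abort();
//         Ok(())
//     }
//
//     fn unlock_stop(&self) -> Result<(), gst::ErrorMessage> {
//         *self.canceller.lock().unwrap() = Canceller::None;
//         Ok(())
//     }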
pub fn wait<F, T, E>(canceller_mutex: &Mutex<Canceller>, future: F) -> Result<T, WaitError<E>>
pub fn wait<F, T, E>(
canceller: &Mutex<Option<future::AbortHandle>>,
future: F,
) -> Result<T, WaitError<E>>
where
F: Send + Future<Output = Result<T, E>>,
F::Output: Send,
T: Send,
E: Send,
{
let mut canceller = canceller_mutex.lock().unwrap();
if matches!(*canceller, Canceller::Cancelled) {
return Err(WaitError::Cancelled);
}
let mut canceller_guard = canceller.lock().unwrap();
let (abort_handle, abort_registration) = future::AbortHandle::new_pair();
*canceller = Canceller::Handle(abort_handle);
drop(canceller);
canceller_guard.replace(abort_handle);
drop(canceller_guard);
let abortable_future = future::Abortable::new(future, abort_registration);
@ -103,21 +72,17 @@ where
};
/* Clear out the canceller */
let mut canceller = canceller_mutex.lock().unwrap();
if matches!(*canceller, Canceller::Cancelled) {
return Err(WaitError::Cancelled);
}
*canceller = Canceller::None;
drop(canceller);
canceller_guard = canceller.lock().unwrap();
*canceller_guard = None;
res
}
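// Caller-side sketch, matching the sink/src call sites above: wait() runs the
// future to completion, and a latched cancel surfaces as WaitError::Cancelled.
//
//     match s3utils::wait(&self.canceller, client.head_object().send()) {
//         Ok(output) => { /* use the response */ }
//         Err(WaitError::Cancelled) => { /* flushing; not an error */ }
//         Err(WaitError::FutureError(err)) => { /* report err / err.meta() */ }
//     }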
pub fn wait_stream(
canceller_mutex: &Mutex<Canceller>,
canceller: &Mutex<Option<future::AbortHandle>>,
stream: &mut ByteStream,
) -> Result<Bytes, WaitError<ByteStreamError>> {
wait(canceller_mutex, async move {
wait(canceller, async move {
let mut collect = BytesMut::new();
// Loop over the stream and collect till we're done
@ -137,7 +102,7 @@ pub fn timeout_config(request_timeout: Duration) -> TimeoutConfig {
}
pub fn wait_config(
canceller_mutex: &Mutex<Canceller>,
canceller: &Mutex<Option<future::AbortHandle>>,
region: Region,
timeout_config: TimeoutConfig,
credentials: Option<Credentials>,
@ -146,24 +111,22 @@ pub fn wait_config(
.or_default_provider()
.or_else(Region::new(DEFAULT_S3_REGION));
let config_future = match credentials {
Some(cred) => aws_config::defaults(AWS_BEHAVIOR_VERSION.clone())
Some(cred) => aws_config::defaults(aws_config::BehaviorVersion::latest())
.timeout_config(timeout_config)
.region(region_provider)
.credentials_provider(cred)
.load(),
None => aws_config::defaults(AWS_BEHAVIOR_VERSION.clone())
None => aws_config::defaults(aws_config::BehaviorVersion::latest())
.timeout_config(timeout_config)
.region(region_provider)
.load(),
};
let mut canceller = canceller_mutex.lock().unwrap();
if matches!(*canceller, Canceller::Cancelled) {
return Err(WaitError::Cancelled);
}
let mut canceller_guard = canceller.lock().unwrap();
let (abort_handle, abort_registration) = future::AbortHandle::new_pair();
*canceller = Canceller::Handle(abort_handle);
drop(canceller);
canceller_guard.replace(abort_handle);
drop(canceller_guard);
let abortable_future = future::Abortable::new(config_future, abort_registration);
@ -180,12 +143,8 @@ pub fn wait_config(
};
/* Clear out the canceller */
let mut canceller = canceller_mutex.lock().unwrap();
if matches!(*canceller, Canceller::Cancelled) {
return Err(WaitError::Cancelled);
}
*canceller = Canceller::None;
drop(canceller);
canceller_guard = canceller.lock().unwrap();
*canceller_guard = None;
res
}

View file

@ -47,9 +47,6 @@ static RUNTIME: Lazy<runtime::Runtime> = Lazy::new(|| {
.unwrap()
});
static AWS_BEHAVIOR_VERSION: Lazy<aws_config::BehaviorVersion> =
Lazy::new(aws_config::BehaviorVersion::v2023_11_09);
const DEFAULT_TRANSCRIBER_REGION: &str = "us-east-1";
// Deprecated in 0.11.0: due to evolutions of the transcriber element,
@ -534,14 +531,13 @@ impl Transcriber {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Preparing");
let (access_key, secret_access_key, session_token) = {
let (access_key, secret_access_key, session_token);
{
let settings = self.settings.lock().unwrap();
(
settings.access_key.clone(),
settings.secret_access_key.clone(),
settings.session_token.clone(),
)
};
access_key = settings.access_key.to_owned();
secret_access_key = settings.secret_access_key.to_owned();
session_token = settings.session_token.to_owned();
}
gst::info!(CAT, imp: self, "Loading aws config...");
let _enter_guard = RUNTIME.enter();
@ -549,7 +545,7 @@ impl Transcriber {
let config_loader = match (access_key, secret_access_key) {
(Some(key), Some(secret_key)) => {
gst::debug!(CAT, imp: self, "Using settings credentials");
aws_config::defaults(AWS_BEHAVIOR_VERSION.clone()).credentials_provider(
aws_config::ConfigLoader::default().credentials_provider(
aws_transcribe::config::Credentials::new(
key,
secret_key,
@ -561,7 +557,7 @@ impl Transcriber {
}
_ => {
gst::debug!(CAT, imp: self, "Attempting to get credentials from env...");
aws_config::defaults(AWS_BEHAVIOR_VERSION.clone())
aws_config::defaults(aws_config::BehaviorVersion::latest())
}
};
@ -1635,7 +1631,9 @@ impl TranslateSrcPad {
#[inline]
fn needs_translation(input_lang: &str, output_lang: Option<&str>) -> bool {
output_lang.is_some_and(|other| !input_lang.eq_ignore_ascii_case(other.as_ref()))
output_lang.map_or(false, |other| {
!input_lang.eq_ignore_ascii_case(other.as_ref())
})
}
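// The two spellings are equivalent; `is_some_and` is the direct form of
// `map_or(false, ...)`. Illustration (hypothetical inputs):
//
//     assert!(!TranslateSrcPad::needs_translation("en", Some("EN")));
//     assert!(TranslateSrcPad::needs_translation("en", Some("fr")));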
#[inline]

View file

@ -11,7 +11,6 @@ use gst::subclass::prelude::*;
use gst::{glib, prelude::*};
use aws_sdk_transcribestreaming as aws_transcribe;
use aws_sdk_transcribestreaming::error::ProvideErrorMetadata;
use aws_sdk_transcribestreaming::types;
use futures::channel::mpsc;
@ -166,7 +165,7 @@ impl TranscriberStream {
.send()
.await
.map_err(|err| {
let err = format!("Transcribe ws init error: {err}: {}", err.meta());
let err = format!("Transcribe ws init error: {err}");
gst::error!(CAT, imp: imp, "{err}");
gst::error_msg!(gst::LibraryError::Init, ["{err}"])
})?;
@ -188,7 +187,7 @@ impl TranscriberStream {
.recv()
.await
.map_err(|err| {
let err = format!("Transcribe ws stream error: {err}: {}", err.meta());
let err = format!("Transcribe ws stream error: {err}");
gst::error!(CAT, imp: self.imp, "{err}");
gst::error_msg!(gst::LibraryError::Failed, ["{err}"])
})?;

View file

@ -10,7 +10,6 @@ use gst::glib;
use gst::subclass::prelude::*;
use aws_sdk_translate as aws_translate;
use aws_sdk_translate::error::ProvideErrorMetadata;
use futures::channel::mpsc;
use futures::prelude::*;
@ -79,10 +78,7 @@ impl TranslateLoop {
pub async fn check_language(&self) -> Result<(), gst::ErrorMessage> {
let language_list = self.client.list_languages().send().await.map_err(|err| {
let err = format!(
"Failed to call list_languages service: {err}: {}",
err.meta()
);
let err = format!("Failed to call list_languages service: {err}");
gst::info!(CAT, imp: self.pad, "{err}");
gst::error_msg!(gst::LibraryError::Failed, ["{err}"])
})?;
@ -147,7 +143,7 @@ impl TranslateLoop {
.send()
.await
.map_err(|err| {
let err = format!("Failed to call translation service: {err}: {}", err.meta());
let err = format!("Failed to call translation service: {err}");
gst::info!(CAT, imp: self.pad, "{err}");
gst::error_msg!(gst::LibraryError::Failed, ["{err}"])
})?

View file

@ -8,18 +8,13 @@
// SPDX-License-Identifier: MPL-2.0
//
// Note: these tests need valid AWS credentials to run. To avoid failures on CI, we test for the
// existence of AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables and skip the test
// if those don't exist. That means that in local testing, other methods of providing credentials
// (such as ~/.aws/credentials) will not work.
// The test times out on Windows for some reason, skip until we figure out why
#[cfg(not(target_os = "windows"))]
#[cfg(test)]
mod tests {
use gst::prelude::*;
use gstaws::s3utils::{AWS_BEHAVIOR_VERSION, DEFAULT_S3_REGION};
const DEFAULT_S3_REGION: &str = "us-west-2";
fn init() {
use std::sync::Once;
@ -45,7 +40,7 @@ mod tests {
)
.or_default_provider();
let config = aws_config::defaults(AWS_BEHAVIOR_VERSION.clone())
let config = aws_config::defaults(aws_config::BehaviorVersion::latest())
.region(region_provider)
.load()
.await;
@ -178,66 +173,57 @@ mod tests {
delete_object(region.clone(), &bucket, &key).await;
}
#[test_with::env(AWS_ACCESS_KEY_ID)]
#[test_with::env(AWS_SECRET_ACCESS_KEY)]
#[ignore = "failing, needs investigation"]
#[tokio::test]
async fn test_s3_multipart_simple() {
do_s3_multipart_test("s3-test").await;
}
#[test_with::env(AWS_ACCESS_KEY_ID)]
#[test_with::env(AWS_SECRET_ACCESS_KEY)]
#[ignore = "failing, needs investigation"]
#[tokio::test]
async fn test_s3_multipart_whitespace() {
do_s3_multipart_test("s3 test").await;
}
#[test_with::env(AWS_ACCESS_KEY_ID)]
#[test_with::env(AWS_SECRET_ACCESS_KEY)]
#[ignore = "failing, needs investigation"]
#[tokio::test]
async fn test_s3_multipart_unicode() {
do_s3_multipart_test("s3 🧪 😱").await;
}
#[test_with::env(AWS_ACCESS_KEY_ID)]
#[test_with::env(AWS_SECRET_ACCESS_KEY)]
#[ignore = "failing, needs investigation"]
#[tokio::test]
async fn test_s3_put_object_simple() {
do_s3_putobject_test("s3-put-object-test", None, None, None, true).await;
}
#[test_with::env(AWS_ACCESS_KEY_ID)]
#[test_with::env(AWS_SECRET_ACCESS_KEY)]
#[ignore = "failing, needs investigation"]
#[tokio::test]
async fn test_s3_put_object_whitespace() {
do_s3_putobject_test("s3 put object test", None, None, None, true).await;
}
#[test_with::env(AWS_ACCESS_KEY_ID)]
#[test_with::env(AWS_SECRET_ACCESS_KEY)]
#[ignore = "failing, needs investigation"]
#[tokio::test]
async fn test_s3_put_object_unicode() {
do_s3_putobject_test("s3 put object 🧪 😱", None, None, None, true).await;
}
#[test_with::env(AWS_ACCESS_KEY_ID)]
#[test_with::env(AWS_SECRET_ACCESS_KEY)]
#[ignore = "failing, needs investigation"]
#[tokio::test]
async fn test_s3_put_object_flush_buffers() {
// Awkward threshold as we push 5 buffers
do_s3_putobject_test("s3-put-object-test fbuf", Some(2), None, None, true).await;
}
#[test_with::env(AWS_ACCESS_KEY_ID)]
#[test_with::env(AWS_SECRET_ACCESS_KEY)]
#[ignore = "failing, needs investigation"]
#[tokio::test]
async fn test_s3_put_object_flush_bytes() {
// Awkward threshold as we push 14 bytes per buffer
do_s3_putobject_test("s3-put-object-test fbytes", None, Some(30), None, true).await;
}
#[test_with::env(AWS_ACCESS_KEY_ID)]
#[test_with::env(AWS_SECRET_ACCESS_KEY)]
#[ignore = "failing, needs investigation"]
#[tokio::test]
async fn test_s3_put_object_flush_time() {
do_s3_putobject_test(
@ -251,8 +237,7 @@ mod tests {
.await;
}
#[test_with::env(AWS_ACCESS_KEY_ID)]
#[test_with::env(AWS_SECRET_ACCESS_KEY)]
#[ignore = "failing, needs investigation"]
#[tokio::test]
async fn test_s3_put_object_on_eos() {
// Disable all flush thresholds, so only EOS causes a flush
@ -266,8 +251,7 @@ mod tests {
.await;
}
#[test_with::env(AWS_ACCESS_KEY_ID)]
#[test_with::env(AWS_SECRET_ACCESS_KEY)]
#[ignore = "failing, needs investigation"]
#[tokio::test]
async fn test_s3_put_object_without_eos() {
// Disable all flush thresholds, skip EOS, and cause a flush on error
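The module comment near the top of this file describes gating the tests on AWS credentials; the `#[test_with::env(...)]` attributes in this diff express the same skip declaratively. A minimal sketch of the manual check the comment describes, reusing this module's helper (the test name here is hypothetical):

```rust
// Sketch only: skip an AWS-backed test when credentials are not in the
// environment, as the module comment describes. `do_s3_putobject_test`
// is the helper defined in this test module.
#[tokio::test]
async fn test_s3_put_object_needs_credentials() {
    if std::env::var("AWS_ACCESS_KEY_ID").is_err()
        || std::env::var("AWS_SECRET_ACCESS_KEY").is_err()
    {
        eprintln!("skipping: AWS credentials not set");
        return;
    }
    do_s3_putobject_test("s3-put-object-test", None, None, None, true).await;
}
```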


@ -107,7 +107,7 @@ fn setup_video_sink(pipeline: &gst::Pipeline, path: &Path, name: &str) -> Result
let enc = gst::ElementFactory::make("x264enc")
.property("bframes", 0u32)
.property("bitrate", VIDEO_BITRATE / 1000u32)
.property("key-int-max", i32::MAX as u32)
.property("key-int-max", std::i32::MAX as u32)
.property_from_str("tune", "zerolatency")
.build()?;
let h264_capsfilter = gst::ElementFactory::make("capsfilter")
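(`i32::MAX` is the associated-constant spelling, stable since Rust 1.43; it names the same value as the legacy module constant `std::i32::MAX`. The same swap recurs in the NDI diffs further down.)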


@ -1,557 +0,0 @@
// Copyright (C) 2021 Rafael Caricio <rafael@caricio.com>
// Copyright (C) 2023 Seungha Yang <seungha@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use crate::playlist::Playlist;
use chrono::{DateTime, Duration, Utc};
use gio::prelude::*;
use gst::glib;
use gst::prelude::*;
use gst::subclass::prelude::*;
use m3u8_rs::MediaSegment;
use once_cell::sync::Lazy;
use std::fs;
use std::io::Write;
use std::path;
use std::sync::Mutex;
const DEFAULT_PLAYLIST_LOCATION: &str = "playlist.m3u8";
const DEFAULT_MAX_NUM_SEGMENT_FILES: u32 = 10;
const DEFAULT_PLAYLIST_LENGTH: u32 = 5;
const DEFAULT_PROGRAM_DATE_TIME_TAG: bool = false;
const DEFAULT_CLOCK_TRACKING_FOR_PDT: bool = true;
const DEFAULT_ENDLIST: bool = true;
const SIGNAL_GET_PLAYLIST_STREAM: &str = "get-playlist-stream";
const SIGNAL_GET_FRAGMENT_STREAM: &str = "get-fragment-stream";
const SIGNAL_DELETE_FRAGMENT: &str = "delete-fragment";
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"hlsbasesink",
gst::DebugColorFlags::empty(),
Some("HLS Base sink"),
)
});
struct Settings {
playlist_location: String,
playlist_root: Option<String>,
playlist_length: u32,
max_num_segment_files: usize,
enable_program_date_time: bool,
pdt_follows_pipeline_clock: bool,
enable_endlist: bool,
}
impl Default for Settings {
fn default() -> Self {
Self {
playlist_location: String::from(DEFAULT_PLAYLIST_LOCATION),
playlist_root: None,
playlist_length: DEFAULT_PLAYLIST_LENGTH,
max_num_segment_files: DEFAULT_MAX_NUM_SEGMENT_FILES as usize,
enable_program_date_time: DEFAULT_PROGRAM_DATE_TIME_TAG,
pdt_follows_pipeline_clock: DEFAULT_CLOCK_TRACKING_FOR_PDT,
enable_endlist: DEFAULT_ENDLIST,
}
}
}
pub struct PlaylistContext {
pdt_base_utc: Option<DateTime<Utc>>,
pdt_base_running_time: Option<gst::ClockTime>,
playlist: Playlist,
old_segment_locations: Vec<String>,
segment_template: String,
playlist_location: String,
max_num_segment_files: usize,
playlist_length: u32,
}
#[derive(Default)]
pub struct State {
context: Option<PlaylistContext>,
}
#[derive(Default)]
pub struct HlsBaseSink {
settings: Mutex<Settings>,
state: Mutex<State>,
}
#[glib::object_subclass]
impl ObjectSubclass for HlsBaseSink {
const NAME: &'static str = "GstHlsBaseSink";
type Type = super::HlsBaseSink;
type ParentType = gst::Bin;
}
pub trait HlsBaseSinkImpl: BinImpl {}
unsafe impl<T: HlsBaseSinkImpl> IsSubclassable<T> for super::HlsBaseSink {}
impl ObjectImpl for HlsBaseSink {
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
obj.set_suppressed_flags(gst::ElementFlags::SINK | gst::ElementFlags::SOURCE);
obj.set_element_flags(gst::ElementFlags::SINK);
}
fn properties() -> &'static [glib::ParamSpec] {
static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| {
vec![
glib::ParamSpecString::builder("playlist-location")
.nick("Playlist Location")
.blurb("Location of the playlist to write.")
.default_value(Some(DEFAULT_PLAYLIST_LOCATION))
.build(),
glib::ParamSpecString::builder("playlist-root")
.nick("Playlist Root")
.blurb("Base path for the segments in the playlist file.")
.build(),
glib::ParamSpecUInt::builder("max-files")
.nick("Max files")
.blurb("Maximum number of files to keep on disk. Once the maximum is reached, old files start to be deleted to make room for new ones.")
.build(),
glib::ParamSpecUInt::builder("playlist-length")
.nick("Playlist length")
.blurb("Length of HLS playlist. To allow players to conform to section 6.3.3 of the HLS specification, this should be at least 3. If set to 0, the playlist will be infinite.")
.default_value(DEFAULT_PLAYLIST_LENGTH)
.build(),
glib::ParamSpecBoolean::builder("enable-program-date-time")
.nick("add EXT-X-PROGRAM-DATE-TIME tag")
.blurb("put EXT-X-PROGRAM-DATE-TIME tag in the playlist")
.default_value(DEFAULT_PROGRAM_DATE_TIME_TAG)
.build(),
glib::ParamSpecBoolean::builder("pdt-follows-pipeline-clock")
.nick("Whether Program-Date-Time should follow the pipeline clock")
.blurb("As there might be drift between the wallclock and pipeline clock, this controls whether the Program-Date-Time markers should follow the pipeline clock rate (true), or be skewed to match the wallclock rate (false).")
.default_value(DEFAULT_CLOCK_TRACKING_FOR_PDT)
.build(),
glib::ParamSpecBoolean::builder("enable-endlist")
.nick("Enable Endlist")
.blurb("Write \"EXT-X-ENDLIST\" tag to manifest at the end of stream")
.default_value(DEFAULT_ENDLIST)
.build(),
]
});
PROPERTIES.as_ref()
}
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
let mut settings = self.settings.lock().unwrap();
match pspec.name() {
"playlist-location" => {
settings.playlist_location = value
.get::<Option<String>>()
.expect("type checked upstream")
.unwrap_or_else(|| String::from(DEFAULT_PLAYLIST_LOCATION));
}
"playlist-root" => {
settings.playlist_root = value
.get::<Option<String>>()
.expect("type checked upstream");
}
"max-files" => {
let max_files: u32 = value.get().expect("type checked upstream");
settings.max_num_segment_files = max_files as usize;
}
"playlist-length" => {
settings.playlist_length = value.get().expect("type checked upstream");
}
"enable-program-date-time" => {
settings.enable_program_date_time = value.get().expect("type checked upstream");
}
"pdt-follows-pipeline-clock" => {
settings.pdt_follows_pipeline_clock = value.get().expect("type checked upstream");
}
"enable-endlist" => {
settings.enable_endlist = value.get().expect("type checked upstream");
}
_ => unimplemented!(),
};
}
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
let settings = self.settings.lock().unwrap();
match pspec.name() {
"playlist-location" => settings.playlist_location.to_value(),
"playlist-root" => settings.playlist_root.to_value(),
"max-files" => {
let max_files = settings.max_num_segment_files as u32;
max_files.to_value()
}
"playlist-length" => settings.playlist_length.to_value(),
"enable-program-date-time" => settings.enable_program_date_time.to_value(),
"pdt-follows-pipeline-clock" => settings.pdt_follows_pipeline_clock.to_value(),
"enable-endlist" => settings.enable_endlist.to_value(),
_ => unimplemented!(),
}
}
fn signals() -> &'static [glib::subclass::Signal] {
static SIGNALS: Lazy<Vec<glib::subclass::Signal>> = Lazy::new(|| {
vec![
glib::subclass::Signal::builder(SIGNAL_GET_PLAYLIST_STREAM)
.param_types([String::static_type()])
.return_type::<Option<gio::OutputStream>>()
.class_handler(|_, args| {
let elem = args[0].get::<super::HlsBaseSink>().expect("signal arg");
let playlist_location = args[1].get::<String>().expect("signal arg");
let imp = elem.imp();
Some(imp.new_file_stream(&playlist_location).ok().to_value())
})
.accumulator(|_hint, ret, value| {
// First signal handler wins
*ret = value.clone();
false
})
.build(),
glib::subclass::Signal::builder(SIGNAL_GET_FRAGMENT_STREAM)
.param_types([String::static_type()])
.return_type::<Option<gio::OutputStream>>()
.class_handler(|_, args| {
let elem = args[0].get::<super::HlsBaseSink>().expect("signal arg");
let fragment_location = args[1].get::<String>().expect("signal arg");
let imp = elem.imp();
Some(imp.new_file_stream(&fragment_location).ok().to_value())
})
.accumulator(|_hint, ret, value| {
// First signal handler wins
*ret = value.clone();
false
})
.build(),
glib::subclass::Signal::builder(SIGNAL_DELETE_FRAGMENT)
.param_types([String::static_type()])
.return_type::<bool>()
.class_handler(|_, args| {
let elem = args[0].get::<super::HlsBaseSink>().expect("signal arg");
let fragment_location = args[1].get::<String>().expect("signal arg");
let imp = elem.imp();
imp.delete_fragment(&fragment_location);
Some(true.to_value())
})
.accumulator(|_hint, ret, value| {
// First signal handler wins
*ret = value.clone();
false
})
.build(),
]
});
SIGNALS.as_ref()
}
}
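Because the accumulators above stop emission at the first handler that returns a stream, an application can take over where playlists and fragments are written. A hypothetical sketch against an element deriving from this base (e.g. hlscmafsink, registered further down):

```rust
use gst::prelude::*;

// Sketch only: redirect fragment output to an in-memory stream by answering
// the "get-fragment-stream" signal before the default file-based class handler.
fn redirect_fragments(sink: &gst::Element) {
    sink.connect("get-fragment-stream", false, |args| {
        let _location = args[1].get::<String>().expect("signal arg");
        let stream = gio::MemoryOutputStream::new_resizable();
        Some(stream.upcast::<gio::OutputStream>().to_value())
    });
}
```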
impl GstObjectImpl for HlsBaseSink {}
impl ElementImpl for HlsBaseSink {
fn change_state(
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
let ret = self.parent_change_state(transition)?;
match transition {
gst::StateChange::PlayingToPaused => {
let mut state = self.state.lock().unwrap();
if let Some(context) = state.context.as_mut() {
// Reset the running-time → UTC mapping. During pause the running
// time stops but UTC keeps moving, so the mapping has to be
// recalculated afterwards.
context.pdt_base_running_time = None;
context.pdt_base_utc = None
}
}
gst::StateChange::PausedToReady => {
self.close_playlist();
}
_ => (),
}
Ok(ret)
}
}
impl BinImpl for HlsBaseSink {}
impl HlsBaseSinkImpl for HlsBaseSink {}
impl HlsBaseSink {
pub fn open_playlist(&self, playlist: Playlist, segment_template: String) {
let mut state = self.state.lock().unwrap();
let settings = self.settings.lock().unwrap();
state.context = Some(PlaylistContext {
pdt_base_utc: None,
pdt_base_running_time: None,
playlist,
old_segment_locations: Vec::new(),
segment_template,
playlist_location: settings.playlist_location.clone(),
max_num_segment_files: settings.max_num_segment_files,
playlist_length: settings.playlist_length,
});
}
fn close_playlist(&self) {
let mut state = self.state.lock().unwrap();
if let Some(mut context) = state.context.take() {
if context.playlist.is_rendering() {
context
.playlist
.stop(self.settings.lock().unwrap().enable_endlist);
let _ = self.write_playlist(&mut context);
}
}
}
pub fn get_fragment_stream(&self, fragment_id: u32) -> Option<(gio::OutputStream, String)> {
let mut state = self.state.lock().unwrap();
let context = match state.context.as_mut() {
Some(context) => context,
None => {
gst::error!(
CAT,
imp: self,
"Playlist is not configured",
);
return None;
}
};
let location = match sprintf::sprintf!(&context.segment_template, fragment_id) {
Ok(file_name) => file_name,
Err(err) => {
gst::error!(
CAT,
imp: self,
"Couldn't build file name, err: {:?}", err,
);
return None;
}
};
gst::trace!(
CAT,
imp: self,
"Segment location formatted: {}",
location
);
let stream = match self
.obj()
.emit_by_name::<Option<gio::OutputStream>>(SIGNAL_GET_FRAGMENT_STREAM, &[&location])
{
Some(stream) => stream,
None => return None,
};
Some((stream, location))
}
pub fn get_segment_uri(&self, location: &str) -> String {
let settings = self.settings.lock().unwrap();
let file_name = path::Path::new(&location)
.file_name()
.unwrap()
.to_str()
.unwrap();
if let Some(playlist_root) = &settings.playlist_root {
format!("{playlist_root}/{file_name}")
} else {
file_name.to_string()
}
}
pub fn add_segment(
&self,
location: &str,
running_time: Option<gst::ClockTime>,
mut segment: MediaSegment,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let mut state = self.state.lock().unwrap();
let context = match state.context.as_mut() {
Some(context) => context,
None => {
gst::error!(
CAT,
imp: self,
"Playlist is not configured",
);
return Err(gst::FlowError::Error);
}
};
if let Some(running_time) = running_time {
if context.pdt_base_running_time.is_none() {
context.pdt_base_running_time = Some(running_time);
}
let settings = self.settings.lock().unwrap();
// Calculate the mapping from running time to UTC.
// When pdt-follows-pipeline-clock is disabled, pdt_base_utc is recomputed
// for every segment, which skews the PROGRAM-DATE-TIME tags to the wallclock
// and avoids drift between the PDT tags and the external clock (in case the
// GStreamer clock has skew w.r.t. the external clock).
if context.pdt_base_utc.is_none() || !settings.pdt_follows_pipeline_clock {
let obj = self.obj();
let now_utc = Utc::now();
let now_gst = obj.clock().unwrap().time().unwrap();
let pts_clock_time = running_time + obj.base_time().unwrap();
let diff = now_gst.nseconds() as i64 - pts_clock_time.nseconds() as i64;
let pts_utc = now_utc
.checked_sub_signed(Duration::nanoseconds(diff))
.expect("offsetting the utc with gstreamer clock-diff overflow");
context.pdt_base_utc = Some(pts_utc);
}
if settings.enable_program_date_time {
// Add the diff of running time to UTC time
// date_time = first_segment_utc + (current_seg_running_time - first_seg_running_time)
let date_time =
context
.pdt_base_utc
.unwrap()
.checked_add_signed(Duration::nanoseconds(
running_time
.opt_checked_sub(context.pdt_base_running_time)
.unwrap()
.unwrap()
.nseconds() as i64,
));
if let Some(date_time) = date_time {
segment.program_date_time = Some(date_time.into());
}
}
}
context.playlist.add_segment(segment);
if context.playlist.is_type_undefined() {
context.old_segment_locations.push(location.to_string());
}
self.write_playlist(context)
}
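The UTC mapping inside `add_segment` can be read as a standalone formula: sample the wallclock and the pipeline clock together, then shift the wallclock back by how far the pipeline clock has advanced past the buffer. A minimal sketch of just that arithmetic, using the same names as above:

```rust
// Sketch of the running-time → UTC mapping above: `now_utc` and `now_gst`
// are sampled together; `base` is the element's base time and `rt` the
// buffer's running time, so `rt + base` is the buffer's pipeline clock time.
fn running_time_to_utc(
    now_utc: chrono::DateTime<chrono::Utc>,
    now_gst: gst::ClockTime,
    base: gst::ClockTime,
    rt: gst::ClockTime,
) -> Option<chrono::DateTime<chrono::Utc>> {
    let diff = now_gst.nseconds() as i64 - (rt + base).nseconds() as i64;
    now_utc.checked_sub_signed(chrono::Duration::nanoseconds(diff))
}
```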
fn write_playlist(
&self,
context: &mut PlaylistContext,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::info!(CAT, imp: self, "Preparing to write new playlist, COUNT {}", context.playlist.len());
context
.playlist
.update_playlist_state(context.playlist_length as usize);
// Acquires the playlist file handle so we can update it with new content. By default, this
// is expected to be the same file every time.
let mut playlist_stream = self
.obj()
.emit_by_name::<Option<gio::OutputStream>>(
SIGNAL_GET_PLAYLIST_STREAM,
&[&context.playlist_location],
)
.ok_or_else(|| {
gst::error!(
CAT,
imp: self,
"Could not get stream to write playlist content",
);
gst::FlowError::Error
})?
.into_write();
context
.playlist
.write_to(&mut playlist_stream)
.map_err(|err| {
gst::error!(
CAT,
imp: self,
"Could not write new playlist: {}",
err.to_string()
);
gst::FlowError::Error
})?;
playlist_stream.flush().map_err(|err| {
gst::error!(
CAT,
imp: self,
"Could not flush playlist: {}",
err.to_string()
);
gst::FlowError::Error
})?;
if context.playlist.is_type_undefined() && context.max_num_segment_files > 0 {
// Cleanup old segments from filesystem
while context.old_segment_locations.len() > context.max_num_segment_files {
let old_segment_location = context.old_segment_locations.remove(0);
if !self
.obj()
.emit_by_name::<bool>(SIGNAL_DELETE_FRAGMENT, &[&old_segment_location])
{
gst::error!(CAT, imp: self, "Could not delete fragment");
}
}
}
gst::debug!(CAT, imp: self, "Wrote new playlist file!");
Ok(gst::FlowSuccess::Ok)
}
pub fn new_file_stream<P>(&self, location: &P) -> Result<gio::OutputStream, String>
where
P: AsRef<path::Path>,
{
let file = fs::File::create(location).map_err(move |err| {
let error_msg = gst::error_msg!(
gst::ResourceError::OpenWrite,
[
"Could not open file {} for writing: {}",
location.as_ref().to_str().unwrap(),
err.to_string(),
]
);
self.post_error_message(error_msg);
err.to_string()
})?;
Ok(gio::WriteOutputStream::new(file).upcast())
}
fn delete_fragment<P>(&self, location: &P)
where
P: AsRef<path::Path>,
{
let _ = fs::remove_file(location).map_err(|err| {
gst::warning!(
CAT,
imp: self,
"Could not delete segment file: {}",
err.to_string()
);
});
}
}


@ -1,527 +0,0 @@
// Copyright (C) 2023 Seungha Yang <seungha@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use crate::hlsbasesink::HlsBaseSinkImpl;
use crate::hlssink3::HlsSink3PlaylistType;
use crate::playlist::Playlist;
use crate::HlsBaseSink;
use gio::prelude::*;
use gst::glib;
use gst::prelude::*;
use gst::subclass::prelude::*;
use m3u8_rs::{MediaPlaylist, MediaPlaylistType, MediaSegment};
use once_cell::sync::Lazy;
use std::io::Write;
use std::sync::Mutex;
const DEFAULT_INIT_LOCATION: &str = "init%05d.mp4";
const DEFAULT_CMAF_LOCATION: &str = "segment%05d.m4s";
const DEFAULT_TARGET_DURATION: u32 = 15;
const DEFAULT_PLAYLIST_TYPE: HlsSink3PlaylistType = HlsSink3PlaylistType::Unspecified;
const DEFAULT_SYNC: bool = true;
const DEFAULT_LATENCY: gst::ClockTime =
gst::ClockTime::from_mseconds((DEFAULT_TARGET_DURATION * 500) as u64);
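(With the defaults above this works out to 15 × 500 ms = 7.5 s, i.e. the default latency is half the default target duration.)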
const SIGNAL_GET_INIT_STREAM: &str = "get-init-stream";
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"hlscmafsink",
gst::DebugColorFlags::empty(),
Some("HLS CMAF sink"),
)
});
macro_rules! base_imp {
($i:expr) => {
$i.obj().upcast_ref::<HlsBaseSink>().imp()
};
}
struct HlsCmafSinkSettings {
init_location: String,
location: String,
target_duration: u32,
playlist_type: Option<MediaPlaylistType>,
sync: bool,
latency: gst::ClockTime,
cmafmux: gst::Element,
appsink: gst_app::AppSink,
}
impl Default for HlsCmafSinkSettings {
fn default() -> Self {
let cmafmux = gst::ElementFactory::make("cmafmux")
.name("muxer")
.property(
"fragment-duration",
gst::ClockTime::from_seconds(DEFAULT_TARGET_DURATION as u64),
)
.property("latency", DEFAULT_LATENCY)
.build()
.expect("Could not make element cmafmux");
let appsink = gst_app::AppSink::builder()
.buffer_list(true)
.sync(DEFAULT_SYNC)
.name("sink")
.build();
Self {
init_location: String::from(DEFAULT_INIT_LOCATION),
location: String::from(DEFAULT_CMAF_LOCATION),
target_duration: DEFAULT_TARGET_DURATION,
playlist_type: None,
sync: DEFAULT_SYNC,
latency: DEFAULT_LATENCY,
cmafmux,
appsink,
}
}
}
#[derive(Default)]
struct HlsCmafSinkState {
init_idx: u32,
segment_idx: u32,
init_segment: Option<m3u8_rs::Map>,
new_header: bool,
}
#[derive(Default)]
pub struct HlsCmafSink {
settings: Mutex<HlsCmafSinkSettings>,
state: Mutex<HlsCmafSinkState>,
}
#[glib::object_subclass]
impl ObjectSubclass for HlsCmafSink {
const NAME: &'static str = "GstHlsCmafSink";
type Type = super::HlsCmafSink;
type ParentType = HlsBaseSink;
}
impl ObjectImpl for HlsCmafSink {
fn properties() -> &'static [glib::ParamSpec] {
static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| {
vec![
glib::ParamSpecString::builder("init-location")
.nick("Init Location")
.blurb("Location of the init fragment file to write")
.default_value(Some(DEFAULT_INIT_LOCATION))
.build(),
glib::ParamSpecString::builder("location")
.nick("Location")
.blurb("Location of the fragment file to write")
.default_value(Some(DEFAULT_CMAF_LOCATION))
.build(),
glib::ParamSpecUInt::builder("target-duration")
.nick("Target duration")
.blurb("The target duration in seconds of a segment/file. (0 - disabled, useful for management of segment duration by the streaming server)")
.default_value(DEFAULT_TARGET_DURATION)
.mutable_ready()
.build(),
glib::ParamSpecEnum::builder_with_default("playlist-type", DEFAULT_PLAYLIST_TYPE)
.nick("Playlist Type")
.blurb("The type of the playlist to use. When VOD type is set, the playlist will be live until the pipeline ends execution.")
.mutable_ready()
.build(),
glib::ParamSpecBoolean::builder("sync")
.nick("Sync")
.blurb("Sync on the clock")
.default_value(DEFAULT_SYNC)
.build(),
glib::ParamSpecUInt64::builder("latency")
.nick("Latency")
.blurb(
"Additional latency to allow upstream to take longer to \
produce buffers for the current position (in nanoseconds)",
)
.maximum(i64::MAX as u64)
.default_value(DEFAULT_LATENCY.nseconds())
.build(),
]
});
PROPERTIES.as_ref()
}
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
let mut settings = self.settings.lock().unwrap();
match pspec.name() {
"init-location" => {
settings.init_location = value
.get::<Option<String>>()
.expect("type checked upstream")
.unwrap_or_else(|| DEFAULT_INIT_LOCATION.into());
}
"location" => {
settings.location = value
.get::<Option<String>>()
.expect("type checked upstream")
.unwrap_or_else(|| DEFAULT_CMAF_LOCATION.into());
}
"target-duration" => {
settings.target_duration = value.get().expect("type checked upstream");
settings.cmafmux.set_property(
"fragment-duration",
gst::ClockTime::from_seconds(settings.target_duration as u64),
);
}
"playlist-type" => {
settings.playlist_type = value
.get::<HlsSink3PlaylistType>()
.expect("type checked upstream")
.into();
}
"sync" => {
settings.sync = value.get().expect("type checked upstream");
settings.appsink.set_property("sync", settings.sync);
}
"latency" => {
settings.latency = value.get().expect("type checked upstream");
settings.cmafmux.set_property("latency", settings.latency);
}
_ => unimplemented!(),
};
}
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
let settings = self.settings.lock().unwrap();
match pspec.name() {
"init-location" => settings.init_location.to_value(),
"location" => settings.location.to_value(),
"target-duration" => settings.target_duration.to_value(),
"playlist-type" => {
let playlist_type: HlsSink3PlaylistType = settings.playlist_type.as_ref().into();
playlist_type.to_value()
}
"sync" => settings.sync.to_value(),
"latency" => settings.latency.to_value(),
_ => unimplemented!(),
}
}
fn signals() -> &'static [glib::subclass::Signal] {
static SIGNALS: Lazy<Vec<glib::subclass::Signal>> = Lazy::new(|| {
vec![glib::subclass::Signal::builder(SIGNAL_GET_INIT_STREAM)
.param_types([String::static_type()])
.return_type::<Option<gio::OutputStream>>()
.class_handler(|_, args| {
let elem = args[0].get::<HlsBaseSink>().expect("signal arg");
let init_location = args[1].get::<String>().expect("signal arg");
let imp = elem.imp();
Some(imp.new_file_stream(&init_location).ok().to_value())
})
.accumulator(|_hint, ret, value| {
// First signal handler wins
*ret = value.clone();
false
})
.build()]
});
SIGNALS.as_ref()
}
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
let settings = self.settings.lock().unwrap();
obj.add_many([&settings.cmafmux, settings.appsink.upcast_ref()])
.unwrap();
settings.cmafmux.link(&settings.appsink).unwrap();
let sinkpad = settings.cmafmux.static_pad("sink").unwrap();
let gpad = gst::GhostPad::with_target(&sinkpad).unwrap();
obj.add_pad(&gpad).unwrap();
let self_weak = self.downgrade();
settings.appsink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
.new_sample(move |sink| {
let Some(imp) = self_weak.upgrade() else {
return Err(gst::FlowError::Eos);
};
let sample = sink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
imp.on_new_sample(sample)
})
.build(),
);
}
}
impl GstObjectImpl for HlsCmafSink {}
impl ElementImpl for HlsCmafSink {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"HTTP Live Streaming CMAF Sink",
"Sink/Muxer",
"HTTP Live Streaming CMAF Sink",
"Seungha Yang <seungha@centricular.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&[
gst::Structure::builder("video/x-h264")
.field("stream-format", gst::List::new(["avc", "avc3"]))
.field("alignment", "au")
.field("width", gst::IntRange::new(1, u16::MAX as i32))
.field("height", gst::IntRange::new(1, u16::MAX as i32))
.build(),
gst::Structure::builder("video/x-h265")
.field("stream-format", gst::List::new(["hvc1", "hev1"]))
.field("alignment", "au")
.field("width", gst::IntRange::new(1, u16::MAX as i32))
.field("height", gst::IntRange::new(1, u16::MAX as i32))
.build(),
gst::Structure::builder("audio/mpeg")
.field("mpegversion", 4i32)
.field("stream-format", "raw")
.field("channels", gst::IntRange::new(1, u16::MAX as i32))
.field("rate", gst::IntRange::new(1, i32::MAX))
.build(),
]
.into_iter()
.collect::<gst::Caps>(),
)
.unwrap();
vec![pad_template]
});
PAD_TEMPLATES.as_ref()
}
fn change_state(
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
if transition == gst::StateChange::ReadyToPaused {
let (target_duration, playlist_type, segment_template) = {
let settings = self.settings.lock().unwrap();
(
settings.target_duration,
settings.playlist_type.clone(),
settings.location.clone(),
)
};
let playlist = self.start(target_duration, playlist_type);
base_imp!(self).open_playlist(playlist, segment_template);
}
self.parent_change_state(transition)
}
}
impl BinImpl for HlsCmafSink {}
impl HlsBaseSinkImpl for HlsCmafSink {}
impl HlsCmafSink {
fn start(&self, target_duration: u32, playlist_type: Option<MediaPlaylistType>) -> Playlist {
gst::info!(CAT, imp: self, "Starting");
let mut state = self.state.lock().unwrap();
*state = HlsCmafSinkState::default();
let (turn_vod, playlist_type) = if playlist_type == Some(MediaPlaylistType::Vod) {
(true, Some(MediaPlaylistType::Event))
} else {
(false, playlist_type)
};
let playlist = MediaPlaylist {
version: Some(6),
target_duration: target_duration as f32,
playlist_type,
independent_segments: true,
..Default::default()
};
Playlist::new(playlist, turn_vod, true)
}
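Note how `start` handles VOD: as the `playlist-type` blurb puts it, a VOD playlist stays live while the pipeline runs, so it is started as EVENT and the `turn_vod` flag (presumably consumed by `Playlist`) marks it for rewriting as `#EXT-X-PLAYLIST-TYPE:VOD` once the playlist is stopped.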
fn on_init_segment(&self) -> Result<gio::OutputStreamWrite<gio::OutputStream>, String> {
let settings = self.settings.lock().unwrap();
let mut state = self.state.lock().unwrap();
let location = match sprintf::sprintf!(&settings.init_location, state.init_idx) {
Ok(location) => location,
Err(err) => {
gst::error!(
CAT,
imp: self,
"Couldn't build file name, err: {:?}", err,
);
return Err(String::from("Invalid init segment file pattern"));
}
};
let stream = self
.obj()
.emit_by_name::<Option<gio::OutputStream>>(SIGNAL_GET_INIT_STREAM, &[&location])
.ok_or_else(|| String::from("Error while getting fragment stream"))?
.into_write();
let uri = base_imp!(self).get_segment_uri(&location);
state.init_segment = Some(m3u8_rs::Map {
uri,
..Default::default()
});
state.new_header = true;
state.init_idx += 1;
Ok(stream)
}
fn on_new_fragment(
&self,
) -> Result<(gio::OutputStreamWrite<gio::OutputStream>, String), String> {
let mut state = self.state.lock().unwrap();
let (stream, location) = base_imp!(self)
.get_fragment_stream(state.segment_idx)
.ok_or_else(|| String::from("Error while getting fragment stream"))?;
state.segment_idx += 1;
Ok((stream.into_write(), location))
}
fn add_segment(
&self,
duration: f32,
running_time: Option<gst::ClockTime>,
location: String,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let uri = base_imp!(self).get_segment_uri(&location);
let mut state = self.state.lock().unwrap();
let map = if state.new_header {
state.new_header = false;
state.init_segment.clone()
} else {
None
};
base_imp!(self).add_segment(
&location,
running_time,
MediaSegment {
uri,
duration,
map,
..Default::default()
},
)
}
fn on_new_sample(&self, sample: gst::Sample) -> Result<gst::FlowSuccess, gst::FlowError> {
let mut buffer_list = sample.buffer_list_owned().unwrap();
let mut first = buffer_list.get(0).unwrap();
if first
.flags()
.contains(gst::BufferFlags::DISCONT | gst::BufferFlags::HEADER)
{
let mut stream = self.on_init_segment().map_err(|err| {
gst::error!(
CAT,
imp: self,
"Couldn't get output stream for init segment, {err}",
);
gst::FlowError::Error
})?;
let map = first.map_readable().unwrap();
stream.write(&map).map_err(|_| {
gst::error!(
CAT,
imp: self,
"Couldn't write init segment to output stream",
);
gst::FlowError::Error
})?;
stream.flush().map_err(|_| {
gst::error!(
CAT,
imp: self,
"Couldn't flush output stream",
);
gst::FlowError::Error
})?;
drop(map);
buffer_list.make_mut().remove(0..1);
if buffer_list.is_empty() {
return Ok(gst::FlowSuccess::Ok);
}
first = buffer_list.get(0).unwrap();
}
let segment = sample
.segment()
.unwrap()
.downcast_ref::<gst::ClockTime>()
.unwrap();
let running_time = segment.to_running_time(first.pts().unwrap());
let dur = first.duration().unwrap();
let (mut stream, location) = self.on_new_fragment().map_err(|err| {
gst::error!(
CAT,
imp: self,
"Couldn't get output stream for segment, {err}",
);
gst::FlowError::Error
})?;
for buffer in &*buffer_list {
let map = buffer.map_readable().unwrap();
stream.write(&map).map_err(|_| {
gst::error!(
CAT,
imp: self,
"Couldn't write segment to output stream",
);
gst::FlowError::Error
})?;
}
stream.flush().map_err(|_| {
gst::error!(
CAT,
imp: self,
"Couldn't flush output stream",
);
gst::FlowError::Error
})?;
self.add_segment(dur.mseconds() as f32 / 1_000f32, running_time, location)
}
}


@ -1,34 +0,0 @@
// Copyright (C) 2023 Seungha Yang <seungha@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(clippy::non_send_fields_in_send_ty, unused_doc_comments)]
use crate::HlsBaseSink;
/**
* plugin-hlssink3:
*
* Since: plugins-rs-0.8.0
*/
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub struct HlsCmafSink(ObjectSubclass<imp::HlsCmafSink>) @extends HlsBaseSink, gst::Bin, gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"hlscmafsink",
gst::Rank::NONE,
HlsCmafSink::static_type(),
)?;
Ok(())
}
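A hypothetical launch sketch for the element registered above; the caps follow the pad template in imp.rs (H.264/H.265 in avc/hvc1-style stream-format, or AAC), and the `init%05d.mp4`/`segment%05d.m4s`-style patterns mirror the defaults:

```rust
use gst::prelude::*;

// Sketch only: feed H.264 into hlscmafsink from application code
// (error/EOS handling omitted for brevity).
fn run() -> Result<(), Box<dyn std::error::Error>> {
    gst::init()?;
    let pipeline = gst::parse_launch(
        "videotestsrc num-buffers=300 ! x264enc ! h264parse ! \
         hlscmafsink target-duration=5 init-location=init%05d.mp4 location=seg%05d.m4s",
    )?;
    pipeline.set_state(gst::State::Playing)?;
    Ok(())
}
```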


@ -1,581 +0,0 @@
// Copyright (C) 2021 Rafael Caricio <rafael@caricio.com>
// Copyright (C) 2023 Seungha Yang <seungha@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use crate::hlsbasesink::HlsBaseSinkImpl;
use crate::hlssink3::HlsSink3PlaylistType;
use crate::playlist::Playlist;
use crate::HlsBaseSink;
use gio::prelude::*;
use gst::glib;
use gst::prelude::*;
use gst::subclass::prelude::*;
use m3u8_rs::{MediaPlaylist, MediaPlaylistType, MediaSegment};
use once_cell::sync::Lazy;
use std::sync::Mutex;
const DEFAULT_TS_LOCATION: &str = "segment%05d.ts";
const DEFAULT_TARGET_DURATION: u32 = 15;
const DEFAULT_PLAYLIST_TYPE: HlsSink3PlaylistType = HlsSink3PlaylistType::Unspecified;
const DEFAULT_I_FRAMES_ONLY_PLAYLIST: bool = false;
const DEFAULT_SEND_KEYFRAME_REQUESTS: bool = true;
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new("hlssink3", gst::DebugColorFlags::empty(), Some("HLS sink"))
});
macro_rules! base_imp {
($i:expr) => {
$i.obj().upcast_ref::<HlsBaseSink>().imp()
};
}
impl From<HlsSink3PlaylistType> for Option<MediaPlaylistType> {
fn from(pl_type: HlsSink3PlaylistType) -> Self {
use HlsSink3PlaylistType::*;
match pl_type {
Unspecified => None,
Event => Some(MediaPlaylistType::Event),
Vod => Some(MediaPlaylistType::Vod),
}
}
}
impl From<Option<&MediaPlaylistType>> for HlsSink3PlaylistType {
fn from(inner_pl_type: Option<&MediaPlaylistType>) -> Self {
use HlsSink3PlaylistType::*;
match inner_pl_type {
None | Some(MediaPlaylistType::Other(_)) => Unspecified,
Some(MediaPlaylistType::Event) => Event,
Some(MediaPlaylistType::Vod) => Vod,
}
}
}
struct HlsSink3Settings {
location: String,
target_duration: u32,
playlist_type: Option<MediaPlaylistType>,
i_frames_only: bool,
send_keyframe_requests: bool,
splitmuxsink: gst::Element,
giostreamsink: gst::Element,
video_sink: bool,
audio_sink: bool,
}
impl Default for HlsSink3Settings {
fn default() -> Self {
let muxer = gst::ElementFactory::make("mpegtsmux")
.name("mpeg-ts_mux")
.build()
.expect("Could not make element mpegtsmux");
let giostreamsink = gst::ElementFactory::make("giostreamsink")
.name("giostream_sink")
.build()
.expect("Could not make element giostreamsink");
let splitmuxsink = gst::ElementFactory::make("splitmuxsink")
.name("split_mux_sink")
.property("muxer", &muxer)
.property("reset-muxer", false)
.property("send-keyframe-requests", DEFAULT_SEND_KEYFRAME_REQUESTS)
.property(
"max-size-time",
gst::ClockTime::from_seconds(DEFAULT_TARGET_DURATION as u64),
)
.property("sink", &giostreamsink)
.build()
.expect("Could not make element splitmuxsink");
// giostreamsink doesn't let go of its stream until the element is finalized, which might
// be too late for the calling application. Let's try to force it to close while tearing
// down the pipeline.
if giostreamsink.has_property("close-on-stop", Some(bool::static_type())) {
giostreamsink.set_property("close-on-stop", true);
} else {
gst::warning!(
CAT,
"hlssink3 may sometimes fail to write out the final playlist update. This can be fixed by using giostreamsink from GStreamer 1.24 or later."
)
}
Self {
location: String::from(DEFAULT_TS_LOCATION),
target_duration: DEFAULT_TARGET_DURATION,
playlist_type: None,
send_keyframe_requests: DEFAULT_SEND_KEYFRAME_REQUESTS,
i_frames_only: DEFAULT_I_FRAMES_ONLY_PLAYLIST,
splitmuxsink,
giostreamsink,
video_sink: false,
audio_sink: false,
}
}
}
#[derive(Default)]
struct HlsSink3State {
fragment_opened_at: Option<gst::ClockTime>,
fragment_running_time: Option<gst::ClockTime>,
current_segment_location: Option<String>,
}
#[derive(Default)]
pub struct HlsSink3 {
settings: Mutex<HlsSink3Settings>,
state: Mutex<HlsSink3State>,
}
#[glib::object_subclass]
impl ObjectSubclass for HlsSink3 {
const NAME: &'static str = "GstHlsSink3";
type Type = super::HlsSink3;
type ParentType = HlsBaseSink;
}
impl ObjectImpl for HlsSink3 {
fn properties() -> &'static [glib::ParamSpec] {
static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| {
vec![
glib::ParamSpecString::builder("location")
.nick("File Location")
.blurb("Location of the file to write")
.default_value(Some(DEFAULT_TS_LOCATION))
.build(),
glib::ParamSpecUInt::builder("target-duration")
.nick("Target duration")
.blurb("The target duration in seconds of a segment/file. (0 - disabled, useful for management of segment duration by the streaming server)")
.default_value(DEFAULT_TARGET_DURATION)
.build(),
glib::ParamSpecEnum::builder_with_default("playlist-type", DEFAULT_PLAYLIST_TYPE)
.nick("Playlist Type")
.blurb("The type of the playlist to use. When VOD type is set, the playlist will be live until the pipeline ends execution.")
.build(),
glib::ParamSpecBoolean::builder("i-frames-only")
.nick("I-Frames only playlist")
.blurb("Each video segments is single iframe, So put EXT-X-I-FRAMES-ONLY tag in the playlist")
.default_value(DEFAULT_I_FRAMES_ONLY_PLAYLIST)
.build(),
glib::ParamSpecBoolean::builder("send-keyframe-requests")
.nick("Send Keyframe Requests")
.blurb("Send keyframe requests to ensure correct fragmentation. If this is disabled then the input must have keyframes in regular intervals.")
.default_value(DEFAULT_SEND_KEYFRAME_REQUESTS)
.build(),
]
});
PROPERTIES.as_ref()
}
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
let mut settings = self.settings.lock().unwrap();
match pspec.name() {
"location" => {
settings.location = value
.get::<Option<String>>()
.expect("type checked upstream")
.unwrap_or_else(|| DEFAULT_TS_LOCATION.into());
settings
.splitmuxsink
.set_property("location", &settings.location);
}
"target-duration" => {
settings.target_duration = value.get().expect("type checked upstream");
settings.splitmuxsink.set_property(
"max-size-time",
gst::ClockTime::from_seconds(settings.target_duration as u64),
);
}
"playlist-type" => {
settings.playlist_type = value
.get::<HlsSink3PlaylistType>()
.expect("type checked upstream")
.into();
}
"i-frames-only" => {
settings.i_frames_only = value.get().expect("type checked upstream");
if settings.i_frames_only && settings.audio_sink {
gst::element_error!(
self.obj(),
gst::StreamError::WrongType,
("Invalid configuration"),
["Audio not allowed for i-frames-only-stream"]
);
}
}
"send-keyframe-requests" => {
settings.send_keyframe_requests = value.get().expect("type checked upstream");
settings
.splitmuxsink
.set_property("send-keyframe-requests", settings.send_keyframe_requests);
}
_ => unimplemented!(),
};
}
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
let settings = self.settings.lock().unwrap();
match pspec.name() {
"location" => settings.location.to_value(),
"target-duration" => settings.target_duration.to_value(),
"playlist-type" => {
let playlist_type: HlsSink3PlaylistType = settings.playlist_type.as_ref().into();
playlist_type.to_value()
}
"i-frames-only" => settings.i_frames_only.to_value(),
"send-keyframe-requests" => settings.send_keyframe_requests.to_value(),
_ => unimplemented!(),
}
}
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
let settings = self.settings.lock().unwrap();
obj.add(&settings.splitmuxsink).unwrap();
settings
.splitmuxsink
.connect("format-location-full", false, {
let imp_weak = self.downgrade();
move |args| {
let Some(imp) = imp_weak.upgrade() else {
return Some(None::<String>.to_value());
};
let fragment_id = args[1].get::<u32>().unwrap();
gst::info!(CAT, imp: imp, "Got fragment-id: {}", fragment_id);
let sample = args[2].get::<gst::Sample>().unwrap();
let buffer = sample.buffer();
let running_time = if let Some(buffer) = buffer {
let segment = sample
.segment()
.expect("segment not available")
.downcast_ref::<gst::ClockTime>()
.expect("no time segment");
segment.to_running_time(buffer.pts().unwrap())
} else {
gst::warning!(
CAT,
imp: imp,
"buffer null for fragment-id: {}",
fragment_id
);
None
};
match imp.on_format_location(fragment_id, running_time) {
Ok(segment_location) => Some(segment_location.to_value()),
Err(err) => {
gst::error!(CAT, imp: imp, "on format-location handler: {}", err);
Some("unknown_segment".to_value())
}
}
}
});
}
}
impl GstObjectImpl for HlsSink3 {}
impl ElementImpl for HlsSink3 {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"HTTP Live Streaming sink",
"Sink/Muxer",
"HTTP Live Streaming sink",
"Alessandro Decina <alessandro.d@gmail.com>, \
Sebastian Dröge <sebastian@centricular.com>, \
Rafael Caricio <rafael@caricio.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let caps = gst::Caps::new_any();
let video_pad_template = gst::PadTemplate::new(
"video",
gst::PadDirection::Sink,
gst::PadPresence::Request,
&caps,
)
.unwrap();
let caps = gst::Caps::new_any();
let audio_pad_template = gst::PadTemplate::new(
"audio",
gst::PadDirection::Sink,
gst::PadPresence::Request,
&caps,
)
.unwrap();
vec![video_pad_template, audio_pad_template]
});
PAD_TEMPLATES.as_ref()
}
fn change_state(
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
if transition == gst::StateChange::ReadyToPaused {
let (target_duration, playlist_type, i_frames_only, segment_template) = {
let settings = self.settings.lock().unwrap();
(
settings.target_duration,
settings.playlist_type.clone(),
settings.i_frames_only,
settings.location.clone(),
)
};
let playlist = self.start(target_duration, playlist_type, i_frames_only);
base_imp!(self).open_playlist(playlist, segment_template);
}
self.parent_change_state(transition)
}
fn request_new_pad(
&self,
templ: &gst::PadTemplate,
_name: Option<&str>,
_caps: Option<&gst::Caps>,
) -> Option<gst::Pad> {
let mut settings = self.settings.lock().unwrap();
match templ.name_template() {
"audio" => {
if settings.audio_sink {
gst::debug!(
CAT,
imp: self,
"requested_new_pad: audio pad is already set"
);
return None;
}
if settings.i_frames_only {
gst::element_error!(
self.obj(),
gst::StreamError::WrongType,
("Invalid configuration"),
["Audio not allowed for i-frames-only-stream"]
);
return None;
}
let peer_pad = settings.splitmuxsink.request_pad_simple("audio_0").unwrap();
let sink_pad = gst::GhostPad::from_template_with_target(templ, &peer_pad).unwrap();
self.obj().add_pad(&sink_pad).unwrap();
sink_pad.set_active(true).unwrap();
settings.audio_sink = true;
Some(sink_pad.upcast())
}
"video" => {
if settings.video_sink {
gst::debug!(
CAT,
imp: self,
"requested_new_pad: video pad is already set"
);
return None;
}
let peer_pad = settings.splitmuxsink.request_pad_simple("video").unwrap();
let sink_pad = gst::GhostPad::from_template_with_target(templ, &peer_pad).unwrap();
self.obj().add_pad(&sink_pad).unwrap();
sink_pad.set_active(true).unwrap();
settings.video_sink = true;
Some(sink_pad.upcast())
}
other_name => {
gst::debug!(
CAT,
imp: self,
"requested_new_pad: name \"{}\" is not audio or video",
other_name
);
None
}
}
}
fn release_pad(&self, pad: &gst::Pad) {
let mut settings = self.settings.lock().unwrap();
if !settings.audio_sink && !settings.video_sink {
return;
}
let ghost_pad = pad.downcast_ref::<gst::GhostPad>().unwrap();
if let Some(peer) = ghost_pad.target() {
settings.splitmuxsink.release_request_pad(&peer);
}
pad.set_active(false).unwrap();
self.obj().remove_pad(pad).unwrap();
if "audio" == ghost_pad.name() {
settings.audio_sink = false;
} else {
settings.video_sink = false;
}
}
}
impl BinImpl for HlsSink3 {
#[allow(clippy::single_match)]
fn handle_message(&self, msg: gst::Message) {
use gst::MessageView;
match msg.view() {
MessageView::Element(msg) => {
let event_is_from_splitmuxsink = {
let settings = self.settings.lock().unwrap();
msg.src() == Some(settings.splitmuxsink.upcast_ref())
};
if !event_is_from_splitmuxsink {
return;
}
let s = msg.structure().unwrap();
match s.name().as_str() {
"splitmuxsink-fragment-opened" => {
if let Ok(new_fragment_opened_at) = s.get::<gst::ClockTime>("running-time")
{
let mut state = self.state.lock().unwrap();
state.fragment_opened_at = Some(new_fragment_opened_at);
}
}
"splitmuxsink-fragment-closed" => {
let s = msg.structure().unwrap();
if let Ok(fragment_closed_at) = s.get::<gst::ClockTime>("running-time") {
self.on_fragment_closed(fragment_closed_at);
}
}
_ => {}
}
}
_ => self.parent_handle_message(msg),
}
}
}
impl HlsBaseSinkImpl for HlsSink3 {}
impl HlsSink3 {
fn start(
&self,
target_duration: u32,
playlist_type: Option<MediaPlaylistType>,
i_frames_only: bool,
) -> Playlist {
gst::info!(CAT, imp: self, "Starting");
let mut state = self.state.lock().unwrap();
*state = HlsSink3State::default();
let (turn_vod, playlist_type) = if playlist_type == Some(MediaPlaylistType::Vod) {
(true, Some(MediaPlaylistType::Event))
} else {
(false, playlist_type)
};
let playlist = MediaPlaylist {
version: if i_frames_only { Some(4) } else { Some(3) },
target_duration: target_duration as f32,
playlist_type,
i_frames_only,
..Default::default()
};
Playlist::new(playlist, turn_vod, false)
}
fn on_format_location(
&self,
fragment_id: u32,
running_time: Option<gst::ClockTime>,
) -> Result<String, String> {
gst::info!(
CAT,
imp: self,
"Starting the formatting of the fragment-id: {}",
fragment_id
);
let (fragment_stream, segment_file_location) = base_imp!(self)
.get_fragment_stream(fragment_id)
.ok_or_else(|| String::from("Error while getting fragment stream"))?;
let mut state = self.state.lock().unwrap();
state.current_segment_location = Some(segment_file_location.clone());
state.fragment_running_time = running_time;
let settings = self.settings.lock().unwrap();
settings
.giostreamsink
.set_property("stream", &fragment_stream);
gst::info!(
CAT,
imp: self,
"New segment location: {:?}",
state.current_segment_location.as_ref()
);
Ok(segment_file_location)
}
fn on_fragment_closed(&self, closed_at: gst::ClockTime) {
let mut state = self.state.lock().unwrap();
let location = match state.current_segment_location.take() {
Some(location) => location,
None => {
gst::error!(CAT, imp: self, "Unknown segment location");
return;
}
};
let opened_at = match state.fragment_opened_at.take() {
Some(opened_at) => opened_at,
None => {
gst::error!(CAT, imp: self, "Unknown segment duration");
return;
}
};
let duration = ((closed_at - opened_at).mseconds() as f32) / 1_000f32;
let running_time = state.fragment_running_time;
drop(state);
let obj = self.obj();
let base_imp = obj.upcast_ref::<HlsBaseSink>().imp();
let uri = base_imp.get_segment_uri(&location);
let _ = base_imp.add_segment(
&location,
running_time,
MediaSegment {
uri,
duration,
..Default::default()
},
);
}
}


@ -1,63 +0,0 @@
// Copyright (C) 2021 Rafael Caricio <rafael@caricio.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(clippy::non_send_fields_in_send_ty, unused_doc_comments)]
use crate::HlsBaseSink;
/**
* plugin-hlssink3:
*
* Since: plugins-rs-0.8.0
*/
use gst::glib;
use gst::prelude::*;
mod imp;
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Copy, glib::Enum)]
#[repr(u32)]
#[enum_type(name = "GstHlsSink3PlaylistType")]
#[non_exhaustive]
pub enum HlsSink3PlaylistType {
#[enum_value(
name = "Unspecified: The tag `#EXT-X-PLAYLIST-TYPE` won't be present in the playlist during the pipeline processing.",
nick = "unspecified"
)]
Unspecified = 0,
#[enum_value(
name = "Event: No segments will be removed from the playlist. At the end of the processing, the tag `#EXT-X-ENDLIST` is added to the playlist. The tag `#EXT-X-PLAYLIST-TYPE:EVENT` will be present in the playlist.",
nick = "event"
)]
Event = 1,
#[enum_value(
name = "Vod: The playlist behaves like the `event` option (a live event), but at the end of the processing, the playlist will be set to `#EXT-X-PLAYLIST-TYPE:VOD`.",
nick = "vod"
)]
Vod = 2,
}
glib::wrapper! {
pub struct HlsSink3(ObjectSubclass<imp::HlsSink3>) @extends HlsBaseSink, gst::Bin, gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
#[cfg(feature = "doc")]
{
HlsSink3PlaylistType::static_type().mark_as_plugin_api(gst::PluginAPIFlags::empty());
}
gst::Element::register(
Some(plugin),
"hlssink3",
gst::Rank::NONE,
HlsSink3::static_type(),
)?;
Ok(())
}
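A hypothetical wiring sketch for the request pads this element declares in imp.rs ("video"/"audio"); `pipeline` and `enc` are assumed to already exist and be linked upstream:

```rust
use gst::prelude::*;

// Sketch only: request hlssink3's video pad and link an encoder to it.
fn attach_video(
    pipeline: &gst::Pipeline,
    enc: &gst::Element,
) -> Result<(), Box<dyn std::error::Error>> {
    let hlssink = gst::ElementFactory::make("hlssink3")
        .property("target-duration", 6u32)
        .property_from_str("playlist-type", "event")
        .build()?;
    pipeline.add(&hlssink)?;
    let video_pad = hlssink.request_pad_simple("video").expect("video pad");
    enc.static_pad("src").expect("src pad").link(&video_pad)?;
    Ok(())
}
```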

net/hlssink3/src/imp.rs: new file, 1600 lines (diff suppressed because it is too large)


@ -13,25 +13,67 @@
* Since: plugins-rs-0.8.0
*/
use gst::glib;
use gst::prelude::*;
mod hlsbasesink;
pub mod hlscmafsink;
pub mod hlssink3;
mod imp;
mod playlist;
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Copy, glib::Enum)]
#[repr(u32)]
#[enum_type(name = "GstHlsSink3PlaylistType")]
#[non_exhaustive]
pub enum HlsSink3PlaylistType {
#[enum_value(
name = "Unspecified: The tag `#EXT-X-PLAYLIST-TYPE` won't be present in the playlist during the pipeline processing.",
nick = "unspecified"
)]
Unspecified = 0,
#[enum_value(
name = "Event: No segments will be removed from the playlist. At the end of the processing, the tag `#EXT-X-ENDLIST` is added to the playlist. The tag `#EXT-X-PLAYLIST-TYPE:EVENT` will be present in the playlist.",
nick = "event"
)]
Event = 1,
#[enum_value(
name = "Vod: The playlist behaves like the `event` option (a live event), but at the end of the processing, the playlist will be set to `#EXT-X-PLAYLIST-TYPE:VOD`.",
nick = "vod"
)]
Vod = 2,
}
glib::wrapper! {
pub struct HlsBaseSink(ObjectSubclass<hlsbasesink::HlsBaseSink>) @extends gst::Bin, gst::Element, gst::Object;
pub struct HlsBaseSink(ObjectSubclass<imp::HlsBaseSink>) @extends gst::Bin, gst::Element, gst::Object;
}
glib::wrapper! {
pub struct HlsSink3(ObjectSubclass<imp::HlsSink3>) @extends HlsBaseSink, gst::Bin, gst::Element, gst::Object;
}
glib::wrapper! {
pub struct HlsCmafSink(ObjectSubclass<imp::HlsCmafSink>) @extends HlsBaseSink, gst::Bin, gst::Element, gst::Object;
}
pub fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
#[cfg(feature = "doc")]
{
use gst::prelude::*;
HlsSink3PlaylistType::static_type().mark_as_plugin_api(gst::PluginAPIFlags::empty());
HlsBaseSink::static_type().mark_as_plugin_api(gst::PluginAPIFlags::empty());
}
hlssink3::register(plugin)?;
hlscmafsink::register(plugin)?;
gst::Element::register(
Some(plugin),
"hlssink3",
gst::Rank::NONE,
HlsSink3::static_type(),
)?;
gst::Element::register(
Some(plugin),
"hlscmafsink",
gst::Rank::NONE,
HlsCmafSink::static_type(),
)?;
Ok(())
}


@ -59,7 +59,7 @@ impl Playlist {
while self.inner.segments.len() > max_playlist_length {
let to_remove = self.inner.segments.remove(0);
if self.inner.segments[0].map.is_none() {
self.inner.segments[0].map.clone_from(&to_remove.map)
self.inner.segments[0].map = to_remove.map.clone()
}
}
} else if self.inner.segments.len() > max_playlist_length {
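Both sides of this hunk propagate the removed segment's init-section `map` to the new head of the playlist; `clone_from` is simply the allocation-reusing spelling of `= ... .clone()`:

```rust
#[test]
fn clone_from_matches_clone() {
    let src = Some(String::from("init00001.mp4"));
    let mut dst: Option<String> = None;
    dst.clone_from(&src); // same result as `dst = src.clone()`
    assert_eq!(dst, src);
}
```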


@ -8,7 +8,7 @@
use gio::prelude::*;
use gst::prelude::*;
use gsthlssink3::hlssink3::HlsSink3PlaylistType;
use gsthlssink3::HlsSink3PlaylistType;
use once_cell::sync::Lazy;
use std::io::Write;
use std::sync::{mpsc, Arc, Mutex};


@ -1,7 +1,7 @@
GStreamer NDI Plugin
====================
*Compatible with NDI SDK 6.x and 5.x*
*Compatible with NDI SDK 5.x*
This is a plugin for the [GStreamer](https://gstreamer.freedesktop.org/)
multimedia framework that allows GStreamer to receive or send an
@ -16,8 +16,8 @@ sink element to provide an NDI source and a device provider for discovering
NDI sources on the network.
The plugin loads the NDI SDK at runtime, either from the default library
path or, if set, from the directory given by the `NDI_RUNTIME_DIR_V6` or
`NDI_RUNTIME_DIR_V5` environment variables.
path or, if set, from the directory given by the `NDI_RUNTIME_DIR_V5`
environment variable.
Some examples of how to use these elements from the command line:


@ -135,13 +135,13 @@ impl ElementImpl for NdiSink {
gst_video::VideoFormat::Rgbx.to_str(),
]),
)
.field("width", gst::IntRange::<i32>::new(1, i32::MAX))
.field("height", gst::IntRange::<i32>::new(1, i32::MAX))
.field("width", gst::IntRange::<i32>::new(1, std::i32::MAX))
.field("height", gst::IntRange::<i32>::new(1, std::i32::MAX))
.field(
"framerate",
gst::FractionRange::new(
gst::Fraction::new(0, 1),
gst::Fraction::new(i32::MAX, 1),
gst::Fraction::new(std::i32::MAX, 1),
),
)
.build(),


@ -7,6 +7,7 @@ use gst_base::subclass::base_src::CreateSuccess;
use gst_base::subclass::prelude::*;
use std::sync::Mutex;
use std::u32;
use once_cell::sync::Lazy;


@ -354,7 +354,7 @@ impl NdiSrcDemux {
gst::info!(CAT, imp: self, "Allowed audio caps {allowed_caps:?}");
state.audio_non_interleaved = allowed_caps
.is_some_and(|allowed_caps| allowed_caps.can_intersect(&caps));
.map_or(false, |allowed_caps| allowed_caps.can_intersect(&caps));
gst::info!(
CAT,

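The two predicates in this hunk are equivalent; `Option::is_some_and` has been stable since Rust 1.70, and newer clippy releases suggest it over `map_or(false, ...)` (the same swap shows up in the ONVIF parser below):

```rust
#[test]
fn map_or_false_equals_is_some_and() {
    let v: Option<u32> = Some(3);
    assert_eq!(v.map_or(false, |x| x > 2), v.is_some_and(|x| x > 2));
}
```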

@ -27,9 +27,7 @@ pub enum Buffer {
},
Metadata {
frame: MetadataFrame,
#[allow(unused)]
receive_time_gst: gst::ClockTime,
#[allow(unused)]
receive_time_real: gst::ClockTime,
},
}


@ -8,7 +8,7 @@
clippy::missing_safety_doc
)]
use once_cell::sync::{Lazy, OnceCell};
use once_cell::sync::OnceCell;
#[cfg(unix)]
use libloading::os::unix::{Library, Symbol};
@ -16,15 +16,15 @@ use libloading::os::unix::{Library, Symbol};
use libloading::os::windows::{Library, Symbol};
#[cfg(all(target_arch = "x86_64", target_os = "windows"))]
const LIBRARY_NAMES: &[&str] = &["Processing.NDI.Lib.x64.dll"];
const LIBRARY_NAME: &str = "Processing.NDI.Lib.x64.dll";
#[cfg(all(target_arch = "x86", target_os = "windows"))]
const LIBRARY_NAMES: &[&str] = &["Processing.NDI.Lib.x86.dll"];
const LIBRARY_NAME: &str = "Processing.NDI.Lib.x86.dll";
#[cfg(target_os = "linux")]
const LIBRARY_NAMES: &[&str] = &["libndi.so.6", "libndi.so.5"];
const LIBRARY_NAME: &str = "libndi.so.5";
#[cfg(target_os = "macos")]
const LIBRARY_NAMES: &[&str] = &["libndi.dylib"];
const LIBRARY_NAME: &str = "libndi.dylib";
#[cfg(all(unix, not(any(target_os = "linux", target_os = "macos"))))]
const LIBRARY_NAMES: &[&str] = &["libndi.so"];
const LIBRARY_NAME: &str = "libndi.so";
#[allow(clippy::type_complexity)]
struct FFI {
@ -226,8 +226,8 @@ pub enum NDIlib_frame_format_type_e {
NDIlib_frame_format_type_field_1 = 3,
}
pub const NDIlib_send_timecode_synthesize: i64 = i64::MAX;
pub const NDIlib_recv_timestamp_undefined: i64 = i64::MAX;
pub const NDIlib_send_timecode_synthesize: i64 = ::std::i64::MAX;
pub const NDIlib_recv_timestamp_undefined: i64 = ::std::i64::MAX;
#[repr(C)]
#[derive(Debug, Copy, Clone)]
@ -326,9 +326,6 @@ pub const NDIlib_compressed_packet_flags_keyframe: u32 = 1;
#[cfg(feature = "advanced-sdk")]
pub const NDIlib_compressed_packet_version_0: u32 = 44;
static CAT: Lazy<gst::DebugCategory> =
Lazy::new(|| gst::DebugCategory::new("ndi", gst::DebugColorFlags::empty(), Some("NewTek NDI")));
static FFI: OnceCell<FFI> = OnceCell::new();
pub fn load() -> Result<(), glib::BoolError> {
@ -338,47 +335,17 @@ pub fn load() -> Result<(), glib::BoolError> {
use std::env;
use std::path;
const ENV_VARS: &[&str] = &["NDI_RUNTIME_DIR_V6", "NDI_RUNTIME_DIR_V5", ""];
let library_directory = env::var_os("NDI_RUNTIME_DIR_V5");
let library_path = if let Some(library_directory) = library_directory {
let mut path = path::PathBuf::from(library_directory);
path.push(LIBRARY_NAME);
path
} else {
path::PathBuf::from(LIBRARY_NAME)
};
let mut library = None;
'outer_loop: for env_var in ENV_VARS {
let library_directory = if !env_var.is_empty() {
let Some(library_directory) = env::var_os(env_var) else {
continue;
};
Some(library_directory)
} else {
None
};
for library_name in LIBRARY_NAMES {
let library_path = if let Some(ref library_directory) = library_directory {
let mut path = path::PathBuf::from(library_directory);
path.push(library_name);
path
} else {
path::PathBuf::from(library_name)
};
match Library::new(&library_path) {
Ok(lib) => {
gst::log!(CAT, "Loaded NDI SDK from {}", library_path.display());
library = Some(lib);
break 'outer_loop;
}
Err(err) => {
gst::log!(
CAT,
"Failed loading NDI SDK from {}: {err}",
library_path.display()
);
continue;
}
}
}
}
let library = library.ok_or_else(|| glib::bool_error!("Failed loading NDI SDK"))?;
let library = Library::new(library_path)
.map_err(|err| glib::bool_error!("Failed to load NDI SDK: {}", err))?;
macro_rules! load_symbol {
($name:ident) => {{


@ -153,14 +153,6 @@ impl Default for State {
}
}
impl Drop for State {
fn drop(&mut self) {
if let Some(clock_wait) = self.clock_wait.take() {
clock_wait.unschedule();
}
}
}
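The Drop impl matters because a pending `ClockId::wait()` blocks until its scheduled time unless some other party unschedules it. A minimal sketch of that behaviour (assuming `gst::init()` has already run):

```rust
use gst::prelude::*;

// Sketch only: a far-future single-shot wait returns early once unscheduled.
fn cancel_pending_wait() {
    let clock = gst::SystemClock::obtain();
    let id = clock
        .new_single_shot_id(clock.time().unwrap() + gst::ClockTime::from_seconds(3600));
    let canceller = id.clone();
    std::thread::spawn(move || canceller.unschedule());
    let (_res, _jitter) = id.wait(); // yields ClockError::Unscheduled
}
```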
pub struct OnvifMetadataParse {
srcpad: gst::Pad,
sinkpad: gst::Pad,
@ -480,7 +472,7 @@ impl OnvifMetadataParse {
// configured latency has passed.
let queued_time = self.calculate_queued_time(&state);
if queued_time.is_some_and(|queued_time| queued_time >= state.configured_latency) {
if queued_time.map_or(false, |queued_time| queued_time >= state.configured_latency) {
gst::trace!(
CAT,
imp: self,
@ -632,7 +624,7 @@ impl OnvifMetadataParse {
let utc_time = *queued_frames.iter().next().unwrap().0;
// Check if this frame should still be drained
if drain_utc_time.is_some_and(|drain_utc_time| drain_utc_time < utc_time) {
if drain_utc_time.map_or(false, |drain_utc_time| drain_utc_time < utc_time) {
break;
}
@ -728,7 +720,7 @@ impl OnvifMetadataParse {
let diff = position.saturating_sub(frame_pts);
if settings
.max_lateness
.is_some_and(|max_lateness| diff > max_lateness)
.map_or(false, |max_lateness| diff > max_lateness)
{
gst::warning!(
CAT,
@ -1603,18 +1595,14 @@ impl ElementImpl for OnvifMetadataParse {
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
if matches!(transition, gst::StateChange::ReadyToPaused) {
if matches!(
transition,
gst::StateChange::PausedToReady | gst::StateChange::ReadyToPaused
) {
let mut state = self.state.lock().unwrap();
*state = State::default();
}
let res = self.parent_change_state(transition)?;
if matches!(transition, gst::StateChange::PausedToReady) {
let mut state = self.state.lock().unwrap();
*state = State::default();
}
Ok(res)
self.parent_change_state(transition)
}
}


@ -1,56 +0,0 @@
[package]
name = "gst-plugin-quinn"
version.workspace = true
authors = ["Sanchayan Maity <sanchayan@asymptotic.io"]
repository.workspace = true
license = "MPL-2.0"
edition.workspace = true
description = "GStreamer Plugin for QUIC"
rust-version.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
gst.workspace = true
gst-base.workspace = true
once_cell.workspace = true
tokio = { version = "1.36.0", default-features = false, features = ["time", "rt-multi-thread"] }
futures = "0.3.30"
quinn = { version = "0.11", default-features = true, features = ["ring"]}
rustls = { version = "0.23", default-features = false, features = ["ring", "std"]}
rustls-pemfile = "2"
rustls-pki-types = "1"
rcgen = "0.13"
bytes = "1.5.0"
thiserror = "1"
[dev-dependencies]
gst-check = { workspace = true, features = ["v1_20"] }
serial_test = "3"
[lib]
name = "gstquinn"
crate-type = ["cdylib", "rlib"]
path = "src/lib.rs"
[build-dependencies]
gst-plugin-version-helper.workspace = true
[features]
static = []
capi = []
doc = []
[package.metadata.capi]
min_version = "0.9.21"
[package.metadata.capi.header]
enabled = false
[package.metadata.capi.library]
install_subdir = "gstreamer-1.0"
versioning = false
import_library = false
[package.metadata.capi.pkg_config]
requires_private = "gstreamer-1.0, gstreamer-base-1.0, gobject-2.0, glib-2.0"

Some files were not shown because too many files have changed in this diff.