Compare commits


13 commits

Author SHA1 Message Date
Sebastian Dröge
2f2aac55a3 Update version to 0.12.1 2024-02-13 13:02:27 +02:00
Sebastian Dröge
31dfcd0a78 Update CHANGELOG.md for 0.12.1 2024-02-13 13:01:46 +02:00
Sebastian Dröge
b3e233f0c5 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1460>
2024-02-13 12:37:23 +02:00
Sebastian Dröge
58a065caf3 textwrap: Remove unnecessary to_string() in debug output of a string
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1460>
2024-02-13 12:35:40 +02:00
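A minimal illustration of the kind of change described here (a sketch, not the actual hunk; CAT and line are placeholder names): a string slice can be handed to the log formatting directly, so the extra to_string() only allocated a throwaway copy.

    // Before: allocates a temporary String just to format it.
    gst::debug!(CAT, "Output line: {}", line.to_string());
    // After: the &str is formatted directly, no allocation.
    gst::debug!(CAT, "Output line: {}", line);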
Jordan Yelloz
606352d7cf webrtcsink: Added sinkpad with "msid" property
This forwards to the webrtcbin sinkpad's msid when specified.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1460>
2024-02-12 18:11:42 +02:00
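A hedged usage sketch (not taken from this merge request; the "video_%u" pad template name and the msid value are assumptions): the per-pad "msid" property is set on a requested webrtcsink sink pad and, per the commit message, forwarded to the corresponding webrtcbin sink pad.

    use gst::prelude::*;

    fn request_pad_with_msid() -> Result<(), Box<dyn std::error::Error>> {
        gst::init()?;
        let sink = gst::ElementFactory::make("webrtcsink").build()?;
        // Request a sink pad and give it an explicit msid before linking it.
        let pad = sink
            .request_pad_simple("video_%u")
            .ok_or("failed to request a webrtcsink sink pad")?;
        pad.set_property("msid", "camera-stream-0");
        Ok(())
    }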
Sebastian Dröge
aa2d056ea1 Update to async-tungstenite 0.25
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1460>
2024-02-12 18:11:31 +02:00
Sebastian Dröge
3f9d5cf2f0 gtk4: Create a window if running from gst-launch-1.0 or GST_GTK4_WINDOW=1 is set
Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/482

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1460>
2024-02-12 18:11:25 +02:00
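A minimal way to exercise this change (a sketch, not from the commit; assumes the gtk4 plugin is installed and a display is available):

    # A window is created automatically because the pipeline runs under gst-launch-1.0;
    # setting GST_GTK4_WINDOW=1 forces the same behaviour in other environments.
    gst-launch-1.0 videotestsrc ! gtk4paintablesink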
Sebastian Dröge
149eff08b7 utils: Update for renamed clippy lint in 1.76
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1460>
2024-02-12 18:11:19 +02:00
Sebastian Dröge
c4e3fff2a2 Update Cargo.lock
Downgrade clap_derive to 4.4.7 to not require Rust 1.74 or newer.
2024-02-08 20:52:14 +02:00
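One common way such a pin ends up in Cargo.lock (a sketch, not necessarily the exact command used for this commit):

    # Downgrade the locked clap_derive to the last version that builds with pre-1.74 Rust.
    cargo update -p clap_derive --precise 4.4.7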
Sebastian Dröge
16e001e3f2 Update dependency versions for gtk-rs-core / gtk4-rs / gstreamer-rs and local crates 2024-02-08 19:40:08 +02:00
Sebastian Dröge
af694e8bc1 ci: Use 0.22 branch of gstreamer-rs images templates 2024-02-08 19:35:05 +02:00
Sebastian Dröge
66f2969eb9 Update Cargo.lock 2024-02-08 19:33:32 +02:00
Sebastian Dröge
50efdf6a64 Update version to 0.12.0 2024-02-08 19:33:09 +02:00
515 changed files with 19020 additions and 84186 deletions

.gitignore (vendored)

@ -1,3 +1,4 @@
Cargo.lock
target
*~
*.bk

.gitlab-ci.yml

@ -1,4 +1,4 @@
.templates_sha: &templates_sha 6a40df92957c8ce9ee741aaccc5daaaf70545b1e
.templates_sha: &templates_sha fddab8aa63e89a8e65214f59860d9c0f030360c9
include:
- project: 'freedesktop/ci-templates'
@ -6,7 +6,7 @@ include:
file: '/templates/debian.yml'
- project: 'gstreamer/gstreamer-rs'
ref: main
ref: '0.22'
file: '/ci/images_template.yml'
- project: 'gstreamer/gstreamer'
@ -20,11 +20,9 @@ variables:
# to ensure that we are testing against the same thing as GStreamer itself.
# The tag name is included above from the main repo.
GSTREAMER_DOC_IMAGE: "registry.freedesktop.org/gstreamer/gstreamer/amd64/fedora:$FEDORA_TAG-main"
# Use the gstreamer image to trigger the cerbero job, same as the monorepo
CERBERO_TRIGGER_IMAGE: "registry.freedesktop.org/gstreamer/gstreamer/amd64/fedora:$FEDORA_TAG-main"
WINDOWS_BASE: "registry.freedesktop.org/gstreamer/gstreamer-rs/windows"
WINDOWS_RUST_MINIMUM_IMAGE: "$WINDOWS_BASE:$GST_RS_IMG_WINDOWS_TAG-main-$GST_RS_MSRV"
WINDOWS_RUST_STABLE_IMAGE: "$WINDOWS_BASE:$GST_RS_IMG_WINDOWS_TAG-main-$GST_RS_STABLE"
WINDOWS_RUST_MINIMUM_IMAGE: "$WINDOWS_BASE:$GST_RS_IMG_TAG-main-$GST_RS_MSRV"
WINDOWS_RUST_STABLE_IMAGE: "$WINDOWS_BASE:$GST_RS_IMG_TAG-main-$GST_RS_STABLE"
workflow:
rules:
@ -38,14 +36,6 @@ workflow:
default:
interruptible: true
# Auto-retry jobs in case of infra failures
retry:
max: 1
when:
- 'runner_system_failure'
- 'stuck_or_timeout_failure'
- 'scheduler_failure'
- 'api_failure'
stages:
- "trigger"
@ -60,7 +50,6 @@ trigger:
stage: 'trigger'
variables:
GIT_STRATEGY: none
tags: [ 'placeholder-job' ]
script:
- echo "Trigger job done, now running the pipeline."
rules:
@ -83,7 +72,7 @@ trigger:
- rm -rf target
before_script:
- source ./ci/env.sh
- mkdir .cargo && echo -e "[net]\ngit-fetch-with-cli = true" > .cargo/config.toml
- mkdir .cargo && echo -e "[net]\ngit-fetch-with-cli = true" > .cargo/config
.debian:12-stable:
extends: .debian:12
@ -105,14 +94,13 @@ trigger:
RUST_BACKTRACE: 'full'
script:
- rustc --version
- CARGO_FLAGS="-j${FDO_CI_CONCURRENT:-$(nproc)} --locked --color=always --all --all-targets"
- cargo build $CARGO_FLAGS
- RUST_BACKTRACE=1 G_DEBUG=fatal_warnings cargo test $CARGO_FLAGS
- cargo build $CARGO_FLAGS --all-features --exclude gst-plugin-gtk4
- RUST_BACKTRACE=1 G_DEBUG=fatal_warnings cargo test $CARGO_FLAGS --all-features --exclude gst-plugin-gtk4
- cargo build $CARGO_FLAGS --no-default-features
- RUST_BACKTRACE=1 G_DEBUG=fatal_warnings cargo test $CARGO_FLAGS --no-default-features
- cargo build --locked --color=always --workspace --all-targets
- G_DEBUG=fatal_warnings cargo test --locked --color=always --workspace --all-targets
- cargo build --locked --color=always --workspace --all-targets --all-features --exclude gst-plugin-gtk4
- G_DEBUG=fatal_warnings cargo test --locked --color=always --workspace --all-targets --all-features --exclude gst-plugin-gtk4
- cargo build --locked --color=always --workspace --all-targets --no-default-features
- G_DEBUG=fatal_warnings cargo test --locked --color=always --workspace --all-targets --no-default-features
test msrv:
extends:
@ -273,7 +261,6 @@ documentation:
- 'docker'
- 'windows'
- '2022'
- "gstreamer-windows"
script:
# Set the code page to UTF-8
- chcp 65001
@ -300,7 +287,6 @@ test windows stable:
rustfmt:
extends: '.debian:12-stable'
stage: "lint"
tags: [ 'placeholder-job' ]
needs: []
script:
- cargo fmt --version
@ -309,7 +295,6 @@ rustfmt:
typos:
extends: '.debian:12-stable'
stage: "lint"
tags: [ 'placeholder-job' ]
needs: []
script:
- typos
@ -317,7 +302,6 @@ typos:
gstwebrtc-api lint:
image: node:lts
stage: "lint"
tags: [ 'placeholder-job' ]
needs: []
script:
- cd net/webrtc/gstwebrtc-api
@ -327,12 +311,10 @@ gstwebrtc-api lint:
check commits:
extends: '.debian:12-stable'
stage: "lint"
tags: [ 'placeholder-job' ]
needs: []
script:
- ci-fairy check-commits --textwidth 0 --no-signed-off-by
- ci/check-for-symlinks.sh
- ci/check-meson-version.sh
clippy:
extends: '.debian:12-stable'
@ -344,10 +326,9 @@ clippy:
# csound-sys only looks at /usr/lib and /usr/local top levels
CSOUND_LIB_DIR: '/usr/lib/x86_64-linux-gnu/'
script:
- CARGO_FLAGS="-j${FDO_CI_CONCURRENT:-$(nproc)} --locked --color=always --all --all-targets"
- cargo clippy $CARGO_FLAGS -- -D warnings -A unknown-lints
- cargo clippy $CARGO_FLAGS --all-features --exclude gst-plugin-gtk4 -- -D warnings -A unknown-lints
- cargo clippy $CARGO_FLAGS --no-default-features -- -D warnings -A unknown-lints
- cargo clippy --locked --color=always --all --all-targets -- -D warnings -A unknown-lints
- cargo clippy --locked --color=always --all --all-features --all-targets --exclude gst-plugin-gtk4 -- -D warnings -A unknown-lints
- cargo clippy --locked --color=always --all --all-targets --no-default-features -- -D warnings -A unknown-lints
deny:
extends: .debian:12-stable
@ -372,9 +353,7 @@ outdated:
- if: '$CI_PIPELINE_SOURCE == "schedule"'
script:
- cargo update --color=always
# Ignore bitstream-io until we can update MSRV to 1.80
# Ignore test-with until we can update MSRV to 1.77
- cargo outdated --color=always --root-deps-only --exit-code 1 -v -i bitstream-io -i test-with
- cargo outdated --color=always --root-deps-only --exit-code 1 -v
coverage:
allow_failure: true
@ -390,53 +369,18 @@ coverage:
# csound-sys only looks at /usr/lib and /usr/local top levels
CSOUND_LIB_DIR: '/usr/lib/x86_64-linux-gnu/'
script:
- CARGO_FLAGS="-j${FDO_CI_CONCURRENT:-$(nproc)} --locked --color=always --all"
- cargo test $CARGO_FLAGS --all-features --exclude gst-plugin-gtk4
- cargo test --locked --color=always --all --all-features --exclude gst-plugin-gtk4
# generate html report
- mkdir -p coverage
- grcov . --binary-path ./target/debug/ -s . -t html,cobertura --branch --ignore-not-existing --ignore "*target*" --ignore "*/build.rs" -o ./coverage/
- grcov . --binary-path ./target/debug/ -s . -t html --branch --ignore-not-existing --ignore "*target*" --ignore "*/build.rs" -o ./coverage/
# generate cobertura report for gitlab integration
- grcov . --binary-path ./target/debug/ -s . -t cobertura --branch --ignore-not-existing --ignore "*target*" --ignore "*/build.rs" -o coverage.xml
# output coverage summary for gitlab parsing.
# TODO: use grcov once https://github.com/mozilla/grcov/issues/556 is fixed
- grep "%" coverage/html/index.html | head -1 || true
- grep "%" coverage/index.html | head -1 || true
artifacts:
paths:
- 'coverage'
reports:
coverage_report:
coverage_format: cobertura
path: "coverage/cobertura.xml"
cerbero trigger:
image: $CERBERO_TRIGGER_IMAGE
needs: [ "trigger" ]
timeout: '4h'
tags:
- placeholder-job
variables:
# We will build this cerbero branch in the cerbero trigger CI
CERBERO_UPSTREAM_BRANCH: 'main'
script:
- ci/cerbero/trigger_cerbero_pipeline.py
rules:
# Never run post merge
- if: '$CI_PROJECT_NAMESPACE == "gstreamer"'
when: never
# Don't run if the only changes are files that cargo-c does not read
- if:
changes:
- "CHANGELOG.md"
- "README.md"
- "deny.toml"
- "rustfmt.toml"
- "typos.toml"
- "*.py"
- "*.sh"
- "Makefile"
- "meson.build"
- "meson_options.txt"
- "**/meson.build"
- "ci/*.sh"
- "ci/*.py"
when: never
- when: always
path: coverage.xml

CHANGELOG.md

@ -5,192 +5,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html),
specifically the [variant used by Rust](http://doc.crates.io/manifest.html#the-version-field).
## [0.13.1] - 2024-08-27
### Fixed
- transcriberbin: Fix gst-inspect with missing elements.
- gtk4paintablesink: Move dmabuf cfg to the correct bracket level.
- webrtcsrc: Don't hold the state lock while removing sessions.
- rtpbasepay: Various fixes to payloader base class.
- webrtcsink: Fix various assertions when finalizing.
- webrtcsrc: Make sure to always call end_session() without state lock.
- mpegtslivesrc: Handle PCR discontinuities as errors.
- ndisrc: Calculate timestamps for metadata buffers too.
- Various new clippy warnings.
- webrtcsink: Fix segment format mismatch when using a remote offer.
- awstranscriber: Fix sanity check in transcribe loop.
- whepsrc: Fix incorrect default caps.
### Changed
- gtk4paintablesink: Enable `gtk::GraphicsOffload::black-background` when
building with GTK 4.16 or newer.
- gstwebrtc-api: Always include index file in dist for convenience.
- rtpbasepay: Negotiate SSRC/PT with downstream via caps for backwards
compatibility.
- hlssink3: Use more accurate fragment duration from splitmuxsink if
available.
### Added
- gtk4paintablesink: Add `window-width` and `window-height` properties.
- gtk4paintablesink: Add custom widget for automatically updating window size.
- fmp4mux / mp4mux: Add image orientation tag support.
- webrtcsink: Add nvv4l2av1enc support.
- cmafmux: Add Opus support.
## [0.13.0] - 2024-07-16
### Added
- rtp: New RTP payloader and depayloader base classes, in addition to new
payloader and depayloaders for: PCMA, PCMU, AC-3, AV1 (ported to the new
base classes), MPEG-TS, VP8, VP9, MP4A, MP4G, JPEG, Opus, KLV.
- originalbuffer: New pair of elements that allows to save a buffer, perform
transformations on it and then restore the original buffer but keeping any
new analytics and other metadata on it.
- gopbuffer: New element for buffering an entire group-of-pictures.
- tttocea708: New element for converting timed text to CEA-708 closed captions.
- cea708mux: New element for muxing multiple CEA-708 services together.
- transcriberbin: Add support for generating CEA-708 closed captions and
CEA-608-in-708.
- cea708overlay: New overlay element for CEA-708 and CEA-608 closed captions.
- dav1ddec: Signal colorimetry in the caps.
- webrtc: Add support for RFC7273 clock signalling and synchronization to
webrtcsrc and webrtcsink.
- tracers: Add a new pad push durations tracer.
- transcriberbin: Add support for a secondary audio stream.
- quinn: New plugin with a QUIC source and sink element.
- rtpgccbwe: New mode based on linear regression instead of a kalman filter.
- rtp: New rtpsend and rtprecv elements that provide a new implementation of
the rtpbin element with a separate send and receive side.
- rtpsrc2: Add support for new rtpsend / rtprecv elements instead of rtpbin.
- webrtcsrc: Add multi-producer support.
- livesync: Add sync property for enabling/disabling syncing of the output
buffers to the clock.
- mpegtslivesrc: New element for receiving an MPEG-TS stream, e.g. over SRT or
UDP, and exposing the remote PCR clock as a local GStreamer clock.
- gtk4paintablesink: Add support for rotations / flipping.
- gtk4paintablesink: Add support for RGBx formats in non-GL mode.
### Fixed
- livesync: Queue up to latency buffers instead of requiring a queue of the
same size in front of livesync.
- livesync: Synchronize the first buffer to the clock too.
- livesync: Use correct duration for deciding whether a filler has to be
inserted or not.
- audioloudnorm: Fix possible off-by-one in the limiter when handling the very
last buffer.
- webrtcsink: Fix property types for rav1enc.
### Changed
- sccparse, mccparse: Port from nom to winnow.
- uriplaylistbin: Rely on uridecodebin3 gapless logic instead of
re-implementing it.
- webrtc: Refactor of JavaScript API.
- janusvrwebrtcsink: New use-string-ids property to distinguish between
integer and string room IDs, instead of always using strings and guessing
what the server expects.
- janusvrwebrtcsink: Handle more events and expose some via signals.
- dav1ddec: Require dav1d 1.3.0.
- closedcaption: Drop libcaption C code and switch to a pure Rust
implementation.
## [0.12.7] - 2024-06-19
### Fixed
- aws, spotifyaudiosrc, reqwesthttpsrc, webrtchttp: Fix race condition when unlocking
- rtp: Allow any payload type for the AV1 RTP payloader/depayloader
- rtp: Various fixes to the AV1 RTP payloader/depayloader to work correctly
with Chrome and Pion
- meson: Various fixes to the meson-based build system around cargo
- webrtcsink: Use correct property names for configuring `av1enc`
- webrtcsink: Avoid lock poisoning when setting encoder properties
### Added
- ndi: Support for NDI SDK v6
- webrtcsink: Support for AV1 via `nvav1enc`, `av1enc` or `rav1enc`
### Changed
- Update to async-tungstenite 0.26
## [0.12.6] - 2024-05-23
### Fixed
- Various Rust 1.78 clippy warnings.
- gtk4paintablesink: Fix plugin description.
### Added
- fmp4mux / mp4mux: Add support for adding AV1 header OBUs into the MP4
headers.
- fmp4mux / mp4mux: Take track language from the tags if provided.
- gtk4paintablesink: Add GST_GTK4_WINDOW_FULLSCREEN environment variable to
create a fullscreen window for debugging purposes.
- gtk4paintablesink: Also create a window automatically when called from
gst-play-1.0.
- webrtc: Add support for insecure TLS connections.
- webrtcsink: Add VP9 parser after the encoder.
### Changed
- webrtcsink: Improve error when no discovery pipeline runs.
- rtpgccbwe: Improve debug output in various places.
## [0.12.5] - 2024-04-29
### Fixed
- hrtfrender: Use a bitmask instead of an int in the caps for the channel-mask.
- rtpgccbwe: Don't log an error when pushing a buffer list fails while stopping.
- webrtcsink: Don't panic in bitrate handling with unsupported encoders.
- webrtcsink: Don't panic if unsupported input caps are used.
- webrtcsrc: Allow a `None` producer-id in `request-encoded-filter` signal.
### Added
- aws: New property to support path-style addressing.
- fmp4mux / mp4mux: Support FLAC instead (f)MP4.
- gtk4: Support directly importing dmabufs with GTK 4.14.
- gtk4: Add force-aspect-ratio property similar to other video sinks.
## [0.12.4] - 2024-04-08
### Fixed
- aws: Use fixed behaviour version to ensure that updates to the AWS SDK don't
change any defaults configurations in unexpected ways.
- onvifmetadataparse: Fix possible deadlock on shutdown.
- webrtcsink: Set `perfect-timestamp=true` on audio encoders to work around
bugs in Chrome's audio decoders.
- Various clippy warnings.
### Changed
- reqwest: Update to reqwest 0.12.
- webrtchttp: Update to reqwest 0.12.
## [0.12.3] - 2024-03-21
### Fixed
- gtk4paintablesink: Fix scaling of texture position.
- janusvrwebrtcsink: Handle 64 bit numerical room ids.
- janusvrwebrtcsink: Don't include deprecated audio/video fields in publish
messages.
- janusvrwebrtcsink: Handle various other messages to avoid printing errors.
- livekitwebrtc: Fix shutdown behaviour.
- rtpgccbwe: Don't forward buffer lists with buffers from different SSRCs to
avoid breaking assumptions in rtpsession.
- sccparse: Ignore invalid timecodes during seeking.
- webrtcsink: Don't try parsing audio caps as video caps.
### Changed
- webrtc: Allow resolution and framerate changes.
- webrtcsrc: Make producer-peer-id optional.
### Added
- livekitwebrtcsrc: Add new LiveKit source element.
- regex: Add support for configuring regex behaviour.
- spotifyaudiosrc: Document how to use with non-Facebook accounts.
- webrtcsrc: Add `do-retransmission` property.
## [0.12.2] - 2024-02-26
### Fixed
- rtpgccbwe: Don't reset PTS/DTS to `None` as otherwise `rtpsession` won't be
able to generate valid RTCP.
- webrtcsink: Fix usage with 1.22.
### Added
- janusvrwebrtcsink: Add `secret-key` property.
- janusvrwebrtcsink: Allow for string room ids and add `string-ids` property.
- textwrap: Don't split on all whitespaces, especially not on non-breaking
whitespace.
## [0.12.1] - 2024-02-13
### Added
- gtk4: Create a window for testing purposes when running in `gst-launch-1.0`
@ -228,6 +42,7 @@ specifically the [variant used by Rust](http://doc.crates.io/manifest.html#the-v
- New `janusvrwebrtcsink` element for the Janus VideoRoom API.
- New `rtspsrc2` element.
- New `whipserversrc` element.
- gtk4: New `background-color` property for setting the color of the
background of the frame and the borders, if any.
- gtk4: New `scale-filter` property for defining how to scale the frames.
@ -535,15 +350,7 @@ specifically the [variant used by Rust](http://doc.crates.io/manifest.html#the-v
- webrtcsink: Make the `turn-server` property a `turn-servers` list
- webrtcsink: Move from async-std to tokio
[Unreleased]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.13.1...HEAD
[0.13.1]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.13.0...0.13.1
[0.13.0]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.7...0.13.0
[0.12.7]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.6...0.12.7
[0.12.6]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.5...0.12.6
[0.12.5]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.4...0.12.5
[0.12.4]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.3...0.12.4
[0.12.3]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.2...0.12.3
[0.12.2]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.1...0.12.2
[Unreleased]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.1...HEAD
[0.12.1]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.0...0.12.1
[0.12.0]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.11.3...0.12.0
[0.11.3]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.11.2...0.11.3

Cargo.lock (generated)
File diff suppressed because it is too large.

Cargo.toml

@ -9,15 +9,12 @@ members = [
"audio/claxon",
"audio/csound",
"audio/lewton",
"audio/speechmatics",
"audio/spotify",
"generic/file",
"generic/originalbuffer",
"generic/sodium",
"generic/threadshare",
"generic/inter",
"generic/gopbuffer",
"mux/flavors",
"mux/fmp4",
@ -25,7 +22,6 @@ members = [
"net/aws",
"net/hlssink3",
"net/mpegtslive",
"net/ndi",
"net/onvif",
"net/raptorq",
@ -36,7 +32,6 @@ members = [
"net/webrtc",
"net/webrtc/protocol",
"net/webrtc/signalling",
"net/quinn",
"text/ahead",
"text/json",
@ -70,16 +65,13 @@ default-members = [
"audio/claxon",
"audio/lewton",
"generic/originalbuffer",
"generic/threadshare",
"generic/inter",
"generic/gopbuffer",
"mux/fmp4",
"mux/mp4",
"net/aws",
"net/mpegtslive",
"net/hlssink3",
"net/onvif",
"net/raptorq",
@ -91,7 +83,6 @@ default-members = [
"net/webrtc/protocol",
"net/webrtc/signalling",
"net/ndi",
"net/quinn",
"text/ahead",
"text/json",
@ -120,40 +111,38 @@ panic = 'unwind'
[profile.dev]
opt-level = 1
lto = "off"
[workspace.package]
version = "0.14.0-alpha.1"
version = "0.12.1"
repository = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs"
edition = "2021"
rust-version = "1.71"
rust-version = "1.70"
[workspace.dependencies]
once_cell = "1"
glib = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
gio = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
cairo-rs = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master", features=["use_glib"] }
pango = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
pangocairo = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
gtk = { package = "gtk4", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master", features = ["v4_6"]}
gdk-wayland = { package = "gdk4-wayland", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master", features = ["v4_4"]}
gdk-x11 = { package = "gdk4-x11", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master", features = ["v4_4"]}
gdk-win32 = { package = "gdk4-win32", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master", features = ["v4_4"]}
gst = { package = "gstreamer", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-allocators = { package = "gstreamer-allocators", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-app = { package = "gstreamer-app", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-audio = { package = "gstreamer-audio", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-base = { package = "gstreamer-base", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-check = { package = "gstreamer-check", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-gl = { package = "gstreamer-gl", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-gl-egl = { package = "gstreamer-gl-egl", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-gl-wayland = { package = "gstreamer-gl-wayland", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-gl-x11 = { package = "gstreamer-gl-x11", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-net = { package = "gstreamer-net", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-pbutils = { package = "gstreamer-pbutils", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-plugin-version-helper = { path="./version-helper" }
gst-rtp = { package = "gstreamer-rtp", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-sdp = { package = "gstreamer-sdp", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-utils = { package = "gstreamer-utils", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-video = { package = "gstreamer-video", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-webrtc = { package = "gstreamer-webrtc", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
glib = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "0.19", version = "0.19" }
gio = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "0.19", version = "0.19" }
cairo-rs = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "0.19", version = "0.19", features=["use_glib"] }
pango = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "0.19", version = "0.19" }
pangocairo = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "0.19", version = "0.19" }
gtk = { package = "gtk4", git = "https://github.com/gtk-rs/gtk4-rs", branch = "0.8", version = "0.8"}
gdk-wayland = { package = "gdk4-wayland", git = "https://github.com/gtk-rs/gtk4-rs", branch = "0.8", version = "0.8"}
gdk-x11 = { package = "gdk4-x11", git = "https://github.com/gtk-rs/gtk4-rs", branch = "0.8", version = "0.8"}
gdk-win32 = { package = "gdk4-win32", git = "https://github.com/gtk-rs/gtk4-rs", branch = "0.8", version = "0.8"}
gst = { package = "gstreamer", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-app = { package = "gstreamer-app", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-audio = { package = "gstreamer-audio", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-base = { package = "gstreamer-base", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-check = { package = "gstreamer-check", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-gl = { package = "gstreamer-gl", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-gl-egl = { package = "gstreamer-gl-egl", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-gl-wayland = { package = "gstreamer-gl-wayland", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-gl-x11 = { package = "gstreamer-gl-x11", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-net = { package = "gstreamer-net", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-pbutils = { package = "gstreamer-pbutils", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-plugin-version-helper = { path="./version-helper", version = "0.8" }
gst-rtp = { package = "gstreamer-rtp", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-sdp = { package = "gstreamer-sdp", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-utils = { package = "gstreamer-utils", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-video = { package = "gstreamer-video", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }
gst-webrtc = { package = "gstreamer-webrtc", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "0.22", version = "0.22" }

README.md

@ -23,7 +23,6 @@ You will find the following plugins in this repository:
- `aws`: Various elements for Amazon AWS services using the [AWS SDK](https://awslabs.github.io/aws-sdk-rust/) library
- `s3src`/`s3sink`: A source and sink element to talk to the Amazon S3 object storage system.
- `s3putobjectsink`: A sink element to talk to Amazon S3. Uses `PutObject` instead of multi-part upload like `s3sink`.
- `s3hlssink`: A sink element to store HLS streams on Amazon S3.
- `awstranscriber`: an element wrapping the AWS Transcriber service.
- `awstranscribeparse`: an element parsing the packets of the AWS Transcriber service.
@ -34,9 +33,6 @@ You will find the following plugins in this repository:
- `onvif`: Various elements for parsing, RTP (de)payloading, overlaying of ONVIF timed metadata.
- `quinn`: Transfer data over the network using QUIC
- `quinnquicsink`/`quinnquicsrc`: Send and receive data using QUIC
- `raptorq`: Encoder/decoder element for RaptorQ RTP FEC mechanism.
- `reqwest`: An HTTP source element based on the [reqwest](https://github.com/seanmonstar/reqwest) library.

View file

@ -11,8 +11,8 @@ use gst::prelude::*;
use gst::subclass::prelude::*;
use gst_audio::subclass::prelude::*;
use std::cmp;
use std::sync::Mutex;
use std::{cmp, u64};
use byte_slice_cast::*;

View file

@ -18,6 +18,7 @@ use gst::subclass::prelude::*;
use std::mem;
use std::sync::Mutex;
use std::u64;
use byte_slice_cast::*;
@ -263,7 +264,7 @@ impl State {
// Drains everything
fn drain(&mut self, imp: &AudioLoudNorm) -> Result<gst::Buffer, gst::FlowError> {
gst::debug!(CAT, imp = imp, "Draining");
gst::debug!(CAT, imp: imp, "Draining");
let (pts, distance) = self.adapter.prev_pts();
let distance_samples = distance / self.info.bpf() as u64;
@ -298,7 +299,7 @@ impl State {
self.frame_type = FrameType::Final;
} else if src.is_empty() {
// Nothing to drain at all
gst::debug!(CAT, imp = imp, "No data to drain");
gst::debug!(CAT, imp: imp, "No data to drain");
return Err(gst::FlowError::Eos);
}
@ -341,7 +342,7 @@ impl State {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Calculated global loudness for first frame {} with peak {}",
global,
true_peak
@ -395,7 +396,7 @@ impl State {
self.prev_delta = self.delta[self.index];
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Initializing for first frame with gain adjustment of {}",
self.prev_delta
);
@ -457,7 +458,7 @@ impl State {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Applying gain adjustment {}-{}",
gain,
gain_next
@ -531,7 +532,7 @@ impl State {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Calculated global loudness {}, short term loudness {} and relative threshold {}",
global,
shortterm,
@ -554,7 +555,7 @@ impl State {
self.above_threshold = true;
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Above threshold now ({} >= {}, {} > -70)",
shortterm_out,
self.target_i,
@ -582,7 +583,7 @@ impl State {
self.prev_delta = self.delta[self.index];
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Calculated new gain adjustment {}",
self.prev_delta
);
@ -753,7 +754,7 @@ impl State {
// amount of samples the last frame is short to reach the correct read position.
if next_frame_size < FRAME_SIZE {
self.limiter_buf_index += FRAME_SIZE - next_frame_size;
if self.limiter_buf_index >= self.limiter_buf.len() {
if self.limiter_buf_index > self.limiter_buf.len() {
self.limiter_buf_index -= self.limiter_buf.len();
}
}
@ -776,7 +777,7 @@ impl State {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Applying linear gain adjustment of {}",
self.offset
);
@ -855,7 +856,7 @@ impl State {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Found peak {} at sample {}, going to attack state at sample {} (gain reduction {}-{})",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -992,7 +993,7 @@ impl State {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Found new peak {} at sample {}, restarting attack state at sample {} (gain reduction {}-{})",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -1041,7 +1042,7 @@ impl State {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Found new peak {} at sample {}, adjusting attack state at sample {} (gain reduction {}-{})",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -1056,7 +1057,7 @@ impl State {
// to ensure that we at least sustain it for that long afterwards.
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Found new low peak {} at sample {} in attack state at sample {}",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -1072,7 +1073,7 @@ impl State {
// If we reached the target gain reduction, go into sustain state.
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Going to sustain state at sample {} (gain reduction {})",
smp_cnt,
self.gain_reduction[1]
@ -1151,7 +1152,7 @@ impl State {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Found new peak {} at sample {}, going back to attack state at sample {} (gain reduction {}-{})",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -1162,7 +1163,7 @@ impl State {
} else {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Found new peak {} at sample {}, going sustain further at sample {} (gain reduction {})",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -1189,7 +1190,7 @@ impl State {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Going to release state for sample {} at sample {} (gain reduction {}-1.0)",
smp_cnt + LIMITER_RELEASE_WINDOW,
smp_cnt,
@ -1259,7 +1260,7 @@ impl State {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Found new peak {} at sample {}, going back to attack state at sample {} (gain reduction {}-{})",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -1271,7 +1272,7 @@ impl State {
self.gain_reduction[1] = current_gain_reduction;
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Going from release to sustain state at sample {} because of low peak {} at sample {} (gain reduction {})",
smp_cnt,
peak_value,
@ -1312,7 +1313,7 @@ impl State {
self.limiter_state = LimiterState::Out;
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Leaving release state and going to out state at sample {}",
smp_cnt,
);
@ -1350,7 +1351,7 @@ impl State {
self.gain_reduction[1] = self.target_tp / max;
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Reducing gain for start of first frame by {} ({} > {}) and going to sustain state",
self.gain_reduction[1],
max,
@ -1366,7 +1367,7 @@ impl State {
let channels = self.info.channels() as usize;
let nb_samples = dst.len() / channels;
gst::debug!(CAT, imp = imp, "Running limiter for {} samples", nb_samples);
gst::debug!(CAT, imp: imp, "Running limiter for {} samples", nb_samples);
// For the first frame we can't adjust the gain before it smoothly anymore so instead
// apply the gain reduction immediately if we get above the threshold and move to sustain
@ -1535,12 +1536,12 @@ impl AudioLoudNorm {
_pad: &gst::Pad,
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::log!(CAT, imp = self, "Handling buffer {:?}", buffer);
gst::log!(CAT, imp: self, "Handling buffer {:?}", buffer);
let mut state_guard = self.state.borrow_mut();
let state = match *state_guard {
None => {
gst::error!(CAT, imp = self, "Not negotiated yet");
gst::error!(CAT, imp: self, "Not negotiated yet");
return Err(gst::FlowError::NotNegotiated);
}
Some(ref mut state) => state,
@ -1548,7 +1549,7 @@ impl AudioLoudNorm {
let mut outbufs = vec![];
if buffer.flags().contains(gst::BufferFlags::DISCONT) {
gst::debug!(CAT, imp = self, "Draining on discontinuity");
gst::debug!(CAT, imp: self, "Draining on discontinuity");
match state.drain(self) {
Ok(outbuf) => {
outbufs.push(outbuf);
@ -1566,7 +1567,7 @@ impl AudioLoudNorm {
drop(state_guard);
for buffer in outbufs {
gst::log!(CAT, imp = self, "Outputting buffer {:?}", buffer);
gst::log!(CAT, imp: self, "Outputting buffer {:?}", buffer);
self.srcpad.push(buffer)?;
}
@ -1576,17 +1577,17 @@ impl AudioLoudNorm {
fn sink_event(&self, pad: &gst::Pad, event: gst::Event) -> bool {
use gst::EventView;
gst::log!(CAT, obj = pad, "Handling event {:?}", event);
gst::log!(CAT, obj: pad, "Handling event {:?}", event);
match event.view() {
EventView::Caps(c) => {
let caps = c.caps();
gst::info!(CAT, obj = pad, "Got caps {:?}", caps);
gst::info!(CAT, obj: pad, "Got caps {:?}", caps);
let info = match gst_audio::AudioInfo::from_caps(caps) {
Ok(info) => info,
Err(_) => {
gst::error!(CAT, obj = pad, "Failed to parse caps");
gst::error!(CAT, obj: pad, "Failed to parse caps");
return false;
}
};
@ -1604,9 +1605,9 @@ impl AudioLoudNorm {
drop(state);
if let Some(outbuf) = outbuf {
gst::log!(CAT, imp = self, "Outputting buffer {:?}", outbuf);
gst::log!(CAT, imp: self, "Outputting buffer {:?}", outbuf);
if let Err(err) = self.srcpad.push(outbuf) {
gst::error!(CAT, imp = self, "Failed to push drained data: {}", err);
gst::error!(CAT, imp: self, "Failed to push drained data: {}", err);
return false;
}
@ -1626,11 +1627,11 @@ impl AudioLoudNorm {
drop(state);
if let Some(outbuf) = outbuf {
gst::log!(CAT, imp = self, "Outputting buffer {:?}", outbuf);
gst::log!(CAT, imp: self, "Outputting buffer {:?}", outbuf);
if let Err(err) = self.srcpad.push(outbuf) {
gst::error!(
CAT,
imp = self,
imp: self,
"Failed to push drained data on EOS: {}",
err
);
@ -1660,7 +1661,7 @@ impl AudioLoudNorm {
fn src_query(&self, pad: &gst::Pad, query: &mut gst::QueryRef) -> bool {
use gst::QueryViewMut;
gst::log!(CAT, obj = pad, "Handling query {:?}", query);
gst::log!(CAT, obj: pad, "Handling query {:?}", query);
match query.view_mut() {
QueryViewMut::Latency(q) => {
let mut peer_query = gst::query::Latency::new();

View file

@ -113,7 +113,7 @@ impl AudioRNNoise {
let settings = *self.settings.lock().unwrap();
let mut buffer = gst::Buffer::with_size(available).map_err(|e| {
gst::error!(CAT, imp = self, "Failed to allocate buffer at EOS {:?}", e);
gst::error!(CAT, imp: self, "Failed to allocate buffer at EOS {:?}", e);
gst::FlowError::Flushing
})?;
@ -214,7 +214,7 @@ impl AudioRNNoise {
);
}
gst::trace!(CAT, imp = self, "Voice activity: {}", vad);
gst::trace!(CAT, imp: self, "Voice activity: {}", vad);
if vad < settings.vad_threshold {
out_frame.fill(0.0);
} else {
@ -237,9 +237,8 @@ impl AudioRNNoise {
gst::trace!(
CAT,
imp = self,
"rms: {}, level: {}, has_voice : {} ",
rms,
imp: self,
"rms: {}, level: {}, has_voice : {} ", rms,
level,
has_voice
);
@ -346,7 +345,7 @@ impl BaseTransformImpl for AudioRNNoise {
use gst::EventView;
if let EventView::Eos(_) = event.view() {
gst::debug!(CAT, imp = self, "Handling EOS");
gst::debug!(CAT, imp: self, "Handling EOS");
if self.drain().is_err() {
return false;
}
@ -362,7 +361,7 @@ impl BaseTransformImpl for AudioRNNoise {
let (live, mut min, mut max) = upstream_query.result();
gst::debug!(
CAT,
imp = self,
imp: self,
"Peer latency: live {} min {} max {}",
live,
min,
@ -407,7 +406,7 @@ impl AudioFilterImpl for AudioRNNoise {
})?;
}
gst::debug!(CAT, imp = self, "Set caps to {:?}", info);
gst::debug!(CAT, imp: self, "Set caps to {:?}", info);
let mut denoisers = vec![];
for _i in 0..info.channels() {

View file

@ -12,6 +12,7 @@ use gst::subclass::prelude::*;
use gst_audio::subclass::prelude::*;
use gst_base::prelude::*;
use std::i32;
use std::sync::atomic;
use std::sync::Mutex;
@ -129,7 +130,7 @@ impl ObjectImpl for EbuR128Level {
let this = args[0].get::<super::EbuR128Level>().unwrap();
let imp = this.imp();
gst::info!(CAT, obj = this, "Resetting measurements",);
gst::info!(CAT, obj: this, "Resetting measurements",);
imp.reset.store(true, atomic::Ordering::SeqCst);
None
@ -175,7 +176,7 @@ impl ObjectImpl for EbuR128Level {
let mode = value.get().expect("type checked upstream");
gst::info!(
CAT,
imp = self,
imp: self,
"Changing mode from {:?} to {:?}",
settings.mode,
mode
@ -186,7 +187,7 @@ impl ObjectImpl for EbuR128Level {
let post_messages = value.get().expect("type checked upstream");
gst::info!(
CAT,
imp = self,
imp: self,
"Changing post-messages from {} to {}",
settings.post_messages,
post_messages
@ -197,7 +198,7 @@ impl ObjectImpl for EbuR128Level {
let interval = value.get::<u64>().unwrap().nseconds();
gst::info!(
CAT,
imp = self,
imp: self,
"Changing interval from {} to {}",
settings.interval,
interval,
@ -286,7 +287,7 @@ impl BaseTransformImpl for EbuR128Level {
// Drop state
let _ = self.state.borrow_mut().take();
gst::info!(CAT, imp = self, "Stopped");
gst::info!(CAT, imp: self, "Stopped");
Ok(())
}
@ -377,7 +378,7 @@ impl BaseTransformImpl for EbuR128Level {
Ok(loudness) => s.set("momentary-loudness", loudness),
Err(err) => gst::error!(
CAT,
imp = self,
imp: self,
"Failed to get momentary loudness: {}",
err
),
@ -389,7 +390,7 @@ impl BaseTransformImpl for EbuR128Level {
Ok(loudness) => s.set("shortterm-loudness", loudness),
Err(err) => gst::error!(
CAT,
imp = self,
imp: self,
"Failed to get shortterm loudness: {}",
err
),
@ -401,7 +402,7 @@ impl BaseTransformImpl for EbuR128Level {
Ok(loudness) => s.set("global-loudness", loudness),
Err(err) => gst::error!(
CAT,
imp = self,
imp: self,
"Failed to get global loudness: {}",
err
),
@ -411,7 +412,7 @@ impl BaseTransformImpl for EbuR128Level {
Ok(threshold) => s.set("relative-threshold", threshold),
Err(err) => gst::error!(
CAT,
imp = self,
imp: self,
"Failed to get relative threshold: {}",
err
),
@ -422,12 +423,7 @@ impl BaseTransformImpl for EbuR128Level {
match state.ebur128.loudness_range() {
Ok(range) => s.set("loudness-range", range),
Err(err) => {
gst::error!(
CAT,
imp = self,
"Failed to get loudness range: {}",
err
)
gst::error!(CAT, imp: self, "Failed to get loudness range: {}", err)
}
}
}
@ -440,7 +436,7 @@ impl BaseTransformImpl for EbuR128Level {
match peaks {
Ok(peaks) => s.set("sample-peak", peaks),
Err(err) => {
gst::error!(CAT, imp = self, "Failed to get sample peaks: {}", err)
gst::error!(CAT, imp: self, "Failed to get sample peaks: {}", err)
}
}
}
@ -453,12 +449,12 @@ impl BaseTransformImpl for EbuR128Level {
match peaks {
Ok(peaks) => s.set("true-peak", peaks),
Err(err) => {
gst::error!(CAT, imp = self, "Failed to get true peaks: {}", err)
gst::error!(CAT, imp: self, "Failed to get true peaks: {}", err)
}
}
}
gst::debug!(CAT, imp = self, "Posting message {}", s);
gst::debug!(CAT, imp: self, "Posting message {}", s);
let msg = gst::message::Element::builder(s).src(&*self.obj()).build();
@ -509,7 +505,7 @@ impl AudioFilterImpl for EbuR128Level {
}
fn setup(&self, info: &gst_audio::AudioInfo) -> Result<(), gst::LoggableError> {
gst::debug!(CAT, imp = self, "Configured for caps {:?}", info);
gst::debug!(CAT, imp: self, "Configured for caps {:?}", info);
let settings = *self.settings.lock().unwrap();
@ -572,7 +568,7 @@ impl AudioFilterImpl for EbuR128Level {
val => {
gst::debug!(
CAT,
imp = self,
imp: self,
"Unknown channel position {:?}, ignoring channel",
val
);
@ -750,12 +746,12 @@ fn interleaved_channel_data_into_slice<'a, T: FromByteSlice>(
) -> Result<&'a [T], gst::FlowError> {
buf.plane_data(0)
.map_err(|err| {
gst::error!(CAT, imp = imp, "Failed to get audio data: {}", err);
gst::error!(CAT, imp: imp, "Failed to get audio data: {}", err);
gst::FlowError::Error
})?
.as_slice_of::<T>()
.map_err(|err| {
gst::error!(CAT, imp = imp, "Failed to handle audio data: {}", err);
gst::error!(CAT, imp: imp, "Failed to handle audio data: {}", err);
gst::FlowError::Error
})
}
@ -769,12 +765,12 @@ fn non_interleaved_channel_data_into_slices<'a, T: FromByteSlice>(
.map(|c| {
buf.plane_data(c)
.map_err(|err| {
gst::error!(CAT, imp = imp, "Failed to get audio data: {}", err);
gst::error!(CAT, imp: imp, "Failed to get audio data: {}", err);
gst::FlowError::Error
})?
.as_slice_of::<T>()
.map_err(|err| {
gst::error!(CAT, imp = imp, "Failed to handle audio data: {}", err);
gst::error!(CAT, imp: imp, "Failed to handle audio data: {}", err);
gst::FlowError::Error
})
})

View file

@ -224,7 +224,7 @@ impl HrtfRender {
let mut outbuf =
gst_audio::AudioBufferRef::from_buffer_ref_writable(outbuf, &state.outinfo).map_err(
|err| {
gst::error!(CAT, imp = self, "Failed to map buffer : {}", err);
gst::error!(CAT, imp: self, "Failed to map buffer : {}", err);
gst::FlowError::Error
},
)?;
@ -248,13 +248,13 @@ impl HrtfRender {
while state.adapter.available() >= inblksz {
let inbuf = state.adapter.take_buffer(inblksz).map_err(|_| {
gst::error!(CAT, imp = self, "Failed to map buffer");
gst::error!(CAT, imp: self, "Failed to map buffer");
gst::FlowError::Error
})?;
let inbuf = gst_audio::AudioBuffer::from_buffer_readable(inbuf, &state.ininfo)
.map_err(|_| {
gst::error!(CAT, imp = self, "Failed to map buffer");
gst::error!(CAT, imp: self, "Failed to map buffer");
gst::FlowError::Error
})?;
@ -624,7 +624,7 @@ impl BaseTransformImpl for HrtfRender {
gst::log!(
CAT,
imp = self,
imp: self,
"Adapter size: {}, input size {}, transformed size {}",
state.adapter.available(),
size,
@ -649,7 +649,7 @@ impl BaseTransformImpl for HrtfRender {
if direction == gst::PadDirection::Sink {
s.set("channels", 2);
s.set("channel-mask", gst::Bitmask(0x3));
s.set("channel-mask", 0x3);
} else {
let settings = self.settings.lock().unwrap();
if let Some(objs) = &settings.spatial_objects {
@ -670,7 +670,7 @@ impl BaseTransformImpl for HrtfRender {
gst::debug!(
CAT,
imp = self,
imp: self,
"Transformed caps from {} to {} in direction {:?}",
caps,
other_caps,
@ -741,7 +741,7 @@ impl BaseTransformImpl for HrtfRender {
adapter: gst_base::UniqueAdapter::new(),
});
gst::debug!(CAT, imp = self, "Configured for caps {}", incaps);
gst::debug!(CAT, imp: self, "Configured for caps {}", incaps);
Ok(())
}
@ -749,7 +749,7 @@ impl BaseTransformImpl for HrtfRender {
fn sink_event(&self, event: gst::Event) -> bool {
use gst::EventView;
gst::debug!(CAT, imp = self, "Handling event {:?}", event);
gst::debug!(CAT, imp: self, "Handling event {:?}", event);
match event.view() {
EventView::FlushStop(_) => {

View file

@ -198,7 +198,7 @@ fn basic_two_channels() {
#[test]
fn silence() {
run_test("wave=silence", None, 1000, 1024, 1, f64::NEG_INFINITY);
run_test("wave=silence", None, 1000, 1024, 1, std::f64::NEG_INFINITY);
}
#[test]
@ -228,7 +228,7 @@ fn below_threshold() {
1000,
1024,
1,
f64::NEG_INFINITY,
std::f64::NEG_INFINITY,
);
}

View file

@ -115,7 +115,7 @@ impl AudioDecoderImpl for ClaxonDec {
}
fn set_format(&self, caps: &gst::Caps) -> Result<(), gst::LoggableError> {
gst::debug!(CAT, imp = self, "Setting format {:?}", caps);
gst::debug!(CAT, imp: self, "Setting format {:?}", caps);
let mut audio_info: Option<gst_audio::AudioInfo> = None;
@ -124,15 +124,15 @@ impl AudioDecoderImpl for ClaxonDec {
let streamheaders = streamheaders.as_slice();
if streamheaders.len() < 2 {
gst::debug!(CAT, imp = self, "Not enough streamheaders, trying in-band");
gst::debug!(CAT, imp: self, "Not enough streamheaders, trying in-band");
} else {
let ident_buf = streamheaders[0].get::<Option<gst::Buffer>>();
if let Ok(Some(ident_buf)) = ident_buf {
gst::debug!(CAT, imp = self, "Got streamheader buffers");
gst::debug!(CAT, imp: self, "Got streamheader buffers");
let inmap = ident_buf.map_readable().unwrap();
if inmap[0..7] != [0x7f, b'F', b'L', b'A', b'C', 0x01, 0x00] {
gst::debug!(CAT, imp = self, "Unknown streamheader format");
gst::debug!(CAT, imp: self, "Unknown streamheader format");
} else if let Ok(tstreaminfo) = claxon_streaminfo(&inmap[13..]) {
if let Ok(taudio_info) = gstaudioinfo(&tstreaminfo) {
// To speed up negotiation
@ -142,7 +142,7 @@ impl AudioDecoderImpl for ClaxonDec {
{
gst::debug!(
CAT,
imp = self,
imp: self,
"Error to negotiate output from based on in-caps streaminfo"
);
}
@ -165,7 +165,7 @@ impl AudioDecoderImpl for ClaxonDec {
&self,
inbuf: Option<&gst::Buffer>,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::debug!(CAT, imp = self, "Handling buffer {:?}", inbuf);
gst::debug!(CAT, imp: self, "Handling buffer {:?}", inbuf);
let inbuf = match inbuf {
None => return Ok(gst::FlowSuccess::Ok),
@ -173,7 +173,7 @@ impl AudioDecoderImpl for ClaxonDec {
};
let inmap = inbuf.map_readable().map_err(|_| {
gst::error!(CAT, imp = self, "Failed to buffer readable");
gst::error!(CAT, imp: self, "Failed to buffer readable");
gst::FlowError::Error
})?;
@ -181,18 +181,18 @@ impl AudioDecoderImpl for ClaxonDec {
let state = state_guard.as_mut().ok_or(gst::FlowError::NotNegotiated)?;
if inmap.as_slice() == b"fLaC" {
gst::debug!(CAT, imp = self, "fLaC buffer received");
gst::debug!(CAT, imp: self, "fLaC buffer received");
} else if inmap[0] & 0x7F == 0x00 {
gst::debug!(CAT, imp = self, "Streaminfo header buffer received");
gst::debug!(CAT, imp: self, "Streaminfo header buffer received");
return self.handle_streaminfo_header(state, inmap.as_ref());
} else if inmap[0] == 0b1111_1111 && inmap[1] & 0b1111_1100 == 0b1111_1000 {
gst::debug!(CAT, imp = self, "Data buffer received");
gst::debug!(CAT, imp: self, "Data buffer received");
return self.handle_data(state, inmap.as_ref());
} else {
// info about other headers in flacparse and https://xiph.org/flac/format.html
gst::debug!(
CAT,
imp = self,
imp: self,
"Other header buffer received {:?}",
inmap[0] & 0x7F
);
@ -220,7 +220,7 @@ impl ClaxonDec {
gst::debug!(
CAT,
imp = self,
imp: self,
"Successfully parsed headers: {:?}",
audio_info
);

View file

@ -17,6 +17,7 @@ use gst_base::subclass::prelude::*;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Mutex;
use std::{f64, i32};
use byte_slice_cast::*;
@ -191,7 +192,7 @@ impl CsoundFilter {
(avail / state.in_info.channels() as usize) * state.out_info.channels() as usize;
let mut buffer = gst::Buffer::with_size(out_bytes).map_err(|e| {
gst::error!(CAT, imp = self, "Failed to allocate buffer at EOS {:?}", e);
gst::error!(CAT, imp: self, "Failed to allocate buffer at EOS {:?}", e);
gst::FlowError::Flushing
})?;
@ -246,7 +247,7 @@ impl CsoundFilter {
gst::log!(
CAT,
imp = self,
imp: self,
"Generating output at: {} - duration: {}",
pts.display(),
duration.display(),
@ -481,7 +482,7 @@ impl BaseTransformImpl for CsoundFilter {
csound.reset();
let _ = self.state.lock().unwrap().take();
gst::info!(CAT, imp = self, "Stopped");
gst::info!(CAT, imp: self, "Stopped");
Ok(())
}
@ -490,7 +491,7 @@ impl BaseTransformImpl for CsoundFilter {
use gst::EventView;
if let EventView::Eos(_) = event.view() {
gst::log!(CAT, imp = self, "Handling Eos");
gst::log!(CAT, imp: self, "Handling Eos");
if self.drain().is_err() {
return false;
}
@ -535,7 +536,7 @@ impl BaseTransformImpl for CsoundFilter {
gst::debug!(
CAT,
imp = self,
imp: self,
"Transformed caps from {} to {} in direction {:?}",
caps,
other_caps,

View file

@ -120,7 +120,7 @@ impl AudioDecoderImpl for LewtonDec {
}
fn set_format(&self, caps: &gst::Caps) -> Result<(), gst::LoggableError> {
gst::debug!(CAT, imp = self, "Setting format {:?}", caps);
gst::debug!(CAT, imp: self, "Setting format {:?}", caps);
// When the caps are changing we require new headers
let mut state_guard = self.state.borrow_mut();
@ -138,7 +138,7 @@ impl AudioDecoderImpl for LewtonDec {
if let Ok(Some(streamheaders)) = s.get_optional::<gst::ArrayRef>("streamheader") {
let streamheaders = streamheaders.as_slice();
if streamheaders.len() < 3 {
gst::debug!(CAT, imp = self, "Not enough streamheaders, trying in-band");
gst::debug!(CAT, imp: self, "Not enough streamheaders, trying in-band");
return Ok(());
}
@ -148,7 +148,7 @@ impl AudioDecoderImpl for LewtonDec {
if let (Ok(Some(ident_buf)), Ok(Some(comment_buf)), Ok(Some(setup_buf))) =
(ident_buf, comment_buf, setup_buf)
{
gst::debug!(CAT, imp = self, "Got streamheader buffers");
gst::debug!(CAT, imp: self, "Got streamheader buffers");
state.header_bufs = (Some(ident_buf), Some(comment_buf), Some(setup_buf));
}
}
@ -157,7 +157,7 @@ impl AudioDecoderImpl for LewtonDec {
}
fn flush(&self, _hard: bool) {
gst::debug!(CAT, imp = self, "Flushing");
gst::debug!(CAT, imp: self, "Flushing");
let mut state_guard = self.state.borrow_mut();
if let Some(ref mut state) = *state_guard {
@ -169,7 +169,7 @@ impl AudioDecoderImpl for LewtonDec {
&self,
inbuf: Option<&gst::Buffer>,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::debug!(CAT, imp = self, "Handling buffer {:?}", inbuf);
gst::debug!(CAT, imp: self, "Handling buffer {:?}", inbuf);
let inbuf = match inbuf {
None => return Ok(gst::FlowSuccess::Ok),
@ -177,7 +177,7 @@ impl AudioDecoderImpl for LewtonDec {
};
let inmap = inbuf.map_readable().map_err(|_| {
gst::error!(CAT, imp = self, "Failed to buffer readable");
gst::error!(CAT, imp: self, "Failed to buffer readable");
gst::FlowError::Error
})?;
@ -191,7 +191,7 @@ impl AudioDecoderImpl for LewtonDec {
if state.headerset.is_some() {
return Ok(gst::FlowSuccess::Ok);
} else {
gst::error!(CAT, imp = self, "Got empty packet before all headers");
gst::error!(CAT, imp: self, "Got empty packet before all headers");
return Err(gst::FlowError::Error);
}
}
@ -219,14 +219,14 @@ impl LewtonDec {
) -> Result<gst::FlowSuccess, gst::FlowError> {
// ident header
if indata[0] == 0x01 {
gst::debug!(CAT, imp = self, "Got ident header buffer");
gst::debug!(CAT, imp: self, "Got ident header buffer");
state.header_bufs = (Some(inbuf.clone()), None, None);
} else if indata[0] == 0x03 {
// comment header
if state.header_bufs.0.is_none() {
gst::warning!(CAT, imp = self, "Got comment header before ident header");
gst::warning!(CAT, imp: self, "Got comment header before ident header");
} else {
gst::debug!(CAT, imp = self, "Got comment header buffer");
gst::debug!(CAT, imp: self, "Got comment header buffer");
state.header_bufs.1 = Some(inbuf.clone());
}
} else if indata[0] == 0x05 {
@ -234,11 +234,11 @@ impl LewtonDec {
if state.header_bufs.0.is_none() || state.header_bufs.1.is_none() {
gst::warning!(
CAT,
imp = self,
imp: self,
"Got setup header before ident/comment header"
);
} else {
gst::debug!(CAT, imp = self, "Got setup header buffer");
gst::debug!(CAT, imp: self, "Got setup header buffer");
state.header_bufs.2 = Some(inbuf.clone());
}
}
@ -263,7 +263,7 @@ impl LewtonDec {
// First try to parse the headers
let ident_map = ident_buf.map_readable().map_err(|_| {
gst::error!(CAT, imp = self, "Failed to map ident buffer readable");
gst::error!(CAT, imp: self, "Failed to map ident buffer readable");
gst::FlowError::Error
})?;
let ident = lewton::header::read_header_ident(ident_map.as_ref()).map_err(|err| {
@ -276,7 +276,7 @@ impl LewtonDec {
})?;
let comment_map = comment_buf.map_readable().map_err(|_| {
gst::error!(CAT, imp = self, "Failed to map comment buffer readable");
gst::error!(CAT, imp: self, "Failed to map comment buffer readable");
gst::FlowError::Error
})?;
let comment = lewton::header::read_header_comment(comment_map.as_ref()).map_err(|err| {
@ -289,7 +289,7 @@ impl LewtonDec {
})?;
let setup_map = setup_buf.map_readable().map_err(|_| {
gst::error!(CAT, imp = self, "Failed to map setup buffer readable");
gst::error!(CAT, imp: self, "Failed to map setup buffer readable");
gst::FlowError::Error
})?;
let setup = lewton::header::read_header_setup(
@ -327,7 +327,7 @@ impl LewtonDec {
if gst_audio::channel_reorder_map(from, to, &mut map[..channels]).is_err() {
gst::error!(
CAT,
imp = self,
imp: self,
"Failed to generate channel reorder map from {:?} to {:?}",
from,
to,
@ -343,7 +343,7 @@ impl LewtonDec {
gst::debug!(
CAT,
imp = self,
imp: self,
"Successfully parsed headers: {:?}",
audio_info
);
@ -396,7 +396,7 @@ impl LewtonDec {
}
let sample_count = decoded.samples.len() / audio_info.channels() as usize;
gst::debug!(CAT, imp = self, "Got {} decoded samples", sample_count);
gst::debug!(CAT, imp: self, "Got {} decoded samples", sample_count);
if sample_count == 0 {
return self.obj().finish_frame(None, 1);

View file

@ -1,50 +0,0 @@
[package]
name = "gst-plugin-speechmatics"
version.workspace = true
authors = ["Mathieu Duponchelle <mathieu@centricular.com>"]
repository.workspace = true
license = "MPL-2.0"
description = "GStreamer Speechmatics plugin"
edition.workspace = true
rust-version.workspace = true
[dependencies]
futures = "0.3"
gst.workspace = true
gst-base.workspace = true
gst-audio = { workspace = true, features = ["v1_16"] }
tokio = { version = "1", features = [ "full" ] }
async-tungstenite = { version = "0.28", features = ["tokio", "tokio-runtime", "tokio-native-tls"] }
once_cell.workspace = true
serde = { version = "1", features = ["derive"] }
serde_json = "1"
atomic_refcell = "0.1"
http = { version = "1.0" }
url = "2"
[lib]
name = "gstspeechmatics"
crate-type = ["cdylib", "rlib"]
path = "src/lib.rs"
[build-dependencies]
gst-plugin-version-helper.workspace = true
[features]
static = []
capi = []
doc = ["gst/v1_18"]
[package.metadata.capi]
min_version = "0.9.21"
[package.metadata.capi.header]
enabled = false
[package.metadata.capi.library]
install_subdir = "gstreamer-1.0"
versioning = false
import_library = false
[package.metadata.capi.pkg_config]
requires_private = "gstreamer-1.0, gstreamer-base-1.0, gobject-2.0, glib-2.0, gmodule-2.0"

View file

@ -1,373 +0,0 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

View file

@ -1,3 +0,0 @@
fn main() {
gst_plugin_version_helper::info()
}

View file

@ -1,36 +0,0 @@
// Copyright (C) 2024 Mathieu Duponchelle <mathieu@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(clippy::non_send_fields_in_send_ty, unused_doc_comments)]
#![recursion_limit = "128"]
/**
* plugin-speechmatics:
*
* Since: plugins-rs-0.14.0
*/
use gst::glib;
mod transcriber;
fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
transcriber::register(plugin)?;
Ok(())
}
gst::plugin_define!(
speechmatics,
env!("CARGO_PKG_DESCRIPTION"),
plugin_init,
concat!(env!("CARGO_PKG_VERSION"), "-", env!("COMMIT_ID")),
"Proprietary",
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_REPOSITORY"),
env!("BUILD_REL_DATE")
);

File diff suppressed because it is too large

View file

@ -1,33 +0,0 @@
// Copyright (C) 2024 Mathieu Duponchelle <mathieu@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub struct Transcriber(ObjectSubclass<imp::Transcriber>) @extends gst::Element, gst::Object, @implements gst::ChildProxy;
}
glib::wrapper! {
pub struct TranscriberSrcPad(ObjectSubclass<imp::TranscriberSrcPad>) @extends gst::Pad, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
#[cfg(feature = "doc")]
{
TranscriberSrcPad::static_type().mark_as_plugin_api(gst::PluginAPIFlags::empty());
}
gst::Element::register(
Some(plugin),
"speechmaticstranscriber",
gst::Rank::NONE,
Transcriber::static_type(),
)
}

View file

@ -11,9 +11,8 @@ rust-version.workspace = true
[dependencies]
gst.workspace = true
gst-base.workspace = true
librespot-core = "0.4"
librespot-playback = "0.4"
tokio = { version = "1", features = ["rt-multi-thread"] }
librespot = { version = "0.4", default-features = false }
tokio = "1.0"
futures = "0.3"
anyhow = "1.0"
url = "2.3"

View file

@ -8,11 +8,10 @@ to respect their legal/licensing restrictions.
## Spotify Credentials
This plugin requires a [Spotify Premium](https://www.spotify.com/premium/) account.
If your account is linked with Facebook, you'll need to setup
a [device username and password](https://www.spotify.com/us/account/set-device-password/).
This plugin requires a [Spotify Premium](https://www.spotify.com/premium/) account configured
with a [device password](https://www.spotify.com/us/account/set-device-password/).
Those username and password are then set using the `username` and `password` properties.
You can then set the device username and password using the `username` and `password` properties.
You may also want to cache credentials and downloaded files, see the `cache-` properties on the element.
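For illustration, a minimal playback pipeline could look like the following (assuming the element is named `spotifyaudiosrc`, that the track is selected through a `track` property taking a `spotify:track:...` URI, and that the element outputs an Ogg/Vorbis stream; `<username>`, `<password>` and `<track-id>` are placeholders):

    gst-launch-1.0 spotifyaudiosrc username=<username> password=<password> \
        track=spotify:track:<track-id> ! \
        oggdemux ! vorbisdec ! audioconvert ! audioresample ! autoaudiosink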

View file

@ -11,10 +11,10 @@ use anyhow::bail;
use gst::glib;
use gst::prelude::*;
use librespot_core::{
authentication::Credentials, cache::Cache, config::SessionConfig, session::Session,
spotify_id::SpotifyId,
use librespot::core::{
cache::Cache, config::SessionConfig, session::Session, spotify_id::SpotifyId,
};
use librespot::discovery::Credentials;
#[derive(Default, Debug, Clone)]
pub struct Settings {
@ -30,13 +30,13 @@ impl Settings {
pub fn properties() -> Vec<glib::ParamSpec> {
vec![glib::ParamSpecString::builder("username")
.nick("Username")
.blurb("Spotify username, Facebook accounts need a device username from https://www.spotify.com/us/account/set-device-password/")
.blurb("Spotify device username from https://www.spotify.com/us/account/set-device-password/")
.default_value(Some(""))
.mutable_ready()
.build(),
glib::ParamSpecString::builder("password")
.nick("Password")
.blurb("Spotify password, Facebook accounts need a device password from https://www.spotify.com/us/account/set-device-password/")
.blurb("Spotify device password from https://www.spotify.com/us/account/set-device-password/")
.default_value(Some(""))
.mutable_ready()
.build(),
@ -135,7 +135,7 @@ impl Settings {
if !self.username.is_empty() && self.username != cached_cred.username {
gst::debug!(
cat,
obj = &src,
obj: &src,
"ignore cached credentials for user {} which mismatch user {}",
cached_cred.username,
self.username
@ -143,7 +143,7 @@ impl Settings {
} else {
gst::debug!(
cat,
obj = &src,
obj: &src,
"reuse cached credentials for user {}",
cached_cred.username
);
@ -162,7 +162,7 @@ impl Settings {
gst::debug!(
cat,
obj = &src,
obj: &src,
"credentials not in cache or cached credentials invalid",
);

View file

@ -6,7 +6,7 @@
//
// SPDX-License-Identifier: MPL-2.0
use std::sync::{mpsc, Arc, Mutex};
use std::sync::{mpsc, Arc, Mutex, MutexGuard};
use futures::future::{AbortHandle, Abortable, Aborted};
use once_cell::sync::Lazy;
@ -17,7 +17,7 @@ use gst::prelude::*;
use gst::subclass::prelude::*;
use gst_base::subclass::{base_src::CreateSuccess, prelude::*};
use librespot_playback::{
use librespot::playback::{
audio_backend::{Sink, SinkResult},
config::PlayerConfig,
convert::Converter,
@ -66,42 +66,18 @@ struct Settings {
bitrate: Bitrate,
}
#[derive(Default)]
enum SetupThread {
#[default]
None,
Pending {
thread_handle: Option<std::thread::JoinHandle<Result<anyhow::Result<()>, Aborted>>>,
abort_handle: AbortHandle,
},
Cancelled,
Done,
}
impl SetupThread {
fn abort(&mut self) {
// Cancel setup thread if it is pending and not done yet
if matches!(self, SetupThread::None | SetupThread::Done) {
return;
}
if let SetupThread::Pending {
ref abort_handle, ..
} = *self
{
abort_handle.abort();
}
*self = SetupThread::Cancelled;
}
}
#[derive(Default)]
pub struct SpotifyAudioSrc {
setup_thread: Mutex<SetupThread>,
setup_thread: Mutex<Option<SetupThread>>,
state: Arc<Mutex<Option<State>>>,
settings: Mutex<Settings>,
}
struct SetupThread {
thread_handle: std::thread::JoinHandle<Result<anyhow::Result<()>, Aborted>>,
abort_handle: AbortHandle,
}
#[glib::object_subclass]
impl ObjectSubclass for SpotifyAudioSrc {
const NAME: &'static str = "GstSpotifyAudioSrc";
@ -196,20 +172,23 @@ impl BaseSrcImpl for SpotifyAudioSrc {
}
{
// If not started yet and not cancelled, start the setup
let mut setup_thread = self.setup_thread.lock().unwrap();
assert!(!matches!(&*setup_thread, SetupThread::Cancelled));
if matches!(&*setup_thread, SetupThread::None) {
self.start_setup(&mut setup_thread);
let setup_thread = self.setup_thread.lock().unwrap();
if setup_thread.is_some() {
// already starting
return Ok(());
}
self.start_setup(setup_thread);
}
Ok(())
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
// stop the setup if it's not completed yet
self.cancel_setup();
if let Some(state) = self.state.lock().unwrap().take() {
gst::debug!(CAT, imp = self, "stopping");
gst::debug!(CAT, imp: self, "stopping");
state.player.stop();
state.player_channel_handle.abort();
// FIXME: not sure why this is needed to unblock BufferSink::write(), dropping State should drop the receiver
@ -220,17 +199,9 @@ impl BaseSrcImpl for SpotifyAudioSrc {
}
fn unlock(&self) -> Result<(), gst::ErrorMessage> {
let mut setup_thread = self.setup_thread.lock().unwrap();
setup_thread.abort();
Ok(())
}
self.cancel_setup();
fn unlock_stop(&self) -> Result<(), gst::ErrorMessage> {
let mut setup_thread = self.setup_thread.lock().unwrap();
if matches!(&*setup_thread, SetupThread::Cancelled) {
*setup_thread = SetupThread::None;
}
Ok(())
self.parent_unlock()
}
}
@ -245,47 +216,30 @@ impl PushSrcImpl for SpotifyAudioSrc {
};
if !state_set {
// If not started yet and not cancelled, start the setup
let mut setup_thread = self.setup_thread.lock().unwrap();
if matches!(&*setup_thread, SetupThread::Cancelled) {
return Err(gst::FlowError::Flushing);
}
if matches!(&*setup_thread, SetupThread::None) {
self.start_setup(&mut setup_thread);
let setup_thread = self.setup_thread.lock().unwrap();
if setup_thread.is_none() {
// unlock() could potentially cancel the setup, and create() can be called after unlock() without going through start() again.
self.start_setup(setup_thread);
}
}
{
// wait for the setup to be completed
let mut setup_thread = self.setup_thread.lock().unwrap();
if let SetupThread::Pending {
ref mut thread_handle,
..
} = *setup_thread
{
let thread_handle = thread_handle.take().expect("Waiting multiple times");
drop(setup_thread);
let res = thread_handle.join().unwrap();
if let Some(setup) = setup_thread.take() {
let res = setup.thread_handle.join().unwrap();
match res {
Err(_aborted) => {
gst::debug!(CAT, imp = self, "setup has been cancelled");
setup_thread = self.setup_thread.lock().unwrap();
*setup_thread = SetupThread::Cancelled;
gst::debug!(CAT, imp: self, "setup has been cancelled");
return Err(gst::FlowError::Flushing);
}
Ok(Err(err)) => {
gst::error!(CAT, imp = self, "failed to start: {err:?}");
gst::error!(CAT, imp: self, "failed to start: {err:?}");
gst::element_imp_error!(self, gst::ResourceError::Settings, ["{err:?}"]);
setup_thread = self.setup_thread.lock().unwrap();
*setup_thread = SetupThread::None;
return Err(gst::FlowError::Error);
}
Ok(Ok(_)) => {
setup_thread = self.setup_thread.lock().unwrap();
*setup_thread = SetupThread::Done;
}
Ok(Ok(_)) => {}
}
}
}
@ -295,15 +249,15 @@ impl PushSrcImpl for SpotifyAudioSrc {
match state.receiver.recv().unwrap() {
Message::Buffer(buffer) => {
gst::log!(CAT, imp = self, "got buffer of size {}", buffer.size());
gst::log!(CAT, imp: self, "got buffer of size {}", buffer.size());
Ok(CreateSuccess::NewBuffer(buffer))
}
Message::Eos => {
gst::debug!(CAT, imp = self, "eos");
gst::debug!(CAT, imp: self, "eos");
Err(gst::FlowError::Eos)
}
Message::Unavailable => {
gst::error!(CAT, imp = self, "track is not available");
gst::error!(CAT, imp: self, "track is not available");
gst::element_imp_error!(
self,
gst::ResourceError::NotFound,
@ -352,7 +306,7 @@ impl URIHandlerImpl for SpotifyAudioSrc {
}
fn set_uri(&self, uri: &str) -> Result<(), glib::Error> {
gst::debug!(CAT, imp = self, "set URI: {}", uri);
gst::debug!(CAT, imp: self, "set URI: {}", uri);
let url = url::Url::parse(uri)
.map_err(|e| glib::Error::new(gst::URIError::BadUri, &format!("{e:?}")))?;
@ -364,7 +318,7 @@ impl URIHandlerImpl for SpotifyAudioSrc {
self.obj().set_property(&key, value.as_ref());
}
_ => {
gst::warning!(CAT, imp = self, "unsupported query: {}={}", key, value);
gst::warning!(CAT, imp: self, "unsupported query: {}={}", key, value);
}
}
}
@ -377,9 +331,7 @@ impl URIHandlerImpl for SpotifyAudioSrc {
}
impl SpotifyAudioSrc {
fn start_setup(&self, setup_thread: &mut SetupThread) {
assert!(matches!(setup_thread, SetupThread::None));
fn start_setup(&self, mut setup_thread: MutexGuard<Option<SetupThread>>) {
let self_ = self.to_owned();
// run the runtime from another thread to prevent the "start a runtime from within a runtime" panic
@ -392,10 +344,10 @@ impl SpotifyAudioSrc {
})
});
*setup_thread = SetupThread::Pending {
thread_handle: Some(thread_handle),
setup_thread.replace(SetupThread {
thread_handle,
abort_handle,
};
});
}
async fn setup(&self) -> anyhow::Result<()> {
@ -420,7 +372,7 @@ impl SpotifyAudioSrc {
let session = common.connect_session(src.clone(), &CAT).await?;
let track = common.track_id()?;
gst::debug!(CAT, imp = self, "Requesting bitrate {:?}", bitrate);
gst::debug!(CAT, imp: self, "Requesting bitrate {:?}", bitrate);
(session, track, bitrate)
};
@ -468,4 +420,12 @@ impl SpotifyAudioSrc {
Ok(())
}
fn cancel_setup(&self) {
let mut setup_thread = self.setup_thread.lock().unwrap();
if let Some(setup) = setup_thread.take() {
setup.abort_handle.abort();
}
}
}

View file

@ -29,7 +29,7 @@ impl Default for Bitrate {
}
}
impl From<Bitrate> for librespot_playback::config::Bitrate {
impl From<Bitrate> for librespot::playback::config::Bitrate {
fn from(value: Bitrate) -> Self {
match value {
Bitrate::B96 => Self::Bitrate96,

View file

@ -1,103 +0,0 @@
#!/usr/bin/python3
#
# Copied from gstreamer.git/ci/gitlab/trigger_cerbero_pipeline.py
import time
import os
import sys
import gitlab
CERBERO_PROJECT = 'gstreamer/cerbero'
class Status:
FAILED = 'failed'
MANUAL = 'manual'
CANCELED = 'canceled'
SUCCESS = 'success'
SKIPPED = 'skipped'
CREATED = 'created'
@classmethod
def is_finished(cls, state):
return state in [
cls.FAILED,
cls.MANUAL,
cls.CANCELED,
cls.SUCCESS,
cls.SKIPPED,
]
def fprint(msg):
print(msg, end="")
sys.stdout.flush()
if __name__ == "__main__":
server = os.environ['CI_SERVER_URL']
gl = gitlab.Gitlab(server,
private_token=os.environ.get('GITLAB_API_TOKEN'),
job_token=os.environ.get('CI_JOB_TOKEN'))
def get_matching_user_project(project, branch):
cerbero = gl.projects.get(project)
# Search for matching branches, return only if the branch name matches
# exactly
for b in cerbero.branches.list(search=cerbero_branch, iterator=True):
if branch == b.name:
return cerbero
return None
cerbero = None
# We do not want to run on (often out of date) user upstream branch
if os.environ["CI_COMMIT_REF_NAME"] != os.environ['CERBERO_UPSTREAM_BRANCH']:
try:
cerbero_name = f'{os.environ["CI_PROJECT_NAMESPACE"]}/cerbero'
cerbero_branch = os.environ["CI_COMMIT_REF_NAME"]
cerbero = get_matching_user_project(cerbero_name, cerbero_branch)
except gitlab.exceptions.GitlabGetError:
pass
if cerbero is None:
cerbero_name = CERBERO_PROJECT
cerbero_branch = os.environ["CERBERO_UPSTREAM_BRANCH"]
cerbero = gl.projects.get(cerbero_name)
fprint(f"-> Triggering on branch {cerbero_branch} in {cerbero_name}\n")
# CI_PROJECT_URL is not necessarily the project where the branch we need to
# build resides, for instance merge request pipelines can be run on
# 'gstreamer' namespace. Fetch the branch name in the same way, just in
# case it breaks in the future.
if 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' in os.environ:
project_url = os.environ['CI_MERGE_REQUEST_SOURCE_PROJECT_URL']
project_branch = os.environ['CI_MERGE_REQUEST_SOURCE_BRANCH_NAME']
else:
project_url = os.environ['CI_PROJECT_URL']
project_branch = os.environ['CI_COMMIT_REF_NAME']
variables = {
"CI_GST_PLUGINS_RS_URL": project_url,
"CI_GST_PLUGINS_RS_REF_NAME": project_branch,
# This tells cerbero CI that this is a pipeline started via the
# trigger API, which means it can use a deps cache instead of
# building from scratch.
"CI_GSTREAMER_TRIGGERED": "true",
}
pipe = cerbero.trigger_pipeline(
token=os.environ['CI_JOB_TOKEN'],
ref=cerbero_branch,
variables=variables,
)
fprint(f'Cerbero pipeline running at {pipe.web_url} ')
while True:
time.sleep(15)
pipe.refresh()
if Status.is_finished(pipe.status):
fprint(f": {pipe.status}\n")
sys.exit(0 if pipe.status == Status.SUCCESS else 1)
else:
fprint(".")

View file

@ -1,14 +0,0 @@
#!/bin/bash
MESON_VERSION=`head -n5 meson.build | grep ' version\s*:' | sed -e "s/.*version\s*:\s*'//" -e "s/',.*//"`
CARGO_VERSION=`cat Cargo.toml | grep -A1 workspace.package | grep ^version | sed -e 's/^version = "\(.*\)"/\1/'`
echo "gst-plugins-rs version (meson.build) : $MESON_VERSION"
echo "gst-plugins-rs version (Cargo.toml) : $CARGO_VERSION"
if test "x$MESON_VERSION" != "x$CARGO_VERSION"; then
echo
echo "===> Version mismatch between meson.build and Cargo.toml! <==="
echo
exit 1;
fi

11
ci/install-dav1d.sh Normal file
View file

@ -0,0 +1,11 @@
set -e
RELEASE=1.1.0
git clone https://code.videolan.org/videolan/dav1d.git --branch $RELEASE
cd dav1d
meson build -D prefix=/usr/local
ninja -C build
ninja -C build install
cd ..
rm -rf dav1d

6
ci/install-rust-ext.sh Executable file
View file

@ -0,0 +1,6 @@
source ./ci/env.sh
set -e
export CARGO_HOME='/usr/local/cargo'
cargo install cargo-c --version 0.9.15+cargo-0.67

View file

@ -36,7 +36,6 @@ function Run-Tests {
}
$env:G_DEBUG="fatal_warnings"
$env:RUST_BACKTRACE="1"
cargo test --no-fail-fast --color=always --workspace $local_exclude --all-targets $Features
if (!$?) {

152
deny.toml
View file

@ -1,7 +1,9 @@
[advisories]
version = 2
db-path = "~/.cargo/advisory-db"
db-urls = ["https://github.com/rustsec/advisory-db"]
vulnerability = "deny"
unmaintained = "warn"
notice = "warn"
ignore = [
# Waiting for https://github.com/librespot-org/librespot/issues/937
"RUSTSEC-2021-0059",
@ -9,27 +11,17 @@ ignore = [
"RUSTSEC-2021-0061",
"RUSTSEC-2021-0145",
# sodiumoxide is deprecated
# https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/530
"RUSTSEC-2021-0137",
# proc-macro-error is unmaintained
# https://github.com/yanganto/test-with/issues/91
"RUSTSEC-2024-0370",
]
[licenses]
version = 2
unlicensed = "deny"
allow = [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause",
"ISC",
"OpenSSL",
"Zlib",
"Unicode-DFS-2016",
"Apache-2.0",
"Apache-2.0 WITH LLVM-exception",
"MPL-2.0",
]
default = "deny"
copyleft = "deny"
allow-osi-fsf-free = "either"
confidence-threshold = 0.8
[[licenses.clarify]]
@ -78,18 +70,6 @@ version = "0.9"
[[bans.skip]]
name = "hmac"
version = "0.11"
[[bans.skip]]
name = "zerocopy"
version = "0.6"
[[bans.skip]]
name = "zerocopy-derive"
version = "0.6"
[[bans.skip]]
name = "multimap"
version = "0.8"
[[bans.skip]]
name = "nix"
version = "0.23"
# field-offset and nix depend on an older memoffset
# https://github.com/Diggsey/rust-field-offset/pull/23
@ -102,23 +82,22 @@ version = "0.6"
[[bans.skip]]
name = "hermit-abi"
version = "0.1"
[[bans.skip]]
name = "hermit-abi"
version = "0.3"
# Various crates depend on an older version of base64
[[bans.skip]]
name = "base64"
version = "0.13"
[[bans.skip]]
name = "base64"
version = "0.21"
# Various crates depend on an older version of socket2
[[bans.skip]]
name = "socket2"
version = "0.4"
# Various crates depend on an older version of syn
[[bans.skip]]
name = "syn"
version = "1.0"
# Various crates depend on an older version of bitflags
[[bans.skip]]
name = "bitflags"
@ -143,18 +122,10 @@ version = "1.0"
name = "hashbrown"
version = "0.12"
# various livekit dependencies depend on an old version of itertools and sync_wrapper
# various livekit dependencies depend on an old version of itertools
[[bans.skip]]
name = "itertools"
version = "0.11"
[[bans.skip]]
name = "sync_wrapper"
version = "0.1"
# various rav1e / dssim-core depend on an old version of itertools
[[bans.skip]]
name = "itertools"
version = "0.12"
# matchers depends on an old version of regex-automata
[[bans.skip]]
@ -208,102 +179,11 @@ version = "0.20"
name = "http"
version = "0.2"
# Various crates depend on an older version of heck
# proc-macro-crate depends on an older version of toml_edit
# https://github.com/bkchr/proc-macro-crate/pull/50
[[bans.skip]]
name = "heck"
version = "0.4"
# Various crates depend on an older version of hyper / reqwest / headers / etc
[[bans.skip]]
name = "hyper"
version = "0.14"
[[bans.skip]]
name = "hyper-tls"
version = "0.5"
[[bans.skip]]
name = "http-body"
version = "0.4"
[[bans.skip]]
name = "headers-core"
version = "0.2"
[[bans.skip]]
name = "headers"
version = "0.3"
[[bans.skip]]
name = "h2"
version = "0.3"
[[bans.skip]]
name = "reqwest"
version = "0.11"
[[bans.skip]]
name = "rustls-pemfile"
version = "1.0"
[[bans.skip]]
name = "winreg"
version = "0.50"
[[bans.skip]]
name = "system-configuration"
version = "0.5"
[[bans.skip]]
name = "system-configuration-sys"
version = "0.5"
# The AWS SDK uses old versions of rustls and related crates
[[bans.skip]]
name = "rustls"
name = "toml_edit"
version = "0.21"
[[bans.skip]]
name = "rustls-native-certs"
version = "0.6"
[[bans.skip]]
name = "rustls-webpki"
version = "0.101"
# warp depends on an older version of tokio-tungstenite
[[bans.skip]]
name = "tokio-tungstenite"
version = "0.21"
[[bans.skip]]
name = "tungstenite"
version = "0.21"
# various crates depend on an older version of system-deps
[[bans.skip]]
name = "system-deps"
version = "6"
# various crates depend on an older version of windows-sys
[[bans.skip]]
name = "windows-sys"
version = "0.52"
# derived-into-owned (via pcap-file) depends on old syn / quote
[[bans.skip]]
name = "syn"
version = "0.11"
[[bans.skip]]
name = "quote"
version = "0.3"
# dav1d depends on old system-deps which depends on old cfg-expr
[[bans.skip]]
name = "cfg-expr"
version = "0.15"
# backtrace and png depend on old miniz_oxide
[[bans.skip]]
name = "miniz_oxide"
version = "0.7"
# tokio-rustls via warp depends on old rustls
[[bans.skip]]
name = "rustls"
version = "0.22"
# aws-smithy-runtime depends on old tokio-rustls
[[bans.skip]]
name = "tokio-rustls"
version = "0.24"
[sources]
unknown-registry = "deny"

View file

@ -1,9 +1,5 @@
build_hotdoc = false
if get_option('doc').disabled()
subdir_done()
endif
if meson.is_cross_build()
if get_option('doc').enabled()
error('Documentation enabled but building the doc while cross building is not supported yet.')

File diff suppressed because it is too large

View file

@ -81,20 +81,20 @@ impl FileSink {
Some(ref location_cur) => {
gst::info!(
CAT,
imp = self,
imp: self,
"Changing `location` from {:?} to {}",
location_cur,
location,
);
}
None => {
gst::info!(CAT, imp = self, "Setting `location` to {}", location,);
gst::info!(CAT, imp: self, "Setting `location` to {}", location,);
}
}
Some(location)
}
None => {
gst::info!(CAT, imp = self, "Resetting `location` to None",);
gst::info!(CAT, imp: self, "Resetting `location` to None",);
None
}
};
@ -140,12 +140,7 @@ impl ObjectImpl for FileSink {
};
if let Err(err) = res {
gst::error!(
CAT,
imp = self,
"Failed to set property `location`: {}",
err
);
gst::error!(CAT, imp: self, "Failed to set property `location`: {}", err);
}
}
_ => unimplemented!(),
@ -227,10 +222,10 @@ impl BaseSinkImpl for FileSink {
]
)
})?;
gst::debug!(CAT, imp = self, "Opened file {:?}", file);
gst::debug!(CAT, imp: self, "Opened file {:?}", file);
*state = State::Started { file, position: 0 };
gst::info!(CAT, imp = self, "Started");
gst::info!(CAT, imp: self, "Started");
Ok(())
}
@ -245,7 +240,7 @@ impl BaseSinkImpl for FileSink {
}
*state = State::Stopped;
gst::info!(CAT, imp = self, "Stopped");
gst::info!(CAT, imp: self, "Stopped");
Ok(())
}
@ -265,7 +260,7 @@ impl BaseSinkImpl for FileSink {
}
};
gst::trace!(CAT, imp = self, "Rendering {:?}", buffer);
gst::trace!(CAT, imp: self, "Rendering {:?}", buffer);
let map = buffer.map_readable().map_err(|_| {
gst::element_imp_error!(self, gst::CoreError::Failed, ["Failed to map buffer"]);
gst::FlowError::Error

View file

@ -94,20 +94,20 @@ impl FileSrc {
Some(ref location_cur) => {
gst::info!(
CAT,
imp = self,
imp: self,
"Changing `location` from {:?} to {}",
location_cur,
location,
);
}
None => {
gst::info!(CAT, imp = self, "Setting `location to {}", location,);
gst::info!(CAT, imp: self, "Setting `location to {}", location,);
}
}
Some(location)
}
None => {
gst::info!(CAT, imp = self, "Resetting `location` to None",);
gst::info!(CAT, imp: self, "Resetting `location` to None",);
None
}
};
@ -148,12 +148,7 @@ impl ObjectImpl for FileSrc {
};
if let Err(err) = res {
gst::error!(
CAT,
imp = self,
"Failed to set property `location`: {}",
err
);
gst::error!(CAT, imp: self, "Failed to set property `location`: {}", err);
}
}
_ => unimplemented!(),
@ -255,11 +250,11 @@ impl BaseSrcImpl for FileSrc {
)
})?;
gst::debug!(CAT, imp = self, "Opened file {:?}", file);
gst::debug!(CAT, imp: self, "Opened file {:?}", file);
*state = State::Started { file, position: 0 };
gst::info!(CAT, imp = self, "Started");
gst::info!(CAT, imp: self, "Started");
Ok(())
}
@ -275,7 +270,7 @@ impl BaseSrcImpl for FileSrc {
*state = State::Stopped;
gst::info!(CAT, imp = self, "Stopped");
gst::info!(CAT, imp: self, "Stopped");
Ok(())
}

View file

@ -1,44 +0,0 @@
[package]
name = "gst-plugin-gopbuffer"
version.workspace = true
authors = ["Matthew Waters <matthew@centricular.com>"]
license = "MPL-2.0"
description = "Store complete groups of pictures at a time"
repository.workspace = true
edition.workspace = true
rust-version.workspace = true
[dependencies]
anyhow = "1"
gst = { workspace = true, features = ["v1_18"] }
gst-video = { workspace = true, features = ["v1_18"] }
once_cell.workspace = true
[lib]
name = "gstgopbuffer"
crate-type = ["cdylib", "rlib"]
path = "src/lib.rs"
[dev-dependencies]
gst-app = { workspace = true, features = ["v1_18"] }
gst-check = { workspace = true, features = ["v1_18"] }
[build-dependencies]
gst-plugin-version-helper.workspace = true
[features]
static = []
capi = []
[package.metadata.capi]
min_version = "0.8.0"
[package.metadata.capi.header]
enabled = false
[package.metadata.capi.library]
install_subdir = "gstreamer-1.0"
versioning = false
[package.metadata.capi.pkg_config]
requires_private = "gstreamer-1.0, gstreamer-base-1.0, gstreamer-audio-1.0, gstreamer-video-1.0, gobject-2.0, glib-2.0, gmodule-2.0"

View file

@ -1,373 +0,0 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

View file

@ -1,3 +0,0 @@
fn main() {
gst_plugin_version_helper::info()
}

View file

@ -1,897 +0,0 @@
// Copyright (C) 2023 Matthew Waters <matthew@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
/**
* SECTION:element-gopbuffer
*
* #gopbuffer is an element that can be used to store a minimum duration of data delimited by
* discrete GOPs (Groups of Pictures). It does this by differentiating on the DELTA_UNIT
* flag of each input buffer.
*
* One example of the usefulness of #gopbuffer is its ability to store a backlog of data starting
* on a key frame boundary, e.g. when the previous 10 seconds of a stream should be recorded to
* disk.
*
* ## Example pipeline
*
* |[
* gst-launch videotestsrc ! vp8enc ! gopbuffer minimum-duration=10000000000 ! fakesink
* ]|
*
* Since: plugins-rs-0.13.0
*/
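// A minimal usage sketch (assumptions: the element registers under the factory name
// "gopbuffer" and `minimum-duration` is a u64 property in nanoseconds, as the gst-launch
// example above suggests; error handling omitted):
//
//     // keep at least the last 10 seconds of complete GOPs queued
//     let gopbuffer = gst::ElementFactory::make("gopbuffer")
//         .property("minimum-duration", 10_000_000_000u64)
//         .build()
//         .expect("gopbuffer element not available");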
use gst::glib;
use gst::prelude::*;
use gst::subclass::prelude::*;
use std::collections::VecDeque;
use std::sync::Mutex;
use once_cell::sync::Lazy;
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"gopbuffer",
gst::DebugColorFlags::empty(),
Some("GopBuffer Element"),
)
});
const DEFAULT_MIN_TIME: gst::ClockTime = gst::ClockTime::from_seconds(1);
const DEFAULT_MAX_TIME: Option<gst::ClockTime> = None;
#[derive(Debug, Clone)]
struct Settings {
min_time: gst::ClockTime,
max_time: Option<gst::ClockTime>,
}
impl Default for Settings {
fn default() -> Self {
Settings {
min_time: DEFAULT_MIN_TIME,
max_time: DEFAULT_MAX_TIME,
}
}
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum DeltaFrames {
/// Only single completely decodable frames
IntraOnly,
/// Frames may depend on past frames
PredictiveOnly,
/// Frames may depend on past or future frames
Bidirectional,
}
impl DeltaFrames {
/// Whether dts is required to order buffers differently from presentation order
pub(crate) fn requires_dts(&self) -> bool {
matches!(self, Self::Bidirectional)
}
/// Whether this coding structure does not allow delta flags on buffers
pub(crate) fn intra_only(&self) -> bool {
matches!(self, Self::IntraOnly)
}
pub(crate) fn from_caps(caps: &gst::CapsRef) -> Option<Self> {
let s = caps.structure(0)?;
Some(match s.name().as_str() {
"video/x-h264" | "video/x-h265" => DeltaFrames::Bidirectional,
"video/x-vp8" | "video/x-vp9" | "video/x-av1" => DeltaFrames::PredictiveOnly,
"image/jpeg" | "image/png" | "video/x-raw" => DeltaFrames::IntraOnly,
_ => return None,
})
}
}
// TODO: add buffer list support
#[derive(Debug)]
enum GopItem {
Buffer(gst::Buffer),
Event(gst::Event),
}
struct Gop {
// all times are in running time
start_pts: gst::ClockTime,
start_dts: Option<gst::Signed<gst::ClockTime>>,
earliest_pts: gst::ClockTime,
final_earliest_pts: bool,
end_pts: gst::ClockTime,
end_dts: Option<gst::Signed<gst::ClockTime>>,
final_end_pts: bool,
// Buffer or event
data: VecDeque<GopItem>,
}
impl Gop {
fn push_on_pad(mut self, pad: &gst::Pad) -> Result<gst::FlowSuccess, gst::FlowError> {
let mut iter = self.data.iter().filter_map(|item| match item {
GopItem::Buffer(buffer) => buffer.pts(),
_ => None,
});
let first_pts = iter.next();
let last_pts = iter.last();
gst::debug!(
CAT,
"pushing gop with start pts {} end pts {}",
first_pts.display(),
last_pts.display(),
);
for item in self.data.drain(..) {
match item {
GopItem::Buffer(buffer) => {
pad.push(buffer)?;
}
GopItem::Event(event) => {
pad.push_event(event);
}
}
}
Ok(gst::FlowSuccess::Ok)
}
}
struct Stream {
sinkpad: gst::Pad,
srcpad: gst::Pad,
sink_segment: Option<gst::FormattedSegment<gst::ClockTime>>,
delta_frames: DeltaFrames,
queued_gops: VecDeque<Gop>,
}
impl Stream {
fn queue_buffer(
&mut self,
buffer: gst::Buffer,
segment: &gst::FormattedSegment<gst::ClockTime>,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let pts_position = buffer.pts().unwrap();
let end_pts_position = pts_position
.opt_add(buffer.duration())
.unwrap_or(pts_position);
let pts = segment
.to_running_time_full(pts_position)
.ok_or_else(|| {
gst::error!(
CAT,
obj = self.sinkpad,
"Couldn't convert PTS to running time"
);
gst::FlowError::Error
})?
.positive()
.unwrap_or_else(|| {
gst::warning!(CAT, obj = self.sinkpad, "Negative PTSs are not supported");
gst::ClockTime::ZERO
});
let end_pts = segment
.to_running_time_full(end_pts_position)
.ok_or_else(|| {
gst::error!(
CAT,
obj = self.sinkpad,
"Couldn't convert end PTS to running time"
);
gst::FlowError::Error
})?
.positive()
.unwrap_or_else(|| {
gst::warning!(CAT, obj = self.sinkpad, "Negative PTSs are not supported");
gst::ClockTime::ZERO
});
let (dts, end_dts) = if !self.delta_frames.requires_dts() {
(None, None)
} else {
let dts_position = buffer.dts().expect("No dts");
let end_dts_position = buffer
.duration()
.opt_add(dts_position)
.unwrap_or(dts_position);
let dts = segment.to_running_time_full(dts_position).ok_or_else(|| {
gst::error!(
CAT,
obj = self.sinkpad,
"Couldn't convert DTS to running time"
);
gst::FlowError::Error
})?;
let end_dts = segment
.to_running_time_full(end_dts_position)
.ok_or_else(|| {
gst::error!(
CAT,
obj = self.sinkpad,
"Couldn't convert end DTS to running time"
);
gst::FlowError::Error
})?;
let end_dts = std::cmp::max(end_dts, dts);
(Some(dts), Some(end_dts))
};
if !buffer.flags().contains(gst::BufferFlags::DELTA_UNIT) {
gst::debug!(
CAT,
"New GOP detected with buffer pts {} dts {}",
buffer.pts().display(),
buffer.dts().display()
);
let gop = Gop {
start_pts: pts,
start_dts: dts,
earliest_pts: pts,
final_earliest_pts: false,
end_pts: pts,
end_dts,
final_end_pts: false,
data: VecDeque::from([GopItem::Buffer(buffer)]),
};
self.queued_gops.push_front(gop);
if let Some(prev_gop) = self.queued_gops.get_mut(1) {
gst::debug!(
CAT,
obj = self.sinkpad,
"Updating previous GOP starting at PTS {} to end PTS {}",
prev_gop.earliest_pts,
pts,
);
prev_gop.end_pts = std::cmp::max(prev_gop.end_pts, pts);
prev_gop.end_dts = std::cmp::max(prev_gop.end_dts, dts);
if !self.delta_frames.requires_dts() {
prev_gop.final_end_pts = true;
}
if !prev_gop.final_earliest_pts {
// Don't bother logging this for intra-only streams as it would be for every
// single buffer.
if self.delta_frames.requires_dts() {
gst::debug!(
CAT,
obj = self.sinkpad,
"Previous GOP has final earliest PTS at {}",
prev_gop.earliest_pts
);
}
prev_gop.final_earliest_pts = true;
if let Some(prev_prev_gop) = self.queued_gops.get_mut(2) {
prev_prev_gop.final_end_pts = true;
}
}
}
} else if let Some(gop) = self.queued_gops.front_mut() {
gop.end_pts = std::cmp::max(gop.end_pts, end_pts);
gop.end_dts = gop.end_dts.opt_max(end_dts);
gop.data.push_back(GopItem::Buffer(buffer));
if self.delta_frames.requires_dts() {
let dts = dts.unwrap();
if gop.earliest_pts > pts && !gop.final_earliest_pts {
gst::debug!(
CAT,
obj = self.sinkpad,
"Updating current GOP earliest PTS from {} to {}",
gop.earliest_pts,
pts
);
gop.earliest_pts = pts;
if let Some(prev_gop) = self.queued_gops.get_mut(1) {
if prev_gop.end_pts < pts {
gst::debug!(
CAT,
obj = self.sinkpad,
"Updating previous GOP starting PTS {} end time from {} to {}",
pts,
prev_gop.end_pts,
pts
);
prev_gop.end_pts = pts;
}
}
}
let gop = self.queued_gops.front_mut().unwrap();
// The earliest PTS is known when the current DTS is bigger or equal to the first
// PTS that was observed in this GOP. If there was another frame later that had a
// lower PTS then it wouldn't be possible to display it in time anymore, i.e. the
// stream would be invalid.
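// Illustrative example (made-up numbers, assuming PTS >= DTS for every frame and a
// non-decreasing DTS): a GOP with B-frames may arrive as (PTS, DTS) pairs (4, 0), (8, 1),
// (5, 2), (6, 3), (7, 4). The first observed PTS is 4; once the buffer with DTS 4 arrives,
// every not-yet-seen frame must have PTS >= its DTS >= 4, so the earliest PTS of this GOP
// can no longer decrease and is final.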
if gop.start_pts <= dts && !gop.final_earliest_pts {
gst::debug!(
CAT,
obj = self.sinkpad,
"GOP has final earliest PTS at {}",
gop.earliest_pts
);
gop.final_earliest_pts = true;
if let Some(prev_gop) = self.queued_gops.get_mut(1) {
prev_gop.final_end_pts = true;
}
}
}
} else {
gst::debug!(
CAT,
"dropping buffer before first GOP with pts {} dts {}",
buffer.pts().display(),
buffer.dts().display()
);
}
if let Some((prev_gop, first_gop)) = Option::zip(
self.queued_gops.iter().find(|gop| gop.final_end_pts),
self.queued_gops.back(),
) {
gst::debug!(
CAT,
obj = self.sinkpad,
"Queued full GOPs duration updated to {}",
prev_gop.end_pts.saturating_sub(first_gop.earliest_pts),
);
}
gst::debug!(
CAT,
obj = self.sinkpad,
"Queued duration updated to {}",
Option::zip(self.queued_gops.front(), self.queued_gops.back())
.map(|(end, start)| end.end_pts.saturating_sub(start.start_pts))
.unwrap_or(gst::ClockTime::ZERO)
);
Ok(gst::FlowSuccess::Ok)
}
fn oldest_gop(&mut self) -> Option<Gop> {
self.queued_gops.pop_back()
}
fn peek_oldest_gop(&self) -> Option<&Gop> {
self.queued_gops.back()
}
fn peek_second_oldest_gop(&self) -> Option<&Gop> {
if self.queued_gops.len() <= 1 {
return None;
}
self.queued_gops.get(self.queued_gops.len() - 2)
}
fn drain_all(&mut self) -> impl Iterator<Item = Gop> + '_ {
self.queued_gops.drain(..)
}
fn flush(&mut self) {
self.queued_gops.clear();
}
}
#[derive(Default)]
struct State {
streams: Vec<Stream>,
}
impl State {
fn stream_from_sink_pad(&self, pad: &gst::Pad) -> Option<&Stream> {
self.streams.iter().find(|stream| &stream.sinkpad == pad)
}
fn stream_from_sink_pad_mut(&mut self, pad: &gst::Pad) -> Option<&mut Stream> {
self.streams
.iter_mut()
.find(|stream| &stream.sinkpad == pad)
}
fn stream_from_src_pad(&self, pad: &gst::Pad) -> Option<&Stream> {
self.streams.iter().find(|stream| &stream.srcpad == pad)
}
}
#[derive(Default)]
pub(crate) struct GopBuffer {
state: Mutex<State>,
settings: Mutex<Settings>,
}
impl GopBuffer {
fn sink_chain(
&self,
pad: &gst::Pad,
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let obj = self.obj();
if buffer.pts().is_none() {
gst::error!(CAT, obj = obj, "Require timestamped buffers!");
return Err(gst::FlowError::Error);
}
let settings = self.settings.lock().unwrap().clone();
let mut state = self.state.lock().unwrap();
let stream = state
.stream_from_sink_pad_mut(pad)
.expect("pad without an internal Stream");
let Some(segment) = stream.sink_segment.clone() else {
gst::element_imp_error!(self, gst::CoreError::Clock, ["Got buffer before segment"]);
return Err(gst::FlowError::Error);
};
if stream.delta_frames.intra_only() && buffer.flags().contains(gst::BufferFlags::DELTA_UNIT)
{
gst::error!(CAT, obj = pad, "Intra-only stream with delta units");
return Err(gst::FlowError::Error);
}
if stream.delta_frames.requires_dts() && buffer.dts().is_none() {
gst::error!(CAT, obj = pad, "Require DTS for video streams");
return Err(gst::FlowError::Error);
}
let srcpad = stream.srcpad.clone();
stream.queue_buffer(buffer, &segment)?;
let mut gops_to_push = vec![];
let Some(newest_gop) = stream.queued_gops.front() else {
return Ok(gst::FlowSuccess::Ok);
};
// we are looking for the latest pts value here (which should be the largest)
let newest_ts = if stream.delta_frames.requires_dts() {
newest_gop.end_dts.unwrap()
} else {
gst::Signed::Positive(newest_gop.end_pts)
};
loop {
// check stored times as though the oldest GOP doesn't exist.
let Some(second_oldest_gop) = stream.peek_second_oldest_gop() else {
break;
};
// we are looking for the oldest pts here (with the largest value). This is our potentially
// new end time.
let oldest_ts = if stream.delta_frames.requires_dts() {
second_oldest_gop.start_dts.unwrap()
} else {
gst::Signed::Positive(second_oldest_gop.start_pts)
};
let stored_duration_without_oldest = newest_ts.saturating_sub(oldest_ts);
gst::trace!(
CAT,
obj = obj,
"newest_pts {}, second oldest_pts {}, stored_duration_without_oldest_gop {}, min-time {}",
newest_ts.display(),
oldest_ts.display(),
stored_duration_without_oldest.display(),
settings.min_time.display()
);
if stored_duration_without_oldest < settings.min_time {
break;
}
gops_to_push.push(stream.oldest_gop().unwrap());
}
if let Some(max_time) = settings.max_time {
while let Some(oldest_gop) = stream.peek_oldest_gop() {
let oldest_ts = oldest_gop.data.iter().rev().find_map(|item| match item {
GopItem::Buffer(buffer) => {
if stream.delta_frames.requires_dts() {
Some(gst::Signed::Positive(buffer.dts().unwrap()))
} else {
Some(gst::Signed::Positive(buffer.pts().unwrap()))
}
}
_ => None,
});
if newest_ts
.opt_saturating_sub(oldest_ts)
.is_some_and(|diff| diff > gst::Signed::Positive(max_time))
{
gst::warning!(CAT, obj = obj, "Stored data has overflowed the maximum allowed stored time {}, pushing oldest GOP", max_time.display());
gops_to_push.push(stream.oldest_gop().unwrap());
} else {
break;
}
}
}
drop(state);
for gop in gops_to_push.into_iter() {
gop.push_on_pad(&srcpad)?;
}
Ok(gst::FlowSuccess::Ok)
}
fn sink_event(&self, pad: &gst::Pad, event: gst::Event) -> bool {
let obj = self.obj();
let mut state = self.state.lock().unwrap();
let stream = state
.stream_from_sink_pad_mut(pad)
.expect("pad without an internal Stream!");
match event.view() {
gst::EventView::Caps(caps) => {
let Some(delta_frames) = DeltaFrames::from_caps(caps.caps()) else {
return false;
};
stream.delta_frames = delta_frames;
}
gst::EventView::FlushStop(_flush) => {
gst::debug!(CAT, obj = obj, "flushing stored data");
stream.flush();
}
gst::EventView::Eos(_eos) => {
gst::debug!(CAT, obj = obj, "draining data at EOS");
let gops = stream.drain_all().collect::<Vec<_>>();
let srcpad = stream.srcpad.clone();
drop(state);
for gop in gops.into_iter() {
let _ = gop.push_on_pad(&srcpad);
}
// once we've pushed all the data, we can push the corresponding eos
gst::Pad::event_default(pad, Some(&*obj), event);
return true;
}
gst::EventView::Segment(segment) => {
let Ok(segment) = segment.segment().clone().downcast::<gst::ClockTime>() else {
gst::error!(CAT, "Non TIME segments are not supported");
return false;
};
stream.sink_segment = Some(segment);
}
_ => (),
};
if event.is_serialized() {
if stream.peek_oldest_gop().is_none() {
// if there is nothing queued, the event can go straight through
gst::trace!(
CAT,
obj = obj,
"nothing queued, event {:?} passthrough",
event.structure().map(|s| s.name().as_str())
);
drop(state);
return gst::Pad::event_default(pad, Some(&*obj), event);
}
let gop = stream.queued_gops.front_mut().unwrap();
gop.data.push_back(GopItem::Event(event));
true
} else {
// non-serialized events can be pushed directly
drop(state);
gst::Pad::event_default(pad, Some(&*obj), event)
}
}
fn sink_query(&self, pad: &gst::Pad, query: &mut gst::QueryRef) -> bool {
let obj = self.obj();
if query.is_serialized() {
// TODO: serialized queries somehow?
gst::warning!(
CAT,
obj = pad,
"Serialized queries are currently not supported"
);
return false;
}
gst::Pad::query_default(pad, Some(&*obj), query)
}
fn src_query(&self, pad: &gst::Pad, query: &mut gst::QueryRef) -> bool {
let obj = self.obj();
match query.view_mut() {
gst::QueryViewMut::Latency(latency) => {
let mut upstream_query = gst::query::Latency::new();
let otherpad = {
let state = self.state.lock().unwrap();
let Some(stream) = state.stream_from_src_pad(pad) else {
return false;
};
stream.sinkpad.clone()
};
let ret = otherpad.peer_query(&mut upstream_query);
if ret {
let (live, mut min, mut max) = upstream_query.result();
let settings = self.settings.lock().unwrap();
min += settings.max_time.unwrap_or(settings.min_time);
max = max.opt_max(settings.max_time);
latency.set(live, min, max);
gst::debug!(
CAT,
obj = pad,
"Latency query response: live {} min {} max {}",
live,
min,
max.display()
);
}
ret
}
_ => gst::Pad::query_default(pad, Some(&*obj), query),
}
}
fn iterate_internal_links(&self, pad: &gst::Pad) -> gst::Iterator<gst::Pad> {
let state = self.state.lock().unwrap();
let otherpad = match pad.direction() {
gst::PadDirection::Src => state
.stream_from_src_pad(pad)
.map(|stream| stream.sinkpad.clone()),
gst::PadDirection::Sink => state
.stream_from_sink_pad(pad)
.map(|stream| stream.srcpad.clone()),
_ => unreachable!(),
};
if let Some(otherpad) = otherpad {
gst::Iterator::from_vec(vec![otherpad])
} else {
gst::Iterator::from_vec(vec![])
}
}
}
#[glib::object_subclass]
impl ObjectSubclass for GopBuffer {
const NAME: &'static str = "GstGopBuffer";
type Type = super::GopBuffer;
type ParentType = gst::Element;
}
impl ObjectImpl for GopBuffer {
fn properties() -> &'static [glib::ParamSpec] {
static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| {
vec![
glib::ParamSpecUInt64::builder("minimum-duration")
.nick("Minimum Duration")
.blurb("The minimum duration to store")
.default_value(DEFAULT_MIN_TIME.nseconds())
.mutable_ready()
.build(),
glib::ParamSpecUInt64::builder("max-size-time")
.nick("Maximum Duration")
.blurb("The maximum duration to store (0=disable)")
.default_value(0)
.mutable_ready()
.build(),
]
});
&PROPERTIES
}
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
match pspec.name() {
"minimum-duration" => {
let mut settings = self.settings.lock().unwrap();
let min_time = value.get().expect("type checked upstream");
if settings.min_time != min_time {
settings.min_time = min_time;
drop(settings);
self.post_message(gst::message::Latency::builder().src(&*self.obj()).build());
}
}
"max-size-time" => {
let mut settings = self.settings.lock().unwrap();
let max_time = value
.get::<Option<gst::ClockTime>>()
.expect("type checked upstream");
let max_time = if matches!(max_time, Some(gst::ClockTime::ZERO) | None) {
None
} else {
max_time
};
if settings.max_time != max_time {
settings.max_time = max_time;
drop(settings);
self.post_message(gst::message::Latency::builder().src(&*self.obj()).build());
}
}
_ => unimplemented!(),
}
}
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
match pspec.name() {
"minimum-duration" => {
let settings = self.settings.lock().unwrap();
settings.min_time.to_value()
}
"max-size-time" => {
let settings = self.settings.lock().unwrap();
settings.max_time.unwrap_or(gst::ClockTime::ZERO).to_value()
}
_ => unimplemented!(),
}
}
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
let class = obj.class();
let templ = class.pad_template("video_sink").unwrap();
let sinkpad = gst::Pad::builder_from_template(&templ)
.name("video_sink")
.chain_function(|pad, parent, buffer| {
GopBuffer::catch_panic_pad_function(
parent,
|| Err(gst::FlowError::Error),
|gopbuffer| gopbuffer.sink_chain(pad, buffer),
)
})
.event_function(|pad, parent, event| {
GopBuffer::catch_panic_pad_function(
parent,
|| false,
|gopbuffer| gopbuffer.sink_event(pad, event),
)
})
.query_function(|pad, parent, query| {
GopBuffer::catch_panic_pad_function(
parent,
|| false,
|gopbuffer| gopbuffer.sink_query(pad, query),
)
})
.iterate_internal_links_function(|pad, parent| {
GopBuffer::catch_panic_pad_function(
parent,
|| gst::Pad::iterate_internal_links_default(pad, parent),
|gopbuffer| gopbuffer.iterate_internal_links(pad),
)
})
.flags(gst::PadFlags::PROXY_CAPS)
.build();
obj.add_pad(&sinkpad).unwrap();
let templ = class.pad_template("video_src").unwrap();
let srcpad = gst::Pad::builder_from_template(&templ)
.name("video_src")
.query_function(|pad, parent, query| {
GopBuffer::catch_panic_pad_function(
parent,
|| false,
|gopbuffer| gopbuffer.src_query(pad, query),
)
})
.iterate_internal_links_function(|pad, parent| {
GopBuffer::catch_panic_pad_function(
parent,
|| gst::Pad::iterate_internal_links_default(pad, parent),
|gopbuffer| gopbuffer.iterate_internal_links(pad),
)
})
.build();
obj.add_pad(&srcpad).unwrap();
let mut state = self.state.lock().unwrap();
state.streams.push(Stream {
sinkpad,
srcpad,
sink_segment: None,
delta_frames: DeltaFrames::IntraOnly,
queued_gops: VecDeque::new(),
});
}
}
impl GstObjectImpl for GopBuffer {}
impl ElementImpl for GopBuffer {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"GopBuffer",
"Video",
"GOP Buffer",
"Matthew Waters <matthew@centricular.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
// This element is designed to support multiple streams, but that has not been
// implemented yet.
//
// The things missing for multiple (audio or video) streams are:
// 1. More pad templates
// 2. Choosing a main stream to drive the timestamp logic between all input streams
// 3. Allowing either the main stream to cause other streams to push data
// regardless of its GOP state, or allow each stream to be individually delimited
// by GOP but all still within the minimum duration.
let video_caps = [
gst::Structure::builder("video/x-h264")
.field("stream-format", gst::List::new(["avc", "avc3"]))
.field("alignment", "au")
.build(),
gst::Structure::builder("video/x-h265")
.field("stream-format", gst::List::new(["hvc1", "hev1"]))
.field("alignment", "au")
.build(),
gst::Structure::builder("video/x-vp8").build(),
gst::Structure::builder("video/x-vp9").build(),
gst::Structure::builder("video/x-av1")
.field("stream-format", "obu-stream")
.field("alignment", "tu")
.build(),
]
.into_iter()
.collect::<gst::Caps>();
let src_pad_template = gst::PadTemplate::new(
"video_src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&video_caps,
)
.unwrap();
let sink_pad_template = gst::PadTemplate::new(
"video_sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&video_caps,
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
fn change_state(
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
#[allow(clippy::single_match)]
match transition {
gst::StateChange::NullToReady => {
let settings = self.settings.lock().unwrap();
if let Some(max_time) = settings.max_time {
if max_time < settings.min_time {
gst::element_imp_error!(
self,
gst::CoreError::StateChange,
["Configured maximum time is less than the minimum time"]
);
return Err(gst::StateChangeError);
}
}
}
_ => (),
}
self.parent_change_state(transition)?;
Ok(gst::StateChangeSuccess::Success)
}
}
View file
@ -1,27 +0,0 @@
// Copyright (C) 2022 Matthew Waters <matthew@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub(crate) struct GopBuffer(ObjectSubclass<imp::GopBuffer>) @extends gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"gopbuffer",
gst::Rank::PRIMARY,
GopBuffer::static_type(),
)?;
Ok(())
}
View file
@ -1,34 +0,0 @@
// Copyright (C) 2022 Matthew Waters <matthew@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(clippy::non_send_fields_in_send_ty, unused_doc_comments)]
/**
* plugin-gopbuffer:
*
* Since: plugins-rs-0.13.0
*/
use gst::glib;
mod gopbuffer;
fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gopbuffer::register(plugin)
}
gst::plugin_define!(
gopbuffer,
env!("CARGO_PKG_DESCRIPTION"),
plugin_init,
concat!(env!("CARGO_PKG_VERSION"), "-", env!("COMMIT_ID")),
// FIXME: MPL-2.0 is only allowed since 1.18.3 (as unknown) and 1.20 (as known)
"MPL",
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_REPOSITORY"),
env!("BUILD_REL_DATE")
);
View file
@ -1,128 +0,0 @@
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
//
use gst::prelude::*;
fn init() {
use std::sync::Once;
static INIT: Once = Once::new();
INIT.call_once(|| {
gst::init().unwrap();
gstgopbuffer::plugin_register_static().unwrap();
});
}
macro_rules! check_buffer {
($buf1:expr, $buf2:expr) => {
assert_eq!($buf1.pts(), $buf2.pts());
assert_eq!($buf1.dts(), $buf2.dts());
assert_eq!($buf1.flags(), $buf2.flags());
};
}
#[test]
fn test_min_one_gop_held() {
const OFFSET: gst::ClockTime = gst::ClockTime::from_seconds(10);
init();
let mut h =
gst_check::Harness::with_padnames("gopbuffer", Some("video_sink"), Some("video_src"));
// 200ms min buffer time
let element = h.element().unwrap();
element.set_property("minimum-duration", gst::ClockTime::from_mseconds(200));
h.set_src_caps(
gst::Caps::builder("video/x-h264")
.field("width", 320i32)
.field("height", 240i32)
.field("framerate", gst::Fraction::new(10, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::with_size(1).unwrap())
.build(),
);
let mut in_segment = gst::Segment::new();
in_segment.set_format(gst::Format::Time);
in_segment.set_base(10.seconds());
assert!(h.push_event(gst::event::Segment::builder(&in_segment).build()));
h.play();
// Push 6 buffers of 100ms each, 2nd and 5th buffer without DELTA_UNIT flag
let in_buffers: Vec<_> = (0..6)
.map(|i| {
let mut buffer = gst::Buffer::with_size(1).unwrap();
{
let buffer = buffer.get_mut().unwrap();
buffer.set_pts(OFFSET + gst::ClockTime::from_mseconds(i * 100));
buffer.set_dts(OFFSET + gst::ClockTime::from_mseconds(i * 100));
buffer.set_duration(gst::ClockTime::from_mseconds(100));
if i != 1 && i != 4 {
buffer.set_flags(gst::BufferFlags::DELTA_UNIT);
}
}
assert_eq!(h.push(buffer.clone()), Ok(gst::FlowSuccess::Ok));
buffer
})
.collect();
// pull mandatory events
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::StreamStart);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Caps);
// GstHarness pushes its own segment event that we need to eat
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Segment);
let ev = h.pull_event().unwrap();
let gst::event::EventView::Segment(recv_segment) = ev.view() else {
unreachable!()
};
let recv_segment = recv_segment.segment();
assert_eq!(recv_segment, &in_segment);
// check that at least the first GOP has been output already as it exceeds the
// minimum-duration value
let mut in_iter = in_buffers.iter();
// the first buffer is dropped because it was not preceded by a keyframe
let _buffer = in_iter.next().unwrap();
// a keyframe
let out = h.pull().unwrap();
let buffer = in_iter.next().unwrap();
check_buffer!(buffer, out);
// not a keyframe
let out = h.pull().unwrap();
let buffer = in_iter.next().unwrap();
check_buffer!(buffer, out);
// not a keyframe
let out = h.pull().unwrap();
let buffer = in_iter.next().unwrap();
check_buffer!(buffer, out);
// no more buffers
assert_eq!(h.buffers_in_queue(), 0);
// push eos to drain out the rest of the data
assert!(h.push_event(gst::event::Eos::new()));
for buffer in in_iter {
let out = h.pull().unwrap();
check_buffer!(buffer, out);
}
// no more buffers
assert_eq!(h.buffers_in_queue(), 0);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Eos);
}
View file
@ -108,7 +108,7 @@ impl ObjectImpl for InterSink {
InterStreamProducer::acquire(&settings.producer_name, &appsink)
{
drop(settings);
gst::error!(CAT, imp = self, "{err}");
gst::error!(CAT, imp: self, "{err}");
self.post_error_message(gst::error_msg!(
gst::StreamError::Failed,
["{err}"]
@ -191,7 +191,7 @@ impl ElementImpl for InterSink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
if transition == gst::StateChange::ReadyToPaused {
if let Err(err) = self.prepare() {
View file
@ -177,7 +177,7 @@ impl ElementImpl for InterSrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
if transition == gst::StateChange::ReadyToPaused {
if let Err(err) = self.prepare() {
View file
@ -1,43 +0,0 @@
[package]
name = "gst-plugin-originalbuffer"
version.workspace = true
authors = ["Olivier Crête <olivier.crete@collabora.com>"]
repository.workspace = true
license = "MPL-2.0"
description = "GStreamer Origin buffer meta Plugin"
edition.workspace = true
rust-version.workspace = true
[dependencies]
glib.workspace = true
gst.workspace = true
gst-video.workspace = true
atomic_refcell = "0.1"
once_cell.workspace = true
[lib]
name = "gstoriginalbuffer"
crate-type = ["cdylib", "rlib"]
path = "src/lib.rs"
[build-dependencies]
gst-plugin-version-helper.workspace = true
[features]
static = []
capi = []
doc = ["gst/v1_16"]
[package.metadata.capi]
min_version = "0.9.21"
[package.metadata.capi.header]
enabled = false
[package.metadata.capi.library]
install_subdir = "gstreamer-1.0"
versioning = false
import_library = false
[package.metadata.capi.pkg_config]
requires_private = "gstreamer-1.0, gstreamer-base-1.0, gobject-2.0, glib-2.0, gmodule-2.0"
View file
@ -1,3 +0,0 @@
fn main() {
gst_plugin_version_helper::info()
}
View file
@ -1,38 +0,0 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(clippy::non_send_fields_in_send_ty, unused_doc_comments)]
/**
* plugin-originalbuffer:
*
* Since: plugins-rs-0.12 */
use gst::glib;
mod originalbuffermeta;
mod originalbufferrestore;
mod originalbuffersave;
fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
originalbuffersave::register(plugin)?;
originalbufferrestore::register(plugin)?;
Ok(())
}
gst::plugin_define!(
originalbuffer,
env!("CARGO_PKG_DESCRIPTION"),
plugin_init,
concat!(env!("CARGO_PKG_VERSION"), "-", env!("COMMIT_ID")),
"MPL",
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_REPOSITORY"),
env!("BUILD_REL_DATE")
);
View file
@ -1,199 +0,0 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::prelude::*;
use std::fmt;
use std::mem;
#[repr(transparent)]
pub struct OriginalBufferMeta(imp::OriginalBufferMeta);
unsafe impl Send for OriginalBufferMeta {}
unsafe impl Sync for OriginalBufferMeta {}
impl OriginalBufferMeta {
pub fn add(
buffer: &mut gst::BufferRef,
original: gst::Buffer,
caps: Option<gst::Caps>,
) -> gst::MetaRefMut<'_, Self, gst::meta::Standalone> {
unsafe {
// Manually dropping because gst_buffer_add_meta() takes ownership of the
// content of the struct
let mut params =
mem::ManuallyDrop::new(imp::OriginalBufferMetaParams { original, caps });
let meta = gst::ffi::gst_buffer_add_meta(
buffer.as_mut_ptr(),
imp::original_buffer_meta_get_info(),
&mut *params as *mut imp::OriginalBufferMetaParams as gst::glib::ffi::gpointer,
) as *mut imp::OriginalBufferMeta;
Self::from_mut_ptr(buffer, meta)
}
}
pub fn replace(&mut self, original: gst::Buffer, caps: Option<gst::Caps>) {
self.0.original = Some(original);
self.0.caps = caps;
}
pub fn original(&self) -> &gst::Buffer {
self.0.original.as_ref().unwrap()
}
pub fn caps(&self) -> &gst::Caps {
self.0.caps.as_ref().unwrap()
}
}
unsafe impl MetaAPI for OriginalBufferMeta {
type GstType = imp::OriginalBufferMeta;
fn meta_api() -> gst::glib::Type {
imp::original_buffer_meta_api_get_type()
}
}
impl fmt::Debug for OriginalBufferMeta {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("OriginalBufferMeta")
.field("buffer", &self.original())
.finish()
}
}
mod imp {
use gst::glib::translate::*;
use once_cell::sync::Lazy;
use std::mem;
use std::ptr;
pub(super) struct OriginalBufferMetaParams {
pub original: gst::Buffer,
pub caps: Option<gst::Caps>,
}
#[repr(C)]
pub struct OriginalBufferMeta {
parent: gst::ffi::GstMeta,
pub(super) original: Option<gst::Buffer>,
pub(super) caps: Option<gst::Caps>,
}
pub(super) fn original_buffer_meta_api_get_type() -> glib::Type {
static TYPE: Lazy<glib::Type> = Lazy::new(|| unsafe {
let t = from_glib(gst::ffi::gst_meta_api_type_register(
b"GstOriginalBufferMetaAPI\0".as_ptr() as *const _,
[ptr::null::<std::os::raw::c_char>()].as_ptr() as *mut *const _,
));
assert_ne!(t, glib::Type::INVALID);
t
});
*TYPE
}
unsafe extern "C" fn original_buffer_meta_init(
meta: *mut gst::ffi::GstMeta,
params: glib::ffi::gpointer,
_buffer: *mut gst::ffi::GstBuffer,
) -> glib::ffi::gboolean {
assert!(!params.is_null());
let meta = &mut *(meta as *mut OriginalBufferMeta);
let params = ptr::read(params as *const OriginalBufferMetaParams);
let OriginalBufferMetaParams { original, caps } = params;
ptr::write(&mut meta.original, Some(original));
ptr::write(&mut meta.caps, caps);
true.into_glib()
}
unsafe extern "C" fn original_buffer_meta_free(
meta: *mut gst::ffi::GstMeta,
_buffer: *mut gst::ffi::GstBuffer,
) {
let meta = &mut *(meta as *mut OriginalBufferMeta);
meta.original = None;
meta.caps = None;
}
unsafe extern "C" fn original_buffer_meta_transform(
dest: *mut gst::ffi::GstBuffer,
meta: *mut gst::ffi::GstMeta,
_buffer: *mut gst::ffi::GstBuffer,
_type_: glib::ffi::GQuark,
_data: glib::ffi::gpointer,
) -> glib::ffi::gboolean {
let dest = gst::BufferRef::from_mut_ptr(dest);
let meta = &*(meta as *const OriginalBufferMeta);
if dest.meta::<super::OriginalBufferMeta>().is_some() {
return true.into_glib();
}
// We don't store a ref in the meta if it's self-referencing, but we add it
// when copying the meta to another buffer.
super::OriginalBufferMeta::add(
dest,
meta.original.as_ref().unwrap().clone(),
meta.caps.clone(),
);
true.into_glib()
}
pub(super) fn original_buffer_meta_get_info() -> *const gst::ffi::GstMetaInfo {
struct MetaInfo(ptr::NonNull<gst::ffi::GstMetaInfo>);
unsafe impl Send for MetaInfo {}
unsafe impl Sync for MetaInfo {}
static META_INFO: Lazy<MetaInfo> = Lazy::new(|| unsafe {
MetaInfo(
ptr::NonNull::new(gst::ffi::gst_meta_register(
original_buffer_meta_api_get_type().into_glib(),
b"OriginalBufferMeta\0".as_ptr() as *const _,
mem::size_of::<OriginalBufferMeta>(),
Some(original_buffer_meta_init),
Some(original_buffer_meta_free),
Some(original_buffer_meta_transform),
) as *mut gst::ffi::GstMetaInfo)
.expect("Failed to register meta API"),
)
});
META_INFO.0.as_ptr()
}
}
#[test]
fn test() {
gst::init().unwrap();
let mut b = gst::Buffer::with_size(10).unwrap();
let caps = gst::Caps::new_empty_simple("video/x-raw");
let copy = b.copy();
let m = OriginalBufferMeta::add(b.make_mut(), copy, Some(caps.clone()));
assert_eq!(m.caps(), caps.as_ref());
assert_eq!(m.original().clone(), b);
let b2: gst::Buffer = b.copy_deep().unwrap();
let m = b.meta::<OriginalBufferMeta>().unwrap();
assert_eq!(m.caps(), caps.as_ref());
assert_eq!(m.original(), &b);
let m = b2.meta::<OriginalBufferMeta>().unwrap();
assert_eq!(m.caps(), caps.as_ref());
assert_eq!(m.original(), &b);
let b3: gst::Buffer = b2.copy_deep().unwrap();
drop(b2);
let m = b3.meta::<OriginalBufferMeta>().unwrap();
assert_eq!(m.caps(), caps.as_ref());
assert_eq!(m.original(), &b);
}
View file
@ -1,315 +0,0 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::subclass::prelude::*;
use gst_video::prelude::*;
use atomic_refcell::AtomicRefCell;
use crate::originalbuffermeta;
use crate::originalbuffermeta::OriginalBufferMeta;
struct CapsState {
caps: gst::Caps,
vinfo: Option<gst_video::VideoInfo>,
}
impl Default for CapsState {
fn default() -> Self {
CapsState {
caps: gst::Caps::new_empty(),
vinfo: None,
}
}
}
#[derive(Default)]
struct State {
sinkpad_caps: CapsState,
meta_caps: CapsState,
sinkpad_segment: Option<gst::Event>,
}
pub struct OriginalBufferRestore {
state: AtomicRefCell<State>,
src_pad: gst::Pad,
sink_pad: gst::Pad,
}
use once_cell::sync::Lazy;
#[allow(dead_code)]
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"originalbufferrestore",
gst::DebugColorFlags::empty(),
Some("Restore Original buffer as meta"),
)
});
#[glib::object_subclass]
impl ObjectSubclass for OriginalBufferRestore {
const NAME: &'static str = "GstOriginalBufferRestore";
type Type = super::OriginalBufferRestore;
type ParentType = gst::Element;
fn with_class(klass: &Self::Class) -> Self {
let sink_templ = klass.pad_template("sink").unwrap();
let src_templ = klass.pad_template("src").unwrap();
let sink_pad = gst::Pad::builder_from_template(&sink_templ)
.chain_function(|pad, parent, buffer| {
OriginalBufferRestore::catch_panic_pad_function(
parent,
|| Err(gst::FlowError::Error),
|obj| obj.sink_chain(pad, buffer),
)
})
.event_function(|pad, parent, event| {
OriginalBufferRestore::catch_panic_pad_function(
parent,
|| false,
|obj| obj.sink_event(pad, parent, event),
)
})
.query_function(|pad, parent, query| {
OriginalBufferRestore::catch_panic_pad_function(
parent,
|| false,
|obj| obj.sink_query(pad, parent, query),
)
})
.build();
let src_pad = gst::Pad::builder_from_template(&src_templ)
.event_function(|pad, parent, event| {
OriginalBufferRestore::catch_panic_pad_function(
parent,
|| false,
|obj| obj.src_event(pad, parent, event),
)
})
.build();
Self {
src_pad,
sink_pad,
state: Default::default(),
}
}
}
impl ObjectImpl for OriginalBufferRestore {
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
obj.add_pad(&self.sink_pad).unwrap();
obj.add_pad(&self.src_pad).unwrap();
}
}
impl GstObjectImpl for OriginalBufferRestore {}
impl ElementImpl for OriginalBufferRestore {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"Original Buffer Restore",
"Generic",
"Restores a reference to the buffer in a meta",
"Olivier Crête <olivier.crete@collabora.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let caps = gst::Caps::new_any();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
)
.unwrap();
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&caps,
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
fn change_state(
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
let ret = self.parent_change_state(transition)?;
if transition == gst::StateChange::PausedToReady {
let mut state = self.state.borrow_mut();
*state = State::default();
}
Ok(ret)
}
}
impl OriginalBufferRestore {
fn sink_event(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
event: gst::Event,
) -> bool {
match event.view() {
gst::EventView::Caps(e) => {
let mut state = self.state.borrow_mut();
let caps = e.caps_owned();
let vinfo = gst_video::VideoInfo::from_caps(&caps).ok();
state.sinkpad_caps = CapsState { caps, vinfo };
true
}
gst::EventView::Segment(_) => {
let mut state = self.state.borrow_mut();
state.sinkpad_segment = Some(event);
true
}
_ => gst::Pad::event_default(pad, parent, event),
}
}
fn src_event(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
event: gst::Event,
) -> bool {
if event.type_() == gst::EventType::Reconfigure
|| event.has_name("gst-original-buffer-forward-upstream-event")
{
let s = gst::Structure::builder("gst-original-buffer-forward-upstream-event")
.field("event", event)
.build();
let event = gst::event::CustomUpstream::new(s);
self.sink_pad.push_event(event)
} else {
gst::Pad::event_default(pad, parent, event)
}
}
fn sink_query(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
query: &mut gst::QueryRef,
) -> bool {
if let gst::QueryViewMut::Custom(_) = query.view_mut() {
let s = query.structure_mut();
if s.has_name("gst-original-buffer-forward-query") {
if let Ok(mut q) = s.get::<gst::Query>("query") {
s.remove_field("query");
assert!(q.is_writable());
let res = self.src_pad.peer_query(q.get_mut().unwrap());
s.set("query", q);
s.set("result", res);
return true;
}
}
}
gst::Pad::query_default(pad, parent, query)
}
fn sink_chain(
&self,
_pad: &gst::Pad,
inbuf: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let Some(ometa) = inbuf.meta::<OriginalBufferMeta>() else {
//gst::element_warning!(self, gst::StreamError::Failed, ["Buffer {} is missing the GstOriginalBufferMeta, put originalbuffersave upstream in your pipeline", buffer]);
return Ok(gst::FlowSuccess::Ok);
};
let mut state = self.state.borrow_mut();
let meta_caps = &mut state.meta_caps;
if &meta_caps.caps != ometa.caps() {
if !self.src_pad.push_event(gst::event::Caps::new(ometa.caps())) {
return Err(gst::FlowError::NotNegotiated);
}
meta_caps.caps = ometa.caps().clone();
meta_caps.vinfo = gst_video::VideoInfo::from_caps(&meta_caps.caps).ok();
}
let mut outbuf = ometa.original().copy();
inbuf
.copy_into(
outbuf.make_mut(),
gst::BufferCopyFlags::TIMESTAMPS | gst::BufferCopyFlags::FLAGS,
..,
)
.unwrap();
for meta in inbuf.iter_meta::<gst::Meta>() {
if meta.api() == originalbuffermeta::OriginalBufferMeta::meta_api() {
continue;
}
if meta.has_tag::<gst::meta::tags::Memory>()
|| meta.has_tag::<gst::meta::tags::MemoryReference>()
{
continue;
}
if meta.has_tag::<gst_video::video_meta::tags::Size>() {
if let (Some(ref meta_vinfo), Some(ref sink_vinfo)) =
(&state.meta_caps.vinfo, &state.sinkpad_caps.vinfo)
{
if (meta_vinfo.width() != sink_vinfo.width()
|| meta_vinfo.height() != sink_vinfo.height())
&& meta
.transform(
outbuf.make_mut(),
&gst_video::video_meta::VideoMetaTransformScale::new(
sink_vinfo, meta_vinfo,
),
)
.is_ok()
{
continue;
}
}
}
let _ = meta.transform(
outbuf.make_mut(),
&gst::meta::MetaTransformCopy::new(false, ..),
);
}
if let Some(event) = state.sinkpad_segment.take() {
if !self.src_pad.push_event(event) {
return Err(gst::FlowError::Error);
}
}
self.src_pad.push(outbuf)
}
}
View file
@ -1,31 +0,0 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
/**
* SECTION:element-originalbufferrestore
*
* See originalbuffersave for details
*/
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub struct OriginalBufferRestore(ObjectSubclass<imp::OriginalBufferRestore>) @extends gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"originalbufferrestore",
gst::Rank::NONE,
OriginalBufferRestore::static_type(),
)
}
View file
@ -1,205 +0,0 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::prelude::*;
use gst::subclass::prelude::*;
use crate::originalbuffermeta::OriginalBufferMeta;
pub struct OriginalBufferSave {
src_pad: gst::Pad,
sink_pad: gst::Pad,
}
use once_cell::sync::Lazy;
#[allow(dead_code)]
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"originalbuffersave",
gst::DebugColorFlags::empty(),
Some("Save Original buffer as meta"),
)
});
#[glib::object_subclass]
impl ObjectSubclass for OriginalBufferSave {
const NAME: &'static str = "GstOriginalBufferSave";
type Type = super::OriginalBufferSave;
type ParentType = gst::Element;
fn with_class(klass: &Self::Class) -> Self {
let sink_templ = klass.pad_template("sink").unwrap();
let src_templ = klass.pad_template("src").unwrap();
let sink_pad = gst::Pad::builder_from_template(&sink_templ)
.chain_function(|pad, parent, buffer| {
OriginalBufferSave::catch_panic_pad_function(
parent,
|| Err(gst::FlowError::Error),
|obj| obj.sink_chain(pad, buffer),
)
})
.query_function(|pad, parent, query| {
OriginalBufferSave::catch_panic_pad_function(
parent,
|| false,
|obj| obj.sink_query(pad, parent, query),
)
})
.flags(gst::PadFlags::PROXY_CAPS | gst::PadFlags::PROXY_ALLOCATION)
.build();
let src_pad = gst::Pad::builder_from_template(&src_templ)
.event_function(|pad, parent, event| {
OriginalBufferSave::catch_panic_pad_function(
parent,
|| false,
|obj| obj.src_event(pad, parent, event),
)
})
.build();
Self { src_pad, sink_pad }
}
}
impl ObjectImpl for OriginalBufferSave {
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
obj.add_pad(&self.sink_pad).unwrap();
obj.add_pad(&self.src_pad).unwrap();
}
}
impl GstObjectImpl for OriginalBufferSave {}
impl ElementImpl for OriginalBufferSave {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"Original Buffer Save",
"Generic",
"Saves a reference to the buffer in a meta",
"Olivier Crête <olivier.crete@collabora.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let caps = gst::Caps::new_any();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
)
.unwrap();
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&caps,
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
}
impl OriginalBufferSave {
fn forward_query(&self, query: gst::Query) -> Option<gst::Query> {
let mut s = gst::Structure::new_empty("gst-original-buffer-forward-query");
s.set("query", query);
let mut query = gst::query::Custom::new(s);
if self.src_pad.peer_query(&mut query) {
let s = query.structure_mut();
if let (Ok(true), Ok(q)) = (s.get("result"), s.get::<gst::Query>("query")) {
Some(q)
} else {
None
}
} else {
None
}
}
fn sink_chain(
&self,
pad: &gst::Pad,
inbuf: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let mut buf = inbuf.copy();
let caps = pad.current_caps();
if let Some(mut meta) = buf.make_mut().meta_mut::<OriginalBufferMeta>() {
meta.replace(inbuf, caps);
} else {
OriginalBufferMeta::add(buf.make_mut(), inbuf, caps);
}
self.src_pad.push(buf)
}
fn sink_query(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
query: &mut gst::QueryRef,
) -> bool {
let ret = gst::Pad::query_default(pad, parent, query);
if !ret {
return ret;
}
if let gst::QueryViewMut::Caps(q) = query.view_mut() {
if let Some(caps) = q.result_owned() {
let forwarding_q = gst::query::Caps::new(Some(&caps)).into();
if let Some(forwarding_q) = self.forward_query(forwarding_q) {
if let gst::QueryView::Caps(c) = forwarding_q.view() {
let res = c
.result_owned()
.map(|c| c.intersect_with_mode(&caps, gst::CapsIntersectMode::First));
q.set_result(&res);
}
}
}
}
// We should also do allocation queries, but that requires supporting the same
// intersection semantics as gsttee, which should be in a helper function.
true
}
fn src_event(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
event: gst::Event,
) -> bool {
let event = if event.has_name("gst-original-buffer-forward-upstream-event") {
event.structure().unwrap().get("event").unwrap()
} else {
event
};
gst::Pad::event_default(pad, parent, event)
}
}
View file
@ -1,41 +0,0 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
/**
* SECTION:element-originalbuffersave
*
* GStreamer elements to store the original buffer and restore it later
*
* In many analysis scenarios (for example machine learning), it is desirable to
* use a pre-processed buffer, for example one with a lowered resolution, but we may
* want to take the output of this analysis and apply it to the original buffer.
*
* These elements do just that; the typical usage would be a pipeline like:
*
* `... ! originalbuffersave ! videoconvertscale ! video/x-raw, width=100, height=100 ! analysiselement ! originalbufferrestore ! ...`
*
* The originalbufferrestore element will "restore" the buffer that entered the "save" element, but will keep any metadata that was added later.
*/
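// A minimal programmatic sketch of that pipeline (illustrative only: `identity` stands in
// for the analysis element, the other element names are the ones documented above):
//
//     let pipeline = gst::parse_launch(
//         "videotestsrc ! originalbuffersave ! videoconvertscale ! \
//          video/x-raw,width=100,height=100 ! identity ! originalbufferrestore ! fakesink",
//     )
//     .unwrap();
//     pipeline.set_state(gst::State::Playing).unwrap();
//
// Buffers leaving originalbufferrestore again carry the full-resolution content that
// entered originalbuffersave, while timestamps, flags and (where possible) metas added
// by the analysis step are copied onto the restored buffer, as described above.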
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub struct OriginalBufferSave(ObjectSubclass<imp::OriginalBufferSave>) @extends gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"originalbuffersave",
gst::Rank::NONE,
OriginalBufferSave::static_type(),
)
}
View file
@ -112,7 +112,7 @@ impl State {
gst::FlowError::Error
})?;
gst::debug!(CAT, obj = pad, "Returned pull size: {}", map.len());
gst::debug!(CAT, obj: pad, "Returned pull size: {}", map.len());
let mut nonce = add_nonce(self.initial_nonce.unwrap(), chunk_index);
let block_size = self.block_size.expect("Block size wasn't set") as usize + box_::MACBYTES;
@ -144,8 +144,8 @@ impl State {
adapter_offset: usize,
) -> Result<gst::PadGetRangeSuccess, gst::FlowError> {
let avail = self.adapter.available();
gst::debug!(CAT, obj = pad, "Avail: {}", avail);
gst::debug!(CAT, obj = pad, "Adapter offset: {}", adapter_offset);
gst::debug!(CAT, obj: pad, "Avail: {}", avail);
gst::debug!(CAT, obj: pad, "Adapter offset: {}", adapter_offset);
// if this underflows, the available buffer in the adapter is smaller than the
// requested offset, which means we have reached EOS
@ -189,7 +189,7 @@ impl State {
Err(e) => {
gst::error!(
CAT,
obj = pad,
obj: pad,
"Failed to map provided buffer writable: {}",
e
);
@ -197,7 +197,7 @@ impl State {
}
};
if let Err(e) = self.adapter.copy(0, &mut map[..available_size]) {
gst::error!(CAT, obj = pad, "Failed to copy into provided buffer: {}", e);
gst::error!(CAT, obj: pad, "Failed to copy into provided buffer: {}", e);
return Err(gst::FlowError::Error);
}
if map.len() != available_size {
@ -278,7 +278,7 @@ impl Decrypter {
fn src_query(&self, pad: &gst::Pad, query: &mut gst::QueryRef) -> bool {
use gst::QueryViewMut;
gst::log!(CAT, obj = pad, "Handling query {:?}", query);
gst::log!(CAT, obj: pad, "Handling query {:?}", query);
match query.view_mut() {
QueryViewMut::Scheduling(q) => {
@ -288,12 +288,12 @@ impl Decrypter {
return res;
}
gst::log!(CAT, obj = pad, "Upstream returned {:?}", peer_query);
gst::log!(CAT, obj: pad, "Upstream returned {:?}", peer_query);
let (flags, min, max, align) = peer_query.result();
q.set(flags, min, max, align);
q.add_scheduling_modes(&[gst::PadMode::Pull]);
gst::log!(CAT, obj = pad, "Returning {:?}", q.query_mut());
gst::log!(CAT, obj: pad, "Returning {:?}", q.query_mut());
true
}
QueryViewMut::Duration(q) => {
@ -334,7 +334,7 @@ impl Decrypter {
// subtrack the MAC of each block
let size = size - total_chunks * box_::MACBYTES as u64;
gst::debug!(CAT, obj = pad, "Setting duration bytes: {}", size);
gst::debug!(CAT, obj: pad, "Setting duration bytes: {}", size);
q.set(size.bytes());
true
@ -402,9 +402,9 @@ impl Decrypter {
let state = state.as_mut().unwrap();
state.initial_nonce = Some(nonce);
gst::debug!(CAT, imp = self, "Setting nonce to: {:?}", nonce.0);
gst::debug!(CAT, imp: self, "Setting nonce to: {:?}", nonce.0);
state.block_size = Some(block_size);
gst::debug!(CAT, imp = self, "Setting block size to: {}", block_size);
gst::debug!(CAT, imp: self, "Setting block size to: {}", block_size);
Ok(())
}
@ -420,8 +420,8 @@ impl Decrypter {
+ (chunk_index * block_size as u64)
+ (chunk_index * box_::MACBYTES as u64);
gst::debug!(CAT, obj = pad, "Pull offset: {}", pull_offset);
gst::debug!(CAT, obj = pad, "block size: {}", block_size);
gst::debug!(CAT, obj: pad, "Pull offset: {}", pull_offset);
gst::debug!(CAT, obj: pad, "block size: {}", block_size);
// calculate how many chunks are needed, if we need something like 3.2
// round the number to 4 and cut the buffer afterwards.
@ -440,7 +440,7 @@ impl Decrypter {
// Read at least one chunk in case 0 bytes were requested
let total_chunks = u32::max((checked - 1) / block_size, 1);
gst::debug!(CAT, obj = pad, "Blocks to be pulled: {}", total_chunks);
gst::debug!(CAT, obj: pad, "Blocks to be pulled: {}", total_chunks);
// Pull a buffer of all the chunks we will need
let checked_size = total_chunks.checked_mul(block_size).ok_or_else(|| {
@ -457,34 +457,23 @@ impl Decrypter {
})?;
let total_size = checked_size + (total_chunks * box_::MACBYTES as u32);
gst::debug!(CAT, obj = pad, "Requested pull size: {}", total_size);
gst::debug!(CAT, obj: pad, "Requested pull size: {}", total_size);
self.sinkpad
.pull_range(pull_offset, total_size)
.map_err(|err| {
match err {
gst::FlowError::Flushing => {
gst::debug!(
CAT,
obj = self.sinkpad,
"Pausing after pulling buffer, reason: flushing"
);
}
gst::FlowError::Eos => {
gst::debug!(CAT, obj = self.sinkpad, "Eos");
}
flow => {
gst::error!(
CAT,
obj = self.sinkpad,
"Failed to pull, reason: {:?}",
flow
);
}
};
self.sinkpad.pull_range(pull_offset, total_size).map_err(|err| {
match err {
gst::FlowError::Flushing => {
gst::debug!(CAT, obj: self.sinkpad, "Pausing after pulling buffer, reason: flushing");
}
gst::FlowError::Eos => {
gst::debug!(CAT, obj: self.sinkpad, "Eos");
}
flow => {
gst::error!(CAT, obj: self.sinkpad, "Failed to pull, reason: {:?}", flow);
}
};
err
})
err
})
}
fn range(
@ -504,14 +493,14 @@ impl Decrypter {
state.block_size.expect("Block size wasn't set")
};
gst::debug!(CAT, obj = pad, "Requested offset: {}", offset);
gst::debug!(CAT, obj = pad, "Requested size: {}", requested_size);
gst::debug!(CAT, obj: pad, "Requested offset: {}", offset);
gst::debug!(CAT, obj: pad, "Requested size: {}", requested_size);
let chunk_index = offset / block_size as u64;
gst::debug!(CAT, obj = pad, "Stream Block index: {}", chunk_index);
gst::debug!(CAT, obj: pad, "Stream Block index: {}", chunk_index);
let pull_offset = offset - (chunk_index * block_size as u64);
assert!(pull_offset <= u32::MAX as u64);
assert!(pull_offset <= std::u32::MAX as u64);
let pull_offset = pull_offset as u32;
let pulled_buffer =
@ -681,7 +670,7 @@ impl ElementImpl for Decrypter {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::debug!(CAT, imp = self, "Changing state {:?}", transition);
gst::debug!(CAT, imp: self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {
View file
@ -166,7 +166,7 @@ impl Encrypter {
pad: &gst::Pad,
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::log!(CAT, obj = pad, "Handling buffer {:?}", buffer);
gst::log!(CAT, obj: pad, "Handling buffer {:?}", buffer);
let mut buffers = BufferVec::new();
let mut state_guard = self.state.lock().unwrap();
@ -193,7 +193,7 @@ impl Encrypter {
for buffer in buffers {
self.srcpad.push(buffer).map_err(|err| {
gst::error!(CAT, imp = self, "Failed to push buffer {:?}", err);
gst::error!(CAT, imp: self, "Failed to push buffer {:?}", err);
err
})?;
}
@ -204,7 +204,7 @@ impl Encrypter {
fn sink_event(&self, pad: &gst::Pad, event: gst::Event) -> bool {
use gst::EventView;
gst::log!(CAT, obj = pad, "Handling event {:?}", event);
gst::log!(CAT, obj: pad, "Handling event {:?}", event);
match event.view() {
EventView::Caps(_) => {
@ -236,7 +236,7 @@ impl Encrypter {
for buffer in buffers {
if let Err(err) = self.srcpad.push(buffer) {
gst::error!(CAT, imp = self, "Failed to push buffer at EOS {:?}", err);
gst::error!(CAT, imp: self, "Failed to push buffer at EOS {:?}", err);
return false;
}
}
@ -250,7 +250,7 @@ impl Encrypter {
fn src_event(&self, pad: &gst::Pad, event: gst::Event) -> bool {
use gst::EventView;
gst::log!(CAT, obj = pad, "Handling event {:?}", event);
gst::log!(CAT, obj: pad, "Handling event {:?}", event);
match event.view() {
EventView::Seek(_) => false,
@ -261,7 +261,7 @@ impl Encrypter {
fn src_query(&self, pad: &gst::Pad, query: &mut gst::QueryRef) -> bool {
use gst::QueryViewMut;
gst::log!(CAT, obj = pad, "Handling query {:?}", query);
gst::log!(CAT, obj: pad, "Handling query {:?}", query);
match query.view_mut() {
QueryViewMut::Seeking(q) => {
@ -271,7 +271,7 @@ impl Encrypter {
gst::GenericFormattedValue::none_for_format(format),
gst::GenericFormattedValue::none_for_format(format),
);
gst::log!(CAT, obj = pad, "Returning {:?}", q.query_mut());
gst::log!(CAT, obj: pad, "Returning {:?}", q.query_mut());
true
}
QueryViewMut::Duration(q) => {
@ -311,7 +311,7 @@ impl Encrypter {
// add static offsets
let size = size + crate::HEADERS_SIZE as u64;
gst::debug!(CAT, obj = pad, "Setting duration bytes: {}", size);
gst::debug!(CAT, obj: pad, "Setting duration bytes: {}", size);
q.set(size.bytes());
true
@ -492,7 +492,7 @@ impl ElementImpl for Encrypter {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::debug!(CAT, imp = self, "Changing state {:?}", transition);
gst::debug!(CAT, imp: self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {
View file
@ -84,16 +84,13 @@ fn main() {
.property("signal-handoffs", true)
.build()
.unwrap();
sink.connect_closure(
sink.connect(
"handoff",
true,
glib::closure!(
#[strong]
counter,
move |_fakesink: &gst::Element, _buffer: &gst::Buffer, _pad: &gst::Pad| {
let _ = counter.fetch_add(1, Ordering::SeqCst);
}
),
glib::clone!(@strong counter => move |_| {
let _ = counter.fetch_add(1, Ordering::SeqCst);
None
}),
);
let (source, context) = match source.as_str() {
View file
@ -1,19 +1,19 @@
macro_rules! debug_or_trace {
($cat:expr, $raise_log_level:expr, $qual:ident = $obj:expr, $rest:tt $(,)?) => {
($cat:expr, $raise_log_level:expr, $qual:ident: $obj:expr, $rest:tt $(,)?) => {
if $raise_log_level {
gst::debug!($cat, $qual = $obj, $rest);
gst::debug!($cat, $qual: $obj, $rest);
} else {
gst::trace!($cat, $qual = $obj, $rest);
gst::trace!($cat, $qual: $obj, $rest);
}
};
}
macro_rules! log_or_trace {
($cat:expr, $raise_log_level:expr, $qual:ident = $obj:expr, $rest:tt $(,)?) => {
($cat:expr, $raise_log_level:expr, $qual:ident: $obj:expr, $rest:tt $(,)?) => {
if $raise_log_level {
gst::log!($cat, $qual = $obj, $rest);
gst::log!($cat, $qual: $obj, $rest);
} else {
gst::trace!($cat, $qual = $obj, $rest);
gst::trace!($cat, $qual: $obj, $rest);
}
};
}
View file
@ -43,14 +43,14 @@ impl PadSinkHandlerInner {
log_or_trace!(
CAT,
self.is_main_elem,
obj = elem,
obj: elem,
"Discarding {buffer:?} (flushing)"
);
return Err(gst::FlowError::Flushing);
}
debug_or_trace!(CAT, self.is_main_elem, obj = elem, "Received {buffer:?}");
debug_or_trace!(CAT, self.is_main_elem, obj: elem, "Received {buffer:?}");
let dts = buffer
.dts()
@ -67,23 +67,18 @@ impl PadSinkHandlerInner {
stats.add_buffer(latency, interval);
}
debug_or_trace!(CAT, self.is_main_elem, obj: elem, "o latency {latency:.2?}");
debug_or_trace!(
CAT,
self.is_main_elem,
obj = elem,
"o latency {latency:.2?}"
);
debug_or_trace!(
CAT,
self.is_main_elem,
obj = elem,
obj: elem,
"o interval {interval:.2?}",
);
}
self.last_dts = Some(dts);
log_or_trace!(CAT, self.is_main_elem, obj = elem, "Buffer processed");
log_or_trace!(CAT, self.is_main_elem, obj: elem, "Buffer processed");
Ok(())
}
@ -122,7 +117,7 @@ impl PadSinkHandler for AsyncPadSinkHandler {
EventView::Eos(_) => {
{
let mut inner = self.0.lock().await;
debug_or_trace!(CAT, inner.is_main_elem, obj = elem, "EOS");
debug_or_trace!(CAT, inner.is_main_elem, obj: elem, "EOS");
inner.is_flushing = true;
}
@ -201,7 +196,7 @@ pub struct AsyncMutexSink {
impl AsyncMutexSink {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
let settings = self.settings.lock().unwrap();
debug_or_trace!(CAT, settings.is_main_elem, imp = self, "Preparing");
debug_or_trace!(CAT, settings.is_main_elem, imp: self, "Preparing");
let stats = if settings.logs_stats {
Some(Stats::new(
settings.max_buffers,
@ -212,25 +207,25 @@ impl AsyncMutexSink {
};
self.sink_pad_handler.prepare(settings.is_main_elem, stats);
debug_or_trace!(CAT, settings.is_main_elem, imp = self, "Prepared");
debug_or_trace!(CAT, settings.is_main_elem, imp: self, "Prepared");
Ok(())
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopping");
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopping");
self.sink_pad_handler.stop();
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopped");
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp = self, "Starting");
debug_or_trace!(CAT, is_main_elem, imp: self, "Starting");
self.sink_pad_handler.start();
debug_or_trace!(CAT, is_main_elem, imp = self, "Started");
debug_or_trace!(CAT, is_main_elem, imp: self, "Started");
Ok(())
}
@ -316,7 +311,7 @@ impl ElementImpl for AsyncMutexSink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {transition:?}");
gst::trace!(CAT, imp: self, "Changing state {transition:?}");
match transition {
gst::StateChange::NullToReady => {


@ -42,14 +42,14 @@ impl PadSinkHandlerInner {
log_or_trace!(
CAT,
self.is_main_elem,
obj = elem,
obj: elem,
"Discarding {buffer:?} (flushing)"
);
return Err(gst::FlowError::Flushing);
}
debug_or_trace!(CAT, self.is_main_elem, obj = elem, "Received {buffer:?}");
debug_or_trace!(CAT, self.is_main_elem, obj: elem, "Received {buffer:?}");
let dts = buffer
.dts()
@ -66,23 +66,18 @@ impl PadSinkHandlerInner {
stats.add_buffer(latency, interval);
}
debug_or_trace!(CAT, self.is_main_elem, obj: elem, "o latency {latency:.2?}");
debug_or_trace!(
CAT,
self.is_main_elem,
obj = elem,
"o latency {latency:.2?}"
);
debug_or_trace!(
CAT,
self.is_main_elem,
obj = elem,
obj: elem,
"o interval {interval:.2?}",
);
}
self.last_dts = Some(dts);
log_or_trace!(CAT, self.is_main_elem, obj = elem, "Buffer processed");
log_or_trace!(CAT, self.is_main_elem, obj: elem, "Buffer processed");
Ok(())
}
@ -121,7 +116,7 @@ impl PadSinkHandler for SyncPadSinkHandler {
EventView::Eos(_) => {
{
let mut inner = self.0.lock().unwrap();
debug_or_trace!(CAT, inner.is_main_elem, obj = elem, "EOS");
debug_or_trace!(CAT, inner.is_main_elem, obj: elem, "EOS");
inner.is_flushing = true;
}
@ -194,7 +189,7 @@ pub struct DirectSink {
impl DirectSink {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
let settings = self.settings.lock().unwrap();
debug_or_trace!(CAT, settings.is_main_elem, imp = self, "Preparing");
debug_or_trace!(CAT, settings.is_main_elem, imp: self, "Preparing");
let stats = if settings.logs_stats {
Some(Stats::new(
settings.max_buffers,
@ -205,25 +200,25 @@ impl DirectSink {
};
self.sink_pad_handler.prepare(settings.is_main_elem, stats);
debug_or_trace!(CAT, settings.is_main_elem, imp = self, "Prepared");
debug_or_trace!(CAT, settings.is_main_elem, imp: self, "Prepared");
Ok(())
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopping");
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopping");
self.sink_pad_handler.stop();
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopped");
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp = self, "Starting");
debug_or_trace!(CAT, is_main_elem, imp: self, "Starting");
self.sink_pad_handler.start();
debug_or_trace!(CAT, is_main_elem, imp = self, "Started");
debug_or_trace!(CAT, is_main_elem, imp: self, "Started");
Ok(())
}
@ -309,7 +304,7 @@ impl ElementImpl for DirectSink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {transition:?}");
gst::trace!(CAT, imp: self, "Changing state {transition:?}");
match transition {
gst::StateChange::NullToReady => {


@ -68,7 +68,7 @@ impl PadSinkHandler for TaskPadSinkHandler {
}
EventView::Eos(_) => {
let is_main_elem = elem.imp().settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, obj = elem, "EOS");
debug_or_trace!(CAT, is_main_elem, obj: elem, "EOS");
// When each element sends its own EOS message,
// it takes ages for the pipeline to process all of them.
@ -137,13 +137,13 @@ impl TaskImpl for TaskSinkTask {
type Item = StreamItem;
fn prepare(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Preparing Task");
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Preparing Task");
future::ok(()).boxed()
}
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async {
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Starting Task");
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Starting Task");
self.last_dts = None;
if let Some(stats) = self.stats.as_mut() {
stats.start();
@ -156,7 +156,7 @@ impl TaskImpl for TaskSinkTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async {
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Stopping Task");
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Stopping Task");
self.flush();
Ok(())
}
@ -172,7 +172,7 @@ impl TaskImpl for TaskSinkTask {
fn handle_item(&mut self, item: StreamItem) -> BoxFuture<'_, Result<(), gst::FlowError>> {
async move {
debug_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Received {item:?}");
debug_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Received {item:?}");
match item {
StreamItem::Buffer(buffer) => {
@ -194,20 +194,20 @@ impl TaskImpl for TaskSinkTask {
debug_or_trace!(
CAT,
self.is_main_elem,
obj = self.elem,
obj: self.elem,
"o latency {latency:.2?}",
);
debug_or_trace!(
CAT,
self.is_main_elem,
obj = self.elem,
obj: self.elem,
"o interval {interval:.2?}",
);
}
self.last_dts = Some(dts);
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Buffer processed");
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Buffer processed");
}
StreamItem::Event(evt) => {
if let EventView::Segment(evt) = evt.view() {
@ -249,7 +249,7 @@ impl TaskSink {
None
};
debug_or_trace!(CAT, settings.is_main_elem, imp = self, "Preparing");
debug_or_trace!(CAT, settings.is_main_elem, imp: self, "Preparing");
let ts_ctx = Context::acquire(&settings.context, settings.context_wait).map_err(|err| {
error_msg!(
@ -265,32 +265,32 @@ impl TaskSink {
*self.item_sender.lock().unwrap() = Some(item_sender);
debug_or_trace!(CAT, settings.is_main_elem, imp = self, "Prepared");
debug_or_trace!(CAT, settings.is_main_elem, imp: self, "Prepared");
Ok(())
}
fn unprepare(&self) {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp = self, "Unpreparing");
debug_or_trace!(CAT, is_main_elem, imp: self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
debug_or_trace!(CAT, is_main_elem, imp = self, "Unprepared");
debug_or_trace!(CAT, is_main_elem, imp: self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopping");
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopping");
self.task.stop().block_on()?;
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopped");
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp = self, "Starting");
debug_or_trace!(CAT, is_main_elem, imp: self, "Starting");
self.task.start().block_on()?;
debug_or_trace!(CAT, is_main_elem, imp = self, "Started");
debug_or_trace!(CAT, is_main_elem, imp: self, "Started");
Ok(())
}
@ -376,7 +376,7 @@ impl ElementImpl for TaskSink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {transition:?}");
gst::trace!(CAT, imp: self, "Changing state {transition:?}");
match transition {
gst::StateChange::NullToReady => {


@ -103,7 +103,7 @@ impl TaskImpl for SrcTask {
let settings = imp.settings.lock().unwrap();
self.is_main_elem = settings.is_main_elem;
log_or_trace!(CAT, self.is_main_elem, imp = imp, "Preparing Task");
log_or_trace!(CAT, self.is_main_elem, imp: imp, "Preparing Task");
self.push_period = settings.push_period;
self.num_buffers = settings.num_buffers;
@ -113,17 +113,12 @@ impl TaskImpl for SrcTask {
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Starting Task");
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Starting Task");
if self.need_initial_events {
let imp = self.elem.imp();
debug_or_trace!(
CAT,
self.is_main_elem,
obj = self.elem,
"Pushing initial events"
);
debug_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Pushing initial events");
let stream_id =
format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
@ -162,7 +157,7 @@ impl TaskImpl for SrcTask {
}
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Stopping Task");
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Stopping Task");
self.buffer_pool.set_active(false).unwrap();
self.timer = None;
self.need_initial_events = true;
@ -172,9 +167,9 @@ impl TaskImpl for SrcTask {
fn try_next(&mut self) -> BoxFuture<'_, Result<(), gst::FlowError>> {
async move {
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Awaiting timer");
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Awaiting timer");
self.timer.as_mut().unwrap().next().await;
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Timer ticked");
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Timer ticked");
Ok(())
}
@ -195,18 +190,13 @@ impl TaskImpl for SrcTask {
buffer
})
.map_err(|err| {
gst::error!(CAT, obj = self.elem, "Failed to acquire buffer {err}");
gst::error!(CAT, obj: self.elem, "Failed to acquire buffer {err}");
err
})?;
debug_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Forwarding buffer");
debug_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Forwarding buffer");
self.elem.imp().src_pad.push(buffer).await?;
log_or_trace!(
CAT,
self.is_main_elem,
obj = self.elem,
"Successfully pushed buffer"
);
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Successfully pushed buffer");
self.buffer_count += 1;
@ -223,22 +213,22 @@ impl TaskImpl for SrcTask {
async move {
match err {
gst::FlowError::Eos => {
debug_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Pushing EOS");
debug_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Pushing EOS");
let imp = self.elem.imp();
if !imp.src_pad.push_event(gst::event::Eos::new()).await {
gst::error!(CAT, imp = imp, "Error pushing EOS");
gst::error!(CAT, imp: imp, "Error pushing EOS");
}
task::Trigger::Stop
}
gst::FlowError::Flushing => {
debug_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Flushing");
debug_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Flushing");
task::Trigger::FlushStart
}
err => {
gst::error!(CAT, obj = self.elem, "Got error {err}");
gst::error!(CAT, obj: self.elem, "Got error {err}");
gst::element_error!(
&self.elem,
gst::StreamError::Failed,
@ -264,7 +254,7 @@ pub struct TestSrc {
impl TestSrc {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp = self, "Preparing");
debug_or_trace!(CAT, is_main_elem, imp: self, "Preparing");
let settings = self.settings.lock().unwrap();
let ts_ctx = Context::acquire(&settings.context, settings.context_wait).map_err(|err| {
@ -279,41 +269,41 @@ impl TestSrc {
.prepare(SrcTask::new(self.obj().clone()), ts_ctx)
.block_on()?;
debug_or_trace!(CAT, is_main_elem, imp = self, "Prepared");
debug_or_trace!(CAT, is_main_elem, imp: self, "Prepared");
Ok(())
}
fn unprepare(&self) {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp = self, "Unpreparing");
debug_or_trace!(CAT, is_main_elem, imp: self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
debug_or_trace!(CAT, is_main_elem, imp = self, "Unprepared");
debug_or_trace!(CAT, is_main_elem, imp: self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopping");
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopping");
self.task.stop().block_on()?;
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopped");
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp = self, "Starting");
debug_or_trace!(CAT, is_main_elem, imp: self, "Starting");
self.task.start().block_on()?;
debug_or_trace!(CAT, is_main_elem, imp = self, "Started");
debug_or_trace!(CAT, is_main_elem, imp: self, "Started");
Ok(())
}
fn pause(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp = self, "Pausing");
debug_or_trace!(CAT, is_main_elem, imp: self, "Pausing");
self.task.pause().block_on()?;
debug_or_trace!(CAT, is_main_elem, imp = self, "Paused");
debug_or_trace!(CAT, is_main_elem, imp: self, "Paused");
Ok(())
}
@ -463,7 +453,7 @@ impl ElementImpl for TestSrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {transition:?}");
gst::trace!(CAT, imp: self, "Changing state {transition:?}");
match transition {
gst::StateChange::NullToReady => {


@ -30,6 +30,7 @@ use once_cell::sync::Lazy;
use std::sync::Mutex;
use std::time::Duration;
use std::u32;
use crate::runtime::prelude::*;
use crate::runtime::{Context, PadSrc, Task, TaskState};
@ -82,7 +83,7 @@ impl PadSrcHandler for AppSrcPadHandler {
type ElementImpl = AppSrc;
fn src_event(self, pad: &gst::Pad, imp: &AppSrc, event: gst::Event) -> bool {
gst::log!(CAT, obj = pad, "Handling {:?}", event);
gst::log!(CAT, obj: pad, "Handling {:?}", event);
use gst::EventView;
let ret = match event.view() {
@ -94,16 +95,16 @@ impl PadSrcHandler for AppSrcPadHandler {
};
if ret {
gst::log!(CAT, obj = pad, "Handled {:?}", event);
gst::log!(CAT, obj: pad, "Handled {:?}", event);
} else {
gst::log!(CAT, obj = pad, "Didn't handle {:?}", event);
gst::log!(CAT, obj: pad, "Didn't handle {:?}", event);
}
ret
}
fn src_query(self, pad: &gst::Pad, imp: &AppSrc, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj = pad, "Handling {:?}", query);
gst::log!(CAT, obj: pad, "Handling {:?}", query);
use gst::QueryViewMut;
let ret = match query.view_mut() {
@ -135,9 +136,9 @@ impl PadSrcHandler for AppSrcPadHandler {
};
if ret {
gst::log!(CAT, obj = pad, "Handled {:?}", query);
gst::log!(CAT, obj: pad, "Handled {:?}", query);
} else {
gst::log!(CAT, obj = pad, "Didn't handle {:?}", query);
gst::log!(CAT, obj: pad, "Didn't handle {:?}", query);
}
ret
}
@ -169,11 +170,11 @@ impl AppSrcTask {
}
async fn push_item(&mut self, item: StreamItem) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::log!(CAT, obj = self.element, "Handling {:?}", item);
gst::log!(CAT, obj: self.element, "Handling {:?}", item);
let appsrc = self.element.imp();
if self.need_initial_events {
gst::debug!(CAT, obj = self.element, "Pushing initial events");
gst::debug!(CAT, obj: self.element, "Pushing initial events");
let stream_id = format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
let stream_start_evt = gst::event::StreamStart::builder(&stream_id)
@ -203,7 +204,7 @@ impl AppSrcTask {
match item {
StreamItem::Buffer(buffer) => {
gst::log!(CAT, obj = self.element, "Forwarding {:?}", buffer);
gst::log!(CAT, obj: self.element, "Forwarding {:?}", buffer);
appsrc.src_pad.push(buffer).await
}
StreamItem::Event(event) => {
@ -213,7 +214,7 @@ impl AppSrcTask {
Err(gst::FlowError::Eos)
}
_ => {
gst::log!(CAT, obj = self.element, "Forwarding {:?}", event);
gst::log!(CAT, obj: self.element, "Forwarding {:?}", event);
appsrc.src_pad.push_event(event).await;
Ok(gst::FlowSuccess::Ok)
}
@ -241,18 +242,18 @@ impl TaskImpl for AppSrcTask {
let res = self.push_item(item).await;
match res {
Ok(_) => {
gst::log!(CAT, obj = self.element, "Successfully pushed item");
gst::log!(CAT, obj: self.element, "Successfully pushed item");
}
Err(gst::FlowError::Eos) => {
gst::debug!(CAT, obj = self.element, "EOS");
gst::debug!(CAT, obj: self.element, "EOS");
let appsrc = self.element.imp();
appsrc.src_pad.push_event(gst::event::Eos::new()).await;
}
Err(gst::FlowError::Flushing) => {
gst::debug!(CAT, obj = self.element, "Flushing");
gst::debug!(CAT, obj: self.element, "Flushing");
}
Err(err) => {
gst::error!(CAT, obj = self.element, "Got error {}", err);
gst::error!(CAT, obj: self.element, "Got error {}", err);
gst::element_error!(
&self.element,
gst::StreamError::Failed,
@ -269,13 +270,13 @@ impl TaskImpl for AppSrcTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.element, "Stopping task");
gst::log!(CAT, obj: self.element, "Stopping task");
self.flush();
self.need_initial_events = true;
self.need_segment = true;
gst::log!(CAT, obj = self.element, "Task stopped");
gst::log!(CAT, obj: self.element, "Task stopped");
Ok(())
}
.boxed()
@ -283,12 +284,12 @@ impl TaskImpl for AppSrcTask {
fn flush_start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.element, "Starting task flush");
gst::log!(CAT, obj: self.element, "Starting task flush");
self.flush();
self.need_segment = true;
gst::log!(CAT, obj = self.element, "Task flush started");
gst::log!(CAT, obj: self.element, "Task flush started");
Ok(())
}
.boxed()
@ -308,7 +309,7 @@ impl AppSrc {
fn push_buffer(&self, mut buffer: gst::Buffer) -> bool {
let state = self.task.lock_state();
if *state != TaskState::Started && *state != TaskState::Paused {
gst::debug!(CAT, imp = self, "Rejecting buffer due to element state");
gst::debug!(CAT, imp: self, "Rejecting buffer due to element state");
return false;
}
@ -323,7 +324,7 @@ impl AppSrc {
buffer.set_dts(now.opt_checked_sub(base_time).ok().flatten());
buffer.set_pts(None);
} else {
gst::error!(CAT, imp = self, "Don't have a clock yet");
gst::error!(CAT, imp: self, "Don't have a clock yet");
return false;
}
}
@ -336,7 +337,7 @@ impl AppSrc {
{
Ok(_) => true,
Err(err) => {
gst::error!(CAT, imp = self, "Failed to queue buffer: {}", err);
gst::error!(CAT, imp: self, "Failed to queue buffer: {}", err);
false
}
}
@ -352,14 +353,14 @@ impl AppSrc {
match sender.try_send(StreamItem::Event(gst::event::Eos::new())) {
Ok(_) => true,
Err(err) => {
gst::error!(CAT, imp = self, "Failed to queue EOS: {}", err);
gst::error!(CAT, imp: self, "Failed to queue EOS: {}", err);
false
}
}
}
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Preparing");
gst::debug!(CAT, imp: self, "Preparing");
let settings = self.settings.lock().unwrap();
let context =
@ -386,38 +387,38 @@ impl AppSrc {
.prepare(AppSrcTask::new(self.obj().clone(), receiver), context)
.block_on()?;
gst::debug!(CAT, imp = self, "Prepared");
gst::debug!(CAT, imp: self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp = self, "Unpreparing");
gst::debug!(CAT, imp: self, "Unpreparing");
*self.sender.lock().unwrap() = None;
self.task.unprepare().block_on().unwrap();
gst::debug!(CAT, imp = self, "Unprepared");
gst::debug!(CAT, imp: self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Stopping");
gst::debug!(CAT, imp: self, "Stopping");
self.task.stop().block_on()?;
gst::debug!(CAT, imp = self, "Stopped");
gst::debug!(CAT, imp: self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Starting");
gst::debug!(CAT, imp: self, "Starting");
self.task.start().block_on()?;
gst::debug!(CAT, imp = self, "Started");
gst::debug!(CAT, imp: self, "Started");
Ok(())
}
fn pause(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Pausing");
gst::debug!(CAT, imp: self, "Pausing");
self.task.pause().block_on()?;
gst::debug!(CAT, imp = self, "Paused");
gst::debug!(CAT, imp: self, "Paused");
Ok(())
}
}
@ -598,7 +599,7 @@ impl ElementImpl for AppSrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {


@ -90,7 +90,7 @@ impl PadSrcHandler for AudioTestSrcPadHandler {
type ElementImpl = AudioTestSrc;
fn src_query(self, pad: &gst::Pad, imp: &Self::ElementImpl, query: &mut gst::QueryRef) -> bool {
gst::debug!(CAT, obj = pad, "Received {query:?}");
gst::debug!(CAT, obj: pad, "Received {query:?}");
if let gst::QueryViewMut::Latency(q) = query.view_mut() {
let settings = imp.settings.lock().unwrap();
@ -187,17 +187,17 @@ impl AudioTestSrcTask {
}
let mut caps = pad.peer_query_caps(Some(&DEFAULT_CAPS));
gst::debug!(CAT, imp = imp, "Peer returned {caps:?}");
gst::debug!(CAT, imp: imp, "Peer returned {caps:?}");
if caps.is_empty() {
pad.mark_reconfigure();
let err = gst::error_msg!(gst::CoreError::Pad, ["No common Caps"]);
gst::error!(CAT, imp = imp, "{err}");
gst::error!(CAT, imp: imp, "{err}");
return Err(err);
}
if caps.is_any() {
gst::debug!(CAT, imp = imp, "Using our own Caps");
gst::debug!(CAT, imp: imp, "Using our own Caps");
caps = DEFAULT_CAPS.clone();
}
@ -205,7 +205,7 @@ impl AudioTestSrcTask {
let caps = caps.make_mut();
let s = caps.structure_mut(0).ok_or_else(|| {
let err = gst::error_msg!(gst::CoreError::Pad, ["Invalid peer Caps structure"]);
gst::error!(CAT, imp = imp, "{err}");
gst::error!(CAT, imp: imp, "{err}");
err
})?;
@ -227,7 +227,7 @@ impl AudioTestSrcTask {
}
caps.fixate();
gst::debug!(CAT, imp = imp, "fixated to {caps:?}");
gst::debug!(CAT, imp: imp, "fixated to {caps:?}");
imp.src_pad.push_event(gst::event::Caps::new(&caps)).await;
@ -241,7 +241,7 @@ impl TaskImpl for AudioTestSrcTask {
type Item = gst::Buffer;
fn prepare(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
gst::log!(CAT, obj = self.elem, "Preparing Task");
gst::log!(CAT, obj: self.elem, "Preparing Task");
let imp = self.elem.imp();
let settings = imp.settings.lock().unwrap();
@ -260,10 +260,10 @@ impl TaskImpl for AudioTestSrcTask {
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.elem, "Starting Task");
gst::log!(CAT, obj: self.elem, "Starting Task");
if self.need_initial_events {
gst::debug!(CAT, obj = self.elem, "Pushing initial events");
gst::debug!(CAT, obj: self.elem, "Pushing initial events");
let stream_id =
format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
@ -311,14 +311,14 @@ impl TaskImpl for AudioTestSrcTask {
}
fn pause(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
gst::log!(CAT, obj = self.elem, "Pausing Task");
gst::log!(CAT, obj: self.elem, "Pausing Task");
self.buffer_pool.set_active(false).unwrap();
future::ok(()).boxed()
}
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
gst::log!(CAT, obj = self.elem, "Stopping Task");
gst::log!(CAT, obj: self.elem, "Stopping Task");
self.need_initial_events = true;
self.accumulator = 0.0;
@ -331,7 +331,7 @@ impl TaskImpl for AudioTestSrcTask {
let mut buffer = match self.buffer_pool.acquire_buffer(None) {
Ok(buffer) => buffer,
Err(err) => {
gst::error!(CAT, obj = self.elem, "Failed to acquire buffer {}", err);
gst::error!(CAT, obj: self.elem, "Failed to acquire buffer {}", err);
return future::err(err).boxed();
}
};
@ -399,9 +399,9 @@ impl TaskImpl for AudioTestSrcTask {
async move {
let imp = self.elem.imp();
gst::debug!(CAT, imp = imp, "Pushing {buffer:?}");
gst::debug!(CAT, imp: imp, "Pushing {buffer:?}");
imp.src_pad.push(buffer).await?;
gst::log!(CAT, imp = imp, "Successfully pushed buffer");
gst::log!(CAT, imp: imp, "Successfully pushed buffer");
self.buffer_count += 1;
@ -442,12 +442,12 @@ impl TaskImpl for AudioTestSrcTask {
async move {
match err {
gst::FlowError::Flushing => {
gst::debug!(CAT, obj = self.elem, "Flushing");
gst::debug!(CAT, obj: self.elem, "Flushing");
task::Trigger::FlushStart
}
gst::FlowError::Eos => {
gst::debug!(CAT, obj = self.elem, "EOS");
gst::debug!(CAT, obj: self.elem, "EOS");
self.elem
.imp()
.src_pad
@ -457,7 +457,7 @@ impl TaskImpl for AudioTestSrcTask {
task::Trigger::Stop
}
err => {
gst::error!(CAT, obj = self.elem, "Got error {err}");
gst::error!(CAT, obj: self.elem, "Got error {err}");
gst::element_error!(
&self.elem,
gst::StreamError::Failed,
@ -482,7 +482,7 @@ pub struct AudioTestSrc {
impl AudioTestSrc {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Preparing");
gst::debug!(CAT, imp: self, "Preparing");
let settings = self.settings.lock().unwrap();
let context =
@ -498,37 +498,37 @@ impl AudioTestSrc {
.prepare(AudioTestSrcTask::new(self.obj().clone()), context)
.block_on()?;
gst::debug!(CAT, imp = self, "Prepared");
gst::debug!(CAT, imp: self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp = self, "Unpreparing");
gst::debug!(CAT, imp: self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
gst::debug!(CAT, imp = self, "Unprepared");
gst::debug!(CAT, imp: self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Stopping");
gst::debug!(CAT, imp: self, "Stopping");
self.task.stop().block_on()?;
gst::debug!(CAT, imp = self, "Stopped");
gst::debug!(CAT, imp: self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Starting");
gst::debug!(CAT, imp: self, "Starting");
self.task.start().block_on()?;
gst::debug!(CAT, imp = self, "Started");
gst::debug!(CAT, imp: self, "Started");
Ok(())
}
fn pause(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Pausing");
gst::debug!(CAT, imp: self, "Pausing");
self.task.pause().block_on()?;
gst::debug!(CAT, imp = self, "Paused");
gst::debug!(CAT, imp: self, "Paused");
Ok(())
}
@ -695,7 +695,7 @@ impl ElementImpl for AudioTestSrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {transition:?}");
gst::trace!(CAT, imp: self, "Changing state {transition:?}");
match transition {
gst::StateChange::NullToReady => {


@ -26,6 +26,7 @@ use once_cell::sync::Lazy;
use std::collections::VecDeque;
use std::sync::Arc;
use std::sync::Mutex as StdMutex;
use std::u32;
static DATA_QUEUE_CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
@ -126,14 +127,10 @@ impl DataQueue {
pub fn start(&self) {
let mut inner = self.0.lock().unwrap();
if inner.state == DataQueueState::Started {
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Data queue already Started"
);
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Data queue already Started");
return;
}
gst::debug!(DATA_QUEUE_CAT, obj = inner.element, "Starting data queue");
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Starting data queue");
inner.state = DataQueueState::Started;
inner.wake();
}
@ -141,14 +138,10 @@ impl DataQueue {
pub fn stop(&self) {
let mut inner = self.0.lock().unwrap();
if inner.state == DataQueueState::Stopped {
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Data queue already Stopped"
);
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Data queue already Stopped");
return;
}
gst::debug!(DATA_QUEUE_CAT, obj = inner.element, "Stopping data queue");
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Stopping data queue");
inner.state = DataQueueState::Stopped;
inner.wake();
}
@ -156,7 +149,7 @@ impl DataQueue {
pub fn clear(&self) {
let mut inner = self.0.lock().unwrap();
gst::debug!(DATA_QUEUE_CAT, obj = inner.element, "Clearing data queue");
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Clearing data queue");
let src_pad = inner.src_pad.clone();
for item in inner.queue.drain(..) {
@ -170,7 +163,7 @@ impl DataQueue {
}
}
gst::debug!(DATA_QUEUE_CAT, obj = inner.element, "Data queue cleared");
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Data queue cleared");
}
pub fn push(&self, item: DataQueueItem) -> Result<(), DataQueueItem> {
@ -179,7 +172,7 @@ impl DataQueue {
if inner.state == DataQueueState::Stopped {
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
obj: inner.element,
"Rejecting item {:?} in state {:?}",
item,
inner.state
@ -187,12 +180,7 @@ impl DataQueue {
return Err(item);
}
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Pushing item {:?}",
item
);
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Pushing item {:?}", item);
let (count, bytes) = item.size();
let queue_ts = inner.queue.iter().filter_map(|i| i.timestamp()).next();
@ -200,26 +188,14 @@ impl DataQueue {
if let Some(max) = inner.max_size_buffers {
if max <= inner.cur_size_buffers {
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Queue is full (buffers): {} <= {}",
max,
inner.cur_size_buffers
);
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Queue is full (buffers): {} <= {}", max, inner.cur_size_buffers);
return Err(item);
}
}
if let Some(max) = inner.max_size_bytes {
if max <= inner.cur_size_bytes {
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Queue is full (bytes): {} <= {}",
max,
inner.cur_size_bytes
);
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Queue is full (bytes): {} <= {}", max, inner.cur_size_bytes);
return Err(item);
}
}
@ -233,13 +209,7 @@ impl DataQueue {
};
if max <= level {
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Queue is full (time): {} <= {}",
max,
level
);
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Queue is full (time): {} <= {}", max, level);
return Err(item);
}
}
@ -262,15 +232,10 @@ impl DataQueue {
match inner.state {
DataQueueState::Started => match inner.queue.pop_front() {
None => {
gst::debug!(DATA_QUEUE_CAT, obj = inner.element, "Data queue is empty");
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Data queue is empty");
}
Some(item) => {
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Popped item {:?}",
item
);
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Popped item {:?}", item);
let (count, bytes) = item.size();
inner.cur_size_buffers -= count;
@ -280,7 +245,7 @@ impl DataQueue {
}
},
DataQueueState::Stopped => {
gst::debug!(DATA_QUEUE_CAT, obj = inner.element, "Data queue Stopped");
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Data queue Stopped");
return None;
}
}


@ -30,6 +30,7 @@ use once_cell::sync::Lazy;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use std::time::Duration;
use std::u32;
use crate::runtime::prelude::*;
use crate::runtime::{self, PadSink, PadSrc};
@ -139,7 +140,7 @@ impl InputSelectorPadSinkHandler {
}
if is_active {
gst::log!(CAT, obj = pad, "Forwarding {:?}", buffer);
gst::log!(CAT, obj: pad, "Forwarding {:?}", buffer);
if switched_pad && !buffer.flags().contains(gst::BufferFlags::DISCONT) {
let buffer = buffer.make_mut();
@ -172,7 +173,7 @@ impl PadSinkHandler for InputSelectorPadSinkHandler {
list: gst::BufferList,
) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
async move {
gst::log!(CAT, obj = pad, "Handling buffer list {:?}", list);
gst::log!(CAT, obj: pad, "Handling buffer list {:?}", list);
// TODO: Ideally we would keep the list intact and forward it in one go
for buffer in list.iter_owned() {
self.handle_item(&pad, &elem, buffer).await?;
@ -229,14 +230,14 @@ impl PadSinkHandler for InputSelectorPadSinkHandler {
}
fn sink_query(self, pad: &gst::Pad, imp: &InputSelector, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj = pad, "Handling query {:?}", query);
gst::log!(CAT, obj: pad, "Handling query {:?}", query);
if query.is_serialized() {
// FIXME: How can we do this (drops ALLOCATION and DRAIN)?
gst::log!(CAT, obj = pad, "Dropping serialized query {:?}", query);
gst::log!(CAT, obj: pad, "Dropping serialized query {:?}", query);
false
} else {
gst::log!(CAT, obj = pad, "Forwarding query {:?}", query);
gst::log!(CAT, obj: pad, "Forwarding query {:?}", query);
imp.src_pad.gst_pad().peer_query(query)
}
}
@ -249,7 +250,7 @@ impl PadSrcHandler for InputSelectorPadSrcHandler {
type ElementImpl = InputSelector;
fn src_query(self, pad: &gst::Pad, imp: &InputSelector, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj = pad, "Handling {:?}", query);
gst::log!(CAT, obj: pad, "Handling {:?}", query);
use gst::QueryViewMut;
match query.view_mut() {
@ -339,9 +340,9 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
impl InputSelector {
fn unprepare(&self) {
let mut state = self.state.lock().unwrap();
gst::debug!(CAT, imp = self, "Unpreparing");
gst::debug!(CAT, imp: self, "Unpreparing");
*state = State::default();
gst::debug!(CAT, imp = self, "Unprepared");
gst::debug!(CAT, imp: self, "Unprepared");
}
}
@ -416,8 +417,8 @@ impl ObjectImpl for InputSelector {
let pads = self.pads.lock().unwrap();
let mut old_pad = None;
if let Some(ref pad) = pad {
if pads.sink_pads.contains_key(pad) {
old_pad.clone_from(&state.active_sinkpad);
if pads.sink_pads.get(pad).is_some() {
old_pad = state.active_sinkpad.clone();
state.active_sinkpad = Some(pad.clone());
state.switched_pad = true;
}
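// Equivalence sketch for the two idioms this hunk swaps (behaviour is the same,
// key/value types below are placeholders): `map.contains_key(k)` matches
// `map.get(k).is_some()`, and `dst.clone_from(&src)` is an in-place form of
// `dst = src.clone()`.
use std::collections::HashMap;

fn is_known_pad(sink_pads: &HashMap<String, u32>, pad: &str) -> bool {
    debug_assert_eq!(sink_pads.contains_key(pad), sink_pads.get(pad).is_some());
    sink_pads.contains_key(pad)
}

fn remember_active(old_pad: &mut Option<String>, active: &Option<String>) {
    // same result as `*old_pad = active.clone();`
    old_pad.clone_from(active);
}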
@ -515,7 +516,7 @@ impl ElementImpl for InputSelector {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
if let gst::StateChange::ReadyToNull = transition {
self.unprepare();


@ -144,7 +144,7 @@ impl SinkHandler {
// For resetting if seqnum discontinuities
fn reset(&self, inner: &mut SinkHandlerInner, jb: &JitterBuffer) -> BTreeSet<GapPacket> {
gst::info!(CAT, imp = jb, "Resetting");
gst::info!(CAT, imp: jb, "Resetting");
let mut state = jb.state.lock().unwrap();
state.jbuf.flush();
@ -176,17 +176,17 @@ impl SinkHandler {
) -> Result<gst::FlowSuccess, gst::FlowError> {
let s = caps.structure(0).ok_or(gst::FlowError::Error)?;
gst::debug!(CAT, imp = jb, "Parsing {:?}", caps);
gst::debug!(CAT, imp: jb, "Parsing {:?}", caps);
let payload = s.get::<i32>("payload").map_err(|err| {
gst::debug!(CAT, imp = jb, "Caps 'payload': {}", err);
gst::debug!(CAT, imp: jb, "Caps 'payload': {}", err);
gst::FlowError::Error
})?;
if pt != 0 && payload as u8 != pt {
gst::debug!(
CAT,
imp = jb,
imp: jb,
"Caps 'payload' ({}) doesn't match payload type ({})",
payload,
pt
@ -196,12 +196,12 @@ impl SinkHandler {
inner.last_pt = Some(pt);
let clock_rate = s.get::<i32>("clock-rate").map_err(|err| {
gst::debug!(CAT, imp = jb, "Caps 'clock-rate': {}", err);
gst::debug!(CAT, imp: jb, "Caps 'clock-rate': {}", err);
gst::FlowError::Error
})?;
if clock_rate <= 0 {
gst::debug!(CAT, imp = jb, "Caps 'clock-rate' <= 0");
gst::debug!(CAT, imp: jb, "Caps 'clock-rate' <= 0");
return Err(gst::FlowError::Error);
}
state.clock_rate = Some(clock_rate as u32);
@ -258,7 +258,7 @@ impl SinkHandler {
gst::debug!(
CAT,
imp = jb,
imp: jb,
"Handling big gap, gap packets length: {}",
gap_packets_length
);
@ -266,20 +266,20 @@ impl SinkHandler {
inner.gap_packets.insert(GapPacket::new(buffer));
if gap_packets_length > 0 {
let mut prev_gap_seq = u32::MAX;
let mut prev_gap_seq = std::u32::MAX;
let mut all_consecutive = true;
for gap_packet in inner.gap_packets.iter() {
gst::log!(
CAT,
imp = jb,
imp: jb,
"Looking at gap packet with seq {}",
gap_packet.seq,
);
all_consecutive = gap_packet.pt == pt;
if prev_gap_seq == u32::MAX {
if prev_gap_seq == std::u32::MAX {
prev_gap_seq = gap_packet.seq as u32;
} else if gst_rtp::compare_seqnum(gap_packet.seq, prev_gap_seq as u16) != -1 {
all_consecutive = false;
@ -292,7 +292,7 @@ impl SinkHandler {
}
}
gst::debug!(CAT, imp = jb, "all consecutive: {}", all_consecutive);
gst::debug!(CAT, imp: jb, "all consecutive: {}", all_consecutive);
if all_consecutive && gap_packets_length > 3 {
reset = true;
@ -334,7 +334,7 @@ impl SinkHandler {
gst::log!(
CAT,
imp = jb,
imp: jb,
"Storing buffer, seq: {}, rtptime: {}, pt: {}",
seq,
rtptime,
@ -367,7 +367,7 @@ impl SinkHandler {
inner.last_pt = Some(pt);
state.clock_rate = None;
gst::debug!(CAT, obj = pad, "New payload type: {}", pt);
gst::debug!(CAT, obj: pad, "New payload type: {}", pt);
if let Some(caps) = pad.current_caps() {
/* Ignore errors at this point, as we want to emit request-pt-map */
@ -381,7 +381,7 @@ impl SinkHandler {
let caps = element
.emit_by_name::<Option<gst::Caps>>("request-pt-map", &[&(pt as u32)])
.ok_or_else(|| {
gst::error!(CAT, obj = pad, "Signal 'request-pt-map' returned None");
gst::error!(CAT, obj: pad, "Signal 'request-pt-map' returned None");
gst::FlowError::Error
})?;
let mut state = jb.state.lock().unwrap();
@ -404,7 +404,7 @@ impl SinkHandler {
if pts.is_none() {
gst::debug!(
CAT,
imp = jb,
imp: jb,
"cannot calculate a valid pts for #{}, discard",
seq
);
@ -437,7 +437,7 @@ impl SinkHandler {
if gap <= 0 {
state.stats.num_late += 1;
gst::debug!(CAT, imp = jb, "Dropping late {}", seq);
gst::debug!(CAT, imp: jb, "Dropping late {}", seq);
return Ok(gst::FlowSuccess::Ok);
}
}
@ -472,7 +472,7 @@ impl SinkHandler {
(Some(earliest_pts), Some(pts)) if pts < earliest_pts => true,
(Some(earliest_pts), Some(pts)) if pts == earliest_pts => state
.earliest_seqnum
.is_some_and(|earliest_seqnum| seq > earliest_seqnum),
.map_or(false, |earliest_seqnum| seq > earliest_seqnum),
_ => false,
};
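// Equivalence sketch for the Option idiom this hunk swaps: both spellings
// return the same boolean, so the change is purely stylistic.
fn is_newer(earliest_seqnum: Option<u16>, seq: u16) -> bool {
    // identical result to: earliest_seqnum.is_some_and(|earliest| seq > earliest)
    earliest_seqnum.map_or(false, |earliest| seq > earliest)
}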
@ -481,7 +481,7 @@ impl SinkHandler {
state.earliest_seqnum = Some(seq);
}
gst::log!(CAT, obj = pad, "Stored buffer");
gst::log!(CAT, obj: pad, "Stored buffer");
Ok(gst::FlowSuccess::Ok)
}
@ -527,11 +527,11 @@ impl SinkHandler {
if let Some((next_wakeup, _)) = next_wakeup {
if let Some((previous_next_wakeup, ref abort_handle)) = state.wait_handle {
if previous_next_wakeup.is_none()
|| next_wakeup.is_some_and(|next| previous_next_wakeup.unwrap() > next)
|| next_wakeup.map_or(false, |next| previous_next_wakeup.unwrap() > next)
{
gst::debug!(
CAT,
obj = pad,
obj: pad,
"Rescheduling for new item {} < {}",
next_wakeup.display(),
previous_next_wakeup.display(),
@ -555,7 +555,7 @@ impl PadSinkHandler for SinkHandler {
buffer: gst::Buffer,
) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
async move {
gst::debug!(CAT, obj = pad, "Handling {:?}", buffer);
gst::debug!(CAT, obj: pad, "Handling {:?}", buffer);
self.enqueue_item(pad, elem.imp(), Some(buffer))
}
.boxed()
@ -564,11 +564,11 @@ impl PadSinkHandler for SinkHandler {
fn sink_event(self, pad: &gst::Pad, jb: &JitterBuffer, event: gst::Event) -> bool {
use gst::EventView;
gst::log!(CAT, obj = pad, "Handling {:?}", event);
gst::log!(CAT, obj: pad, "Handling {:?}", event);
if let EventView::FlushStart(..) = event.view() {
if let Err(err) = jb.task.flush_start().await_maybe_on_context() {
gst::error!(CAT, obj = pad, "FlushStart failed {:?}", err);
gst::error!(CAT, obj: pad, "FlushStart failed {:?}", err);
gst::element_imp_error!(
jb,
gst::StreamError::Failed,
@ -579,7 +579,7 @@ impl PadSinkHandler for SinkHandler {
}
}
gst::log!(CAT, obj = pad, "Forwarding {:?}", event);
gst::log!(CAT, obj: pad, "Forwarding {:?}", event);
jb.src_pad.gst_pad().push_event(event)
}
@ -590,7 +590,7 @@ impl PadSinkHandler for SinkHandler {
event: gst::Event,
) -> BoxFuture<'static, bool> {
async move {
gst::log!(CAT, obj = pad, "Handling {:?}", event);
gst::log!(CAT, obj: pad, "Handling {:?}", event);
let jb = elem.imp();
@ -603,7 +603,7 @@ impl PadSinkHandler for SinkHandler {
}
EventView::FlushStop(..) => {
if let Err(err) = jb.task.flush_stop().await_maybe_on_context() {
gst::error!(CAT, obj = pad, "FlushStop failed {:?}", err);
gst::error!(CAT, obj: pad, "FlushStop failed {:?}", err);
gst::element_error!(
elem,
gst::StreamError::Failed,
@ -626,7 +626,7 @@ impl PadSinkHandler for SinkHandler {
if forward {
// FIXME: These events should really be queued up and stay in order
gst::log!(CAT, obj = pad, "Forwarding serialized {:?}", event);
gst::log!(CAT, obj: pad, "Forwarding serialized {:?}", event);
jb.src_pad.push_event(event).await
} else {
true
@ -665,7 +665,7 @@ impl SrcHandler {
gst::debug!(
CAT,
obj = element,
obj: element,
"Generating lost events seq: {}, last popped seq: {:?}",
seqnum,
last_popped_seqnum,
@ -801,22 +801,11 @@ impl SrcHandler {
};
for event in lost_events {
gst::debug!(
CAT,
obj = jb.src_pad.gst_pad(),
"Pushing lost event {:?}",
event
);
gst::debug!(CAT, obj: jb.src_pad.gst_pad(), "Pushing lost event {:?}", event);
let _ = jb.src_pad.push_event(event).await;
}
gst::debug!(
CAT,
obj = jb.src_pad.gst_pad(),
"Pushing {:?} with seq {:?}",
buffer,
seq
);
gst::debug!(CAT, obj: jb.src_pad.gst_pad(), "Pushing {:?} with seq {:?}", buffer, seq);
jb.src_pad.push(buffer).await
}
@ -835,7 +824,7 @@ impl SrcHandler {
gst::debug!(
CAT,
obj = element,
obj: element,
"Now is {}, EOS {}, earliest pts is {}, packet_spacing {} and latency {}",
now.display(),
state.eos,
@ -845,7 +834,7 @@ impl SrcHandler {
);
if state.eos {
gst::debug!(CAT, obj = element, "EOS, not waiting");
gst::debug!(CAT, obj: element, "EOS, not waiting");
return (now, Some((now, Duration::ZERO)));
}
@ -865,7 +854,7 @@ impl SrcHandler {
gst::debug!(
CAT,
obj = element,
obj: element,
"Next wakeup at {} with delay {}",
next_wakeup.display(),
delay
@ -881,12 +870,12 @@ impl PadSrcHandler for SrcHandler {
fn src_event(self, pad: &gst::Pad, jb: &JitterBuffer, event: gst::Event) -> bool {
use gst::EventView;
gst::log!(CAT, obj = pad, "Handling {:?}", event);
gst::log!(CAT, obj: pad, "Handling {:?}", event);
match event.view() {
EventView::FlushStart(..) => {
if let Err(err) = jb.task.flush_start().await_maybe_on_context() {
gst::error!(CAT, obj = pad, "FlushStart failed {:?}", err);
gst::error!(CAT, obj: pad, "FlushStart failed {:?}", err);
gst::element_imp_error!(
jb,
gst::StreamError::Failed,
@ -898,7 +887,7 @@ impl PadSrcHandler for SrcHandler {
}
EventView::FlushStop(..) => {
if let Err(err) = jb.task.flush_stop().await_maybe_on_context() {
gst::error!(CAT, obj = pad, "FlushStop failed {:?}", err);
gst::error!(CAT, obj: pad, "FlushStop failed {:?}", err);
gst::element_imp_error!(
jb,
gst::StreamError::Failed,
@ -911,14 +900,14 @@ impl PadSrcHandler for SrcHandler {
_ => (),
}
gst::log!(CAT, obj = pad, "Forwarding {:?}", event);
gst::log!(CAT, obj: pad, "Forwarding {:?}", event);
jb.sink_pad.gst_pad().push_event(event)
}
fn src_query(self, pad: &gst::Pad, jb: &JitterBuffer, query: &mut gst::QueryRef) -> bool {
use gst::QueryViewMut;
gst::log!(CAT, obj = pad, "Forwarding {:?}", query);
gst::log!(CAT, obj: pad, "Forwarding {:?}", query);
match query.view_mut() {
QueryViewMut::Latency(q) => {
@ -1041,7 +1030,7 @@ impl TaskImpl for JitterBufferTask {
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.element, "Starting task");
gst::log!(CAT, obj: self.element, "Starting task");
self.src_pad_handler.clear();
self.sink_pad_handler.clear();
@ -1054,7 +1043,7 @@ impl TaskImpl for JitterBufferTask {
state.jbuf.set_delay(latency);
*jb.state.lock().unwrap() = state;
gst::log!(CAT, obj = self.element, "Task started");
gst::log!(CAT, obj: self.element, "Task started");
Ok(())
}
.boxed()
@ -1114,9 +1103,9 @@ impl TaskImpl for JitterBufferTask {
// Got aborted, reschedule if needed
if let Some(delay_fut) = delay_fut {
gst::debug!(CAT, obj = self.element, "Waiting");
gst::debug!(CAT, obj: self.element, "Waiting");
if let Err(Aborted) = delay_fut.await {
gst::debug!(CAT, obj = self.element, "Waiting aborted");
gst::debug!(CAT, obj: self.element, "Waiting aborted");
return Ok(());
}
}
@ -1134,7 +1123,7 @@ impl TaskImpl for JitterBufferTask {
gst::debug!(
CAT,
obj = self.element,
obj: self.element,
"Woke up at {}, earliest_pts {}",
now.display(),
state.earliest_pts.display()
@ -1177,7 +1166,7 @@ impl TaskImpl for JitterBufferTask {
context_wait,
);
if let Some((Some(next_wakeup), _)) = next_wakeup {
if now.is_some_and(|now| next_wakeup > now) {
if now.map_or(false, |now| next_wakeup > now) {
// Reschedule and wait a bit longer in the next iteration
return Ok(());
}
@ -1190,13 +1179,13 @@ impl TaskImpl for JitterBufferTask {
if let Err(err) = res {
match err {
gst::FlowError::Eos => {
gst::debug!(CAT, obj = self.element, "Pushing EOS event");
gst::debug!(CAT, obj: self.element, "Pushing EOS event");
let _ = jb.src_pad.push_event(gst::event::Eos::new()).await;
}
gst::FlowError::Flushing => {
gst::debug!(CAT, obj = self.element, "Flushing")
gst::debug!(CAT, obj: self.element, "Flushing")
}
err => gst::error!(CAT, obj = self.element, "Error {}", err),
err => gst::error!(CAT, obj: self.element, "Error {}", err),
}
return Err(err);
@ -1212,7 +1201,7 @@ impl TaskImpl for JitterBufferTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.element, "Stopping task");
gst::log!(CAT, obj: self.element, "Stopping task");
let jb = self.element.imp();
let mut jb_state = jb.state.lock().unwrap();
@ -1226,7 +1215,7 @@ impl TaskImpl for JitterBufferTask {
*jb_state = State::default();
gst::log!(CAT, obj = self.element, "Task stopped");
gst::log!(CAT, obj: self.element, "Task stopped");
Ok(())
}
.boxed()
@ -1253,7 +1242,7 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
impl JitterBuffer {
fn clear_pt_map(&self) {
gst::debug!(CAT, imp = self, "Clearing PT map");
gst::debug!(CAT, imp: self, "Clearing PT map");
let mut state = self.state.lock().unwrap();
state.clock_rate = None;
@ -1261,7 +1250,7 @@ impl JitterBuffer {
}
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Preparing");
gst::debug!(CAT, imp: self, "Preparing");
let context = {
let settings = self.settings.lock().unwrap();
@ -1275,28 +1264,28 @@ impl JitterBuffer {
)
.block_on()?;
gst::debug!(CAT, imp = self, "Prepared");
gst::debug!(CAT, imp: self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp = self, "Unpreparing");
gst::debug!(CAT, imp: self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
gst::debug!(CAT, imp = self, "Unprepared");
gst::debug!(CAT, imp: self, "Unprepared");
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Starting");
gst::debug!(CAT, imp: self, "Starting");
self.task.start().block_on()?;
gst::debug!(CAT, imp = self, "Started");
gst::debug!(CAT, imp: self, "Started");
Ok(())
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Stopping");
gst::debug!(CAT, imp: self, "Stopping");
self.task.stop().block_on()?;
gst::debug!(CAT, imp = self, "Stopped");
gst::debug!(CAT, imp: self, "Stopped");
Ok(())
}
}
@ -1539,7 +1528,7 @@ impl ElementImpl for JitterBuffer {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {


@ -91,7 +91,7 @@ impl RTPJitterBufferItem {
r#type: 0,
dts: dts.into().into_glib(),
pts: pts.into().into_glib(),
seqnum: seqnum.map(|s| s as u32).unwrap_or(u32::MAX),
seqnum: seqnum.map(|s| s as u32).unwrap_or(std::u32::MAX),
count: 1,
rtptime,
},
@ -138,7 +138,7 @@ impl RTPJitterBufferItem {
pub fn seqnum(&self) -> Option<u16> {
unsafe {
let item = self.0.as_ref().expect("Invalid wrapper");
if item.as_ref().seqnum == u32::MAX {
if item.as_ref().seqnum == std::u32::MAX {
None
} else {
Some(item.as_ref().seqnum as u16)
@ -306,7 +306,7 @@ impl RTPJitterBuffer {
let pts = from_glib(pts.assume_init());
let seqnum = seqnum.assume_init();
let seqnum = if seqnum == u32::MAX {
let seqnum = if seqnum == std::u32::MAX {
None
} else {
Some(seqnum as u16)
@ -339,7 +339,7 @@ impl RTPJitterBuffer {
(None, None)
} else {
let seqnum = (*item).seqnum;
let seqnum = if seqnum == u32::MAX {
let seqnum = if seqnum == std::u32::MAX {
None
} else {
Some(seqnum as u16)

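// Sketch of the seqnum sentinel convention used by the wrappers above, keeping
// the `std::u32::MAX` spelling of this branch: the C-side field is a u32 where
// the maximum value means "no seqnum", otherwise it carries an RTP sequence
// number that fits in a u16.
fn seqnum_to_ffi(seqnum: Option<u16>) -> u32 {
    seqnum.map(|s| s as u32).unwrap_or(std::u32::MAX)
}

fn seqnum_from_ffi(raw: u32) -> Option<u16> {
    if raw == std::u32::MAX {
        None
    } else {
        Some(raw as u16)
    }
}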

@ -31,6 +31,7 @@ use std::collections::{HashMap, VecDeque};
use std::sync::{Arc, Weak};
use std::sync::{Mutex, MutexGuard};
use std::time::Duration;
use std::{u32, u64};
use crate::runtime::prelude::*;
use crate::runtime::{Context, PadSink, PadSinkWeak, PadSrc, PadSrcWeak, Task};
@ -217,7 +218,7 @@ impl PadSinkHandler for ProxySinkPadHandler {
buffer: gst::Buffer,
) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
async move {
gst::log!(SINK_CAT, obj = pad, "Handling {:?}", buffer);
gst::log!(SINK_CAT, obj: pad, "Handling {:?}", buffer);
let imp = elem.imp();
imp.enqueue_item(DataQueueItem::Buffer(buffer)).await
}
@ -231,7 +232,7 @@ impl PadSinkHandler for ProxySinkPadHandler {
list: gst::BufferList,
) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
async move {
gst::log!(SINK_CAT, obj = pad, "Handling {:?}", list);
gst::log!(SINK_CAT, obj: pad, "Handling {:?}", list);
let imp = elem.imp();
imp.enqueue_item(DataQueueItem::BufferList(list)).await
}
@ -239,7 +240,7 @@ impl PadSinkHandler for ProxySinkPadHandler {
}
fn sink_event(self, pad: &gst::Pad, imp: &ProxySink, event: gst::Event) -> bool {
gst::debug!(SINK_CAT, obj = pad, "Handling non-serialized {:?}", event);
gst::debug!(SINK_CAT, obj: pad, "Handling non-serialized {:?}", event);
let src_pad = {
let proxy_ctx = imp.proxy_ctx.lock().unwrap();
@ -257,12 +258,12 @@ impl PadSinkHandler for ProxySinkPadHandler {
}
if let Some(src_pad) = src_pad {
gst::log!(SINK_CAT, obj = pad, "Forwarding non-serialized {:?}", event);
gst::log!(SINK_CAT, obj: pad, "Forwarding non-serialized {:?}", event);
src_pad.push_event(event)
} else {
gst::error!(
SINK_CAT,
obj = pad,
obj: pad,
"No src pad to forward non-serialized {:?} to",
event
);
@ -277,7 +278,7 @@ impl PadSinkHandler for ProxySinkPadHandler {
event: gst::Event,
) -> BoxFuture<'static, bool> {
async move {
gst::log!(SINK_CAT, obj = pad, "Handling serialized {:?}", event);
gst::log!(SINK_CAT, obj: pad, "Handling serialized {:?}", event);
let imp = elem.imp();
@ -290,7 +291,7 @@ impl PadSinkHandler for ProxySinkPadHandler {
_ => (),
}
gst::log!(SINK_CAT, obj = pad, "Queuing serialized {:?}", event);
gst::log!(SINK_CAT, obj: pad, "Queuing serialized {:?}", event);
imp.enqueue_item(DataQueueItem::Event(event)).await.is_ok()
}
.boxed()
@ -319,7 +320,7 @@ impl ProxySink {
let proxy_ctx = self.proxy_ctx.lock().unwrap();
let mut shared_ctx = proxy_ctx.as_ref().unwrap().lock_shared();
gst::log!(SINK_CAT, imp = self, "Trying to empty pending queue");
gst::log!(SINK_CAT, imp: self, "Trying to empty pending queue");
let ProxyContextInner {
pending_queue: ref mut pq,
@ -344,7 +345,7 @@ impl ProxySink {
receiver
} else {
gst::log!(SINK_CAT, imp = self, "Pending queue is empty now");
gst::log!(SINK_CAT, imp: self, "Pending queue is empty now");
*pq = None;
return;
}
@ -355,13 +356,13 @@ impl ProxySink {
receiver
}
} else {
gst::log!(SINK_CAT, imp = self, "Flushing, dropping pending queue");
gst::log!(SINK_CAT, imp: self, "Flushing, dropping pending queue");
*pq = None;
return;
}
};
gst::log!(SINK_CAT, imp = self, "Waiting for more queue space");
gst::log!(SINK_CAT, imp: self, "Waiting for more queue space");
let _ = more_queue_space_receiver.await;
}
}
@ -431,18 +432,18 @@ impl ProxySink {
gst::log!(
SINK_CAT,
imp = self,
imp: self,
"Proxy is full - Pushing first item on pending queue"
);
if schedule_now {
gst::log!(SINK_CAT, imp = self, "Scheduling pending queue now");
gst::log!(SINK_CAT, imp: self, "Scheduling pending queue now");
pending_queue.scheduled = true;
let wait_fut = self.schedule_pending_queue();
Some(wait_fut)
} else {
gst::log!(SINK_CAT, imp = self, "Scheduling pending queue later");
gst::log!(SINK_CAT, imp: self, "Scheduling pending queue later");
None
}
@ -462,7 +463,7 @@ impl ProxySink {
};
if let Some(wait_fut) = wait_fut {
gst::log!(SINK_CAT, imp = self, "Blocking until queue has space again");
gst::log!(SINK_CAT, imp: self, "Blocking until queue has space again");
wait_fut.await;
}
@ -472,7 +473,7 @@ impl ProxySink {
}
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(SINK_CAT, imp = self, "Preparing");
gst::debug!(SINK_CAT, imp: self, "Preparing");
let proxy_context = self.settings.lock().unwrap().proxy_context.to_string();
@ -491,22 +492,22 @@ impl ProxySink {
*self.proxy_ctx.lock().unwrap() = Some(proxy_ctx);
gst::debug!(SINK_CAT, imp = self, "Prepared");
gst::debug!(SINK_CAT, imp: self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(SINK_CAT, imp = self, "Unpreparing");
gst::debug!(SINK_CAT, imp: self, "Unpreparing");
*self.proxy_ctx.lock().unwrap() = None;
gst::debug!(SINK_CAT, imp = self, "Unprepared");
gst::debug!(SINK_CAT, imp: self, "Unprepared");
}
fn start(&self) {
let proxy_ctx = self.proxy_ctx.lock().unwrap();
let mut shared_ctx = proxy_ctx.as_ref().unwrap().lock_shared();
gst::debug!(SINK_CAT, imp = self, "Starting");
gst::debug!(SINK_CAT, imp: self, "Starting");
{
let settings = self.settings.lock().unwrap();
@ -516,19 +517,19 @@ impl ProxySink {
shared_ctx.last_res = Ok(gst::FlowSuccess::Ok);
gst::debug!(SINK_CAT, imp = self, "Started");
gst::debug!(SINK_CAT, imp: self, "Started");
}
fn stop(&self) {
let proxy_ctx = self.proxy_ctx.lock().unwrap();
let mut shared_ctx = proxy_ctx.as_ref().unwrap().lock_shared();
gst::debug!(SINK_CAT, imp = self, "Stopping");
gst::debug!(SINK_CAT, imp: self, "Stopping");
let _ = shared_ctx.pending_queue.take();
shared_ctx.last_res = Err(gst::FlowError::Flushing);
gst::debug!(SINK_CAT, imp = self, "Stopped");
gst::debug!(SINK_CAT, imp: self, "Stopped");
}
}
@ -631,7 +632,7 @@ impl ElementImpl for ProxySink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(SINK_CAT, imp = self, "Changing state {:?}", transition);
gst::trace!(SINK_CAT, imp: self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {
@ -666,7 +667,7 @@ impl PadSrcHandler for ProxySrcPadHandler {
type ElementImpl = ProxySrc;
fn src_event(self, pad: &gst::Pad, imp: &ProxySrc, event: gst::Event) -> bool {
gst::log!(SRC_CAT, obj = pad, "Handling {:?}", event);
gst::log!(SRC_CAT, obj: pad, "Handling {:?}", event);
let sink_pad = {
let proxy_ctx = imp.proxy_ctx.lock().unwrap();
@ -683,7 +684,7 @@ impl PadSrcHandler for ProxySrcPadHandler {
match event.view() {
EventView::FlushStart(..) => {
if let Err(err) = imp.task.flush_start().await_maybe_on_context() {
gst::error!(SRC_CAT, obj = pad, "FlushStart failed {:?}", err);
gst::error!(SRC_CAT, obj: pad, "FlushStart failed {:?}", err);
gst::element_imp_error!(
imp,
gst::StreamError::Failed,
@ -695,7 +696,7 @@ impl PadSrcHandler for ProxySrcPadHandler {
}
EventView::FlushStop(..) => {
if let Err(err) = imp.task.flush_stop().await_maybe_on_context() {
gst::error!(SRC_CAT, obj = pad, "FlushStop failed {:?}", err);
gst::error!(SRC_CAT, obj: pad, "FlushStop failed {:?}", err);
gst::element_imp_error!(
imp,
gst::StreamError::Failed,
@ -709,16 +710,16 @@ impl PadSrcHandler for ProxySrcPadHandler {
}
if let Some(sink_pad) = sink_pad {
gst::log!(SRC_CAT, obj = pad, "Forwarding {:?}", event);
gst::log!(SRC_CAT, obj: pad, "Forwarding {:?}", event);
sink_pad.push_event(event)
} else {
gst::error!(SRC_CAT, obj = pad, "No sink pad to forward {:?} to", event);
gst::error!(SRC_CAT, obj: pad, "No sink pad to forward {:?} to", event);
false
}
}
fn src_query(self, pad: &gst::Pad, _proxysrc: &ProxySrc, query: &mut gst::QueryRef) -> bool {
gst::log!(SRC_CAT, obj = pad, "Handling {:?}", query);
gst::log!(SRC_CAT, obj: pad, "Handling {:?}", query);
use gst::QueryViewMut;
let ret = match query.view_mut() {
@ -750,9 +751,9 @@ impl PadSrcHandler for ProxySrcPadHandler {
};
if ret {
gst::log!(SRC_CAT, obj = pad, "Handled {:?}", query);
gst::log!(SRC_CAT, obj: pad, "Handled {:?}", query);
} else {
gst::log!(SRC_CAT, obj = pad, "Didn't handle {:?}", query);
gst::log!(SRC_CAT, obj: pad, "Didn't handle {:?}", query);
}
ret
@ -783,15 +784,15 @@ impl ProxySrcTask {
match item {
DataQueueItem::Buffer(buffer) => {
gst::log!(SRC_CAT, obj = self.element, "Forwarding {:?}", buffer);
gst::log!(SRC_CAT, obj: self.element, "Forwarding {:?}", buffer);
proxysrc.src_pad.push(buffer).await.map(drop)
}
DataQueueItem::BufferList(list) => {
gst::log!(SRC_CAT, obj = self.element, "Forwarding {:?}", list);
gst::log!(SRC_CAT, obj: self.element, "Forwarding {:?}", list);
proxysrc.src_pad.push_list(list).await.map(drop)
}
DataQueueItem::Event(event) => {
gst::log!(SRC_CAT, obj = self.element, "Forwarding {:?}", event);
gst::log!(SRC_CAT, obj: self.element, "Forwarding {:?}", event);
proxysrc.src_pad.push_event(event).await;
Ok(())
}
@ -804,7 +805,7 @@ impl TaskImpl for ProxySrcTask {
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(SRC_CAT, obj = self.element, "Starting task");
gst::log!(SRC_CAT, obj: self.element, "Starting task");
let proxysrc = self.element.imp();
let proxy_ctx = proxysrc.proxy_ctx.lock().unwrap();
@ -818,7 +819,7 @@ impl TaskImpl for ProxySrcTask {
self.dataqueue.start();
gst::log!(SRC_CAT, obj = self.element, "Task started");
gst::log!(SRC_CAT, obj: self.element, "Task started");
Ok(())
}
.boxed()
@ -840,25 +841,25 @@ impl TaskImpl for ProxySrcTask {
let proxysrc = self.element.imp();
match res {
Ok(()) => {
gst::log!(SRC_CAT, obj = self.element, "Successfully pushed item");
gst::log!(SRC_CAT, obj: self.element, "Successfully pushed item");
let proxy_ctx = proxysrc.proxy_ctx.lock().unwrap();
let mut shared_ctx = proxy_ctx.as_ref().unwrap().lock_shared();
shared_ctx.last_res = Ok(gst::FlowSuccess::Ok);
}
Err(gst::FlowError::Flushing) => {
gst::debug!(SRC_CAT, obj = self.element, "Flushing");
gst::debug!(SRC_CAT, obj: self.element, "Flushing");
let proxy_ctx = proxysrc.proxy_ctx.lock().unwrap();
let mut shared_ctx = proxy_ctx.as_ref().unwrap().lock_shared();
shared_ctx.last_res = Err(gst::FlowError::Flushing);
}
Err(gst::FlowError::Eos) => {
gst::debug!(SRC_CAT, obj = self.element, "EOS");
gst::debug!(SRC_CAT, obj: self.element, "EOS");
let proxy_ctx = proxysrc.proxy_ctx.lock().unwrap();
let mut shared_ctx = proxy_ctx.as_ref().unwrap().lock_shared();
shared_ctx.last_res = Err(gst::FlowError::Eos);
}
Err(err) => {
gst::error!(SRC_CAT, obj = self.element, "Got error {}", err);
gst::error!(SRC_CAT, obj: self.element, "Got error {}", err);
gst::element_error!(
&self.element,
gst::StreamError::Failed,
@ -878,7 +879,7 @@ impl TaskImpl for ProxySrcTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(SRC_CAT, obj = self.element, "Stopping task");
gst::log!(SRC_CAT, obj: self.element, "Stopping task");
let proxysrc = self.element.imp();
let proxy_ctx = proxysrc.proxy_ctx.lock().unwrap();
@ -893,7 +894,7 @@ impl TaskImpl for ProxySrcTask {
pending_queue.notify_more_queue_space();
}
gst::log!(SRC_CAT, obj = self.element, "Task stopped");
gst::log!(SRC_CAT, obj: self.element, "Task stopped");
Ok(())
}
.boxed()
@ -901,7 +902,7 @@ impl TaskImpl for ProxySrcTask {
fn flush_start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(SRC_CAT, obj = self.element, "Starting task flush");
gst::log!(SRC_CAT, obj: self.element, "Starting task flush");
let proxysrc = self.element.imp();
let proxy_ctx = proxysrc.proxy_ctx.lock().unwrap();
@ -911,7 +912,7 @@ impl TaskImpl for ProxySrcTask {
shared_ctx.last_res = Err(gst::FlowError::Flushing);
gst::log!(SRC_CAT, obj = self.element, "Task flush started");
gst::log!(SRC_CAT, obj: self.element, "Task flush started");
Ok(())
}
.boxed()
@ -937,7 +938,7 @@ static SRC_CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
impl ProxySrc {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(SRC_CAT, imp = self, "Preparing");
gst::debug!(SRC_CAT, imp: self, "Preparing");
let settings = self.settings.lock().unwrap().clone();
@ -991,13 +992,13 @@ impl ProxySrc {
.prepare(ProxySrcTask::new(self.obj().clone(), dataqueue), ts_ctx)
.block_on()?;
gst::debug!(SRC_CAT, imp = self, "Prepared");
gst::debug!(SRC_CAT, imp: self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(SRC_CAT, imp = self, "Unpreparing");
gst::debug!(SRC_CAT, imp: self, "Unpreparing");
{
let settings = self.settings.lock().unwrap();
@ -1010,27 +1011,27 @@ impl ProxySrc {
*self.dataqueue.lock().unwrap() = None;
*self.proxy_ctx.lock().unwrap() = None;
gst::debug!(SRC_CAT, imp = self, "Unprepared");
gst::debug!(SRC_CAT, imp: self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(SRC_CAT, imp = self, "Stopping");
gst::debug!(SRC_CAT, imp: self, "Stopping");
self.task.stop().await_maybe_on_context()?;
gst::debug!(SRC_CAT, imp = self, "Stopped");
gst::debug!(SRC_CAT, imp: self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(SRC_CAT, imp = self, "Starting");
gst::debug!(SRC_CAT, imp: self, "Starting");
self.task.start().await_maybe_on_context()?;
gst::debug!(SRC_CAT, imp = self, "Started");
gst::debug!(SRC_CAT, imp: self, "Started");
Ok(())
}
fn pause(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(SRC_CAT, imp = self, "Pausing");
gst::debug!(SRC_CAT, imp: self, "Pausing");
self.task.pause().block_on()?;
gst::debug!(SRC_CAT, imp = self, "Paused");
gst::debug!(SRC_CAT, imp: self, "Paused");
Ok(())
}
}
@ -1190,7 +1191,7 @@ impl ElementImpl for ProxySrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(SRC_CAT, imp = self, "Changing state {:?}", transition);
gst::trace!(SRC_CAT, imp: self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {

View file
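Most hunks in the file above (and in the files that follow) only flip the field syntax accepted by the gstreamer-rs logging macros: one side of the compare uses `imp: self` / `obj: pad`, the other `imp = self` / `obj = pad`. The sketch below contrasts the two spellings; it is not code from this repository, it assumes the `gstreamer` crate is imported under its conventional `gst` alias as in the surrounding code, and which spelling compiles depends on the gstreamer-rs release a branch is pinned to.

```rust
// Minimal sketch (not from this repository) of the two logging-macro field
// syntaxes seen in these hunks. Assumes `gstreamer` is aliased as `gst`.
use once_cell::sync::Lazy;

static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
    gst::DebugCategory::new("example", gst::DebugColorFlags::empty(), Some("Example"))
});

fn log_state_change(element: &gst::Element, transition: gst::StateChange) {
    // Colon syntax, as on the 0.22-era side of these hunks:
    gst::trace!(CAT, obj: element, "Changing state {:?}", transition);
    // Equals syntax, as on the other side (newer gstreamer-rs); only one of
    // the two forms compiles for a given release, so this one is commented out:
    // gst::trace!(CAT, obj = element, "Changing state {:?}", transition);
}

fn main() {
    gst::init().unwrap();
    let identity = gst::ElementFactory::make("identity").build().unwrap();
    log_state_change(&identity, gst::StateChange::NullToReady);
}
```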

@ -30,6 +30,7 @@ use once_cell::sync::Lazy;
use std::collections::VecDeque;
use std::sync::Mutex;
use std::time::Duration;
use std::{u32, u64};
use crate::runtime::prelude::*;
use crate::runtime::{Context, PadSink, PadSrc, Task};
@ -89,7 +90,7 @@ impl PadSinkHandler for QueuePadSinkHandler {
buffer: gst::Buffer,
) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
async move {
gst::log!(CAT, obj = pad, "Handling {:?}", buffer);
gst::log!(CAT, obj: pad, "Handling {:?}", buffer);
let imp = elem.imp();
imp.enqueue_item(DataQueueItem::Buffer(buffer)).await
}
@ -103,7 +104,7 @@ impl PadSinkHandler for QueuePadSinkHandler {
list: gst::BufferList,
) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
async move {
gst::log!(CAT, obj = pad, "Handling {:?}", list);
gst::log!(CAT, obj: pad, "Handling {:?}", list);
let imp = elem.imp();
imp.enqueue_item(DataQueueItem::BufferList(list)).await
}
@ -111,11 +112,11 @@ impl PadSinkHandler for QueuePadSinkHandler {
}
fn sink_event(self, pad: &gst::Pad, imp: &Queue, event: gst::Event) -> bool {
gst::debug!(CAT, obj = pad, "Handling non-serialized {:?}", event);
gst::debug!(CAT, obj: pad, "Handling non-serialized {:?}", event);
if let gst::EventView::FlushStart(..) = event.view() {
if let Err(err) = imp.task.flush_start().await_maybe_on_context() {
gst::error!(CAT, obj = pad, "FlushStart failed {:?}", err);
gst::error!(CAT, obj: pad, "FlushStart failed {:?}", err);
gst::element_imp_error!(
imp,
gst::StreamError::Failed,
@ -126,7 +127,7 @@ impl PadSinkHandler for QueuePadSinkHandler {
}
}
gst::log!(CAT, obj = pad, "Forwarding non-serialized {:?}", event);
gst::log!(CAT, obj: pad, "Forwarding non-serialized {:?}", event);
imp.src_pad.gst_pad().push_event(event)
}
@ -137,13 +138,13 @@ impl PadSinkHandler for QueuePadSinkHandler {
event: gst::Event,
) -> BoxFuture<'static, bool> {
async move {
gst::log!(CAT, obj = pad, "Handling serialized {:?}", event);
gst::log!(CAT, obj: pad, "Handling serialized {:?}", event);
let imp = elem.imp();
if let gst::EventView::FlushStop(..) = event.view() {
if let Err(err) = imp.task.flush_stop().await_maybe_on_context() {
gst::error!(CAT, obj = pad, "FlushStop failed {:?}", err);
gst::error!(CAT, obj: pad, "FlushStop failed {:?}", err);
gst::element_imp_error!(
imp,
gst::StreamError::Failed,
@ -154,21 +155,21 @@ impl PadSinkHandler for QueuePadSinkHandler {
}
}
gst::log!(CAT, obj = pad, "Queuing serialized {:?}", event);
gst::log!(CAT, obj: pad, "Queuing serialized {:?}", event);
imp.enqueue_item(DataQueueItem::Event(event)).await.is_ok()
}
.boxed()
}
fn sink_query(self, pad: &gst::Pad, imp: &Queue, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj = pad, "Handling {:?}", query);
gst::log!(CAT, obj: pad, "Handling {:?}", query);
if query.is_serialized() {
// FIXME: How can we do this?
gst::log!(CAT, obj = pad, "Dropping serialized {:?}", query);
gst::log!(CAT, obj: pad, "Dropping serialized {:?}", query);
false
} else {
gst::log!(CAT, obj = pad, "Forwarding {:?}", query);
gst::log!(CAT, obj: pad, "Forwarding {:?}", query);
imp.src_pad.gst_pad().peer_query(query)
}
}
@ -181,18 +182,18 @@ impl PadSrcHandler for QueuePadSrcHandler {
type ElementImpl = Queue;
fn src_event(self, pad: &gst::Pad, imp: &Queue, event: gst::Event) -> bool {
gst::log!(CAT, obj = pad, "Handling {:?}", event);
gst::log!(CAT, obj: pad, "Handling {:?}", event);
use gst::EventView;
match event.view() {
EventView::FlushStart(..) => {
if let Err(err) = imp.task.flush_start().await_maybe_on_context() {
gst::error!(CAT, obj = pad, "FlushStart failed {:?}", err);
gst::error!(CAT, obj: pad, "FlushStart failed {:?}", err);
}
}
EventView::FlushStop(..) => {
if let Err(err) = imp.task.flush_stop().await_maybe_on_context() {
gst::error!(CAT, obj = pad, "FlushStop failed {:?}", err);
gst::error!(CAT, obj: pad, "FlushStop failed {:?}", err);
gst::element_imp_error!(
imp,
gst::StreamError::Failed,
@ -205,12 +206,12 @@ impl PadSrcHandler for QueuePadSrcHandler {
_ => (),
}
gst::log!(CAT, obj = pad, "Forwarding {:?}", event);
gst::log!(CAT, obj: pad, "Forwarding {:?}", event);
imp.sink_pad.gst_pad().push_event(event)
}
fn src_query(self, pad: &gst::Pad, imp: &Queue, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj = pad, "Handling {:?}", query);
gst::log!(CAT, obj: pad, "Handling {:?}", query);
if let gst::QueryViewMut::Scheduling(q) = query.view_mut() {
let mut new_query = gst::query::Scheduling::new();
@ -219,7 +220,7 @@ impl PadSrcHandler for QueuePadSrcHandler {
return res;
}
gst::log!(CAT, obj = pad, "Upstream returned {:?}", new_query);
gst::log!(CAT, obj: pad, "Upstream returned {:?}", new_query);
let (flags, min, max, align) = new_query.result();
q.set(flags, min, max, align);
@ -231,11 +232,11 @@ impl PadSrcHandler for QueuePadSrcHandler {
.filter(|m| m != &gst::PadMode::Pull)
.collect::<Vec<_>>(),
);
gst::log!(CAT, obj = pad, "Returning {:?}", q.query_mut());
gst::log!(CAT, obj: pad, "Returning {:?}", q.query_mut());
return true;
}
gst::log!(CAT, obj = pad, "Forwarding {:?}", query);
gst::log!(CAT, obj: pad, "Forwarding {:?}", query);
imp.sink_pad.gst_pad().peer_query(query)
}
}
@ -260,15 +261,15 @@ impl QueueTask {
match item {
DataQueueItem::Buffer(buffer) => {
gst::log!(CAT, obj = self.element, "Forwarding {:?}", buffer);
gst::log!(CAT, obj: self.element, "Forwarding {:?}", buffer);
queue.src_pad.push(buffer).await.map(drop)
}
DataQueueItem::BufferList(list) => {
gst::log!(CAT, obj = self.element, "Forwarding {:?}", list);
gst::log!(CAT, obj: self.element, "Forwarding {:?}", list);
queue.src_pad.push_list(list).await.map(drop)
}
DataQueueItem::Event(event) => {
gst::log!(CAT, obj = self.element, "Forwarding {:?}", event);
gst::log!(CAT, obj: self.element, "Forwarding {:?}", event);
queue.src_pad.push_event(event).await;
Ok(())
}
@ -281,7 +282,7 @@ impl TaskImpl for QueueTask {
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.element, "Starting task");
gst::log!(CAT, obj: self.element, "Starting task");
let queue = self.element.imp();
let mut last_res = queue.last_res.lock().unwrap();
@ -290,7 +291,7 @@ impl TaskImpl for QueueTask {
*last_res = Ok(gst::FlowSuccess::Ok);
gst::log!(CAT, obj = self.element, "Task started");
gst::log!(CAT, obj: self.element, "Task started");
Ok(())
}
.boxed()
@ -312,20 +313,20 @@ impl TaskImpl for QueueTask {
let queue = self.element.imp();
match res {
Ok(()) => {
gst::log!(CAT, obj = self.element, "Successfully pushed item");
gst::log!(CAT, obj: self.element, "Successfully pushed item");
*queue.last_res.lock().unwrap() = Ok(gst::FlowSuccess::Ok);
}
Err(gst::FlowError::Flushing) => {
gst::debug!(CAT, obj = self.element, "Flushing");
gst::debug!(CAT, obj: self.element, "Flushing");
*queue.last_res.lock().unwrap() = Err(gst::FlowError::Flushing);
}
Err(gst::FlowError::Eos) => {
gst::debug!(CAT, obj = self.element, "EOS");
gst::debug!(CAT, obj: self.element, "EOS");
*queue.last_res.lock().unwrap() = Err(gst::FlowError::Eos);
queue.src_pad.push_event(gst::event::Eos::new()).await;
}
Err(err) => {
gst::error!(CAT, obj = self.element, "Got error {}", err);
gst::error!(CAT, obj: self.element, "Got error {}", err);
gst::element_error!(
&self.element,
gst::StreamError::Failed,
@ -343,7 +344,7 @@ impl TaskImpl for QueueTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.element, "Stopping task");
gst::log!(CAT, obj: self.element, "Stopping task");
let queue = self.element.imp();
let mut last_res = queue.last_res.lock().unwrap();
@ -357,7 +358,7 @@ impl TaskImpl for QueueTask {
*last_res = Err(gst::FlowError::Flushing);
gst::log!(CAT, obj = self.element, "Task stopped");
gst::log!(CAT, obj: self.element, "Task stopped");
Ok(())
}
.boxed()
@ -365,7 +366,7 @@ impl TaskImpl for QueueTask {
fn flush_start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.element, "Starting task flush");
gst::log!(CAT, obj: self.element, "Starting task flush");
let queue = self.element.imp();
let mut last_res = queue.last_res.lock().unwrap();
@ -378,7 +379,7 @@ impl TaskImpl for QueueTask {
*last_res = Err(gst::FlowError::Flushing);
gst::log!(CAT, obj = self.element, "Task flush started");
gst::log!(CAT, obj: self.element, "Task flush started");
Ok(())
}
.boxed()
@ -453,7 +454,7 @@ impl Queue {
}
let mut pending_queue_grd = self.pending_queue.lock().unwrap();
gst::log!(CAT, imp = self, "Trying to empty pending queue");
gst::log!(CAT, imp: self, "Trying to empty pending queue");
if let Some(pending_queue) = pending_queue_grd.as_mut() {
let mut failed_item = None;
@ -470,17 +471,17 @@ impl Queue {
receiver
} else {
gst::log!(CAT, imp = self, "Pending queue is empty now");
gst::log!(CAT, imp: self, "Pending queue is empty now");
*pending_queue_grd = None;
return;
}
} else {
gst::log!(CAT, imp = self, "Flushing, dropping pending queue");
gst::log!(CAT, imp: self, "Flushing, dropping pending queue");
return;
}
};
gst::log!(CAT, imp = self, "Waiting for more queue space");
gst::log!(CAT, imp: self, "Waiting for more queue space");
let _ = more_queue_space_receiver.await;
}
}
@ -489,7 +490,7 @@ impl Queue {
let wait_fut = {
let dataqueue = self.dataqueue.lock().unwrap();
let dataqueue = dataqueue.as_ref().ok_or_else(|| {
gst::error!(CAT, imp = self, "No DataQueue");
gst::error!(CAT, imp: self, "No DataQueue");
gst::FlowError::Error
})?;
@ -518,18 +519,18 @@ impl Queue {
gst::log!(
CAT,
imp = self,
imp: self,
"Queue is full - Pushing first item on pending queue"
);
if schedule_now {
gst::log!(CAT, imp = self, "Scheduling pending queue now");
gst::log!(CAT, imp: self, "Scheduling pending queue now");
pending_queue.as_mut().unwrap().scheduled = true;
let wait_fut = self.schedule_pending_queue();
Some(wait_fut)
} else {
gst::log!(CAT, imp = self, "Scheduling pending queue later");
gst::log!(CAT, imp: self, "Scheduling pending queue later");
None
}
} else {
@ -542,7 +543,7 @@ impl Queue {
};
if let Some(wait_fut) = wait_fut {
gst::log!(CAT, imp = self, "Blocking until queue has space again");
gst::log!(CAT, imp: self, "Blocking until queue has space again");
wait_fut.await;
}
@ -550,7 +551,7 @@ impl Queue {
}
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Preparing");
gst::debug!(CAT, imp: self, "Preparing");
let settings = self.settings.lock().unwrap().clone();
@ -588,13 +589,13 @@ impl Queue {
.prepare(QueueTask::new(self.obj().clone(), dataqueue), context)
.block_on()?;
gst::debug!(CAT, imp = self, "Prepared");
gst::debug!(CAT, imp: self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp = self, "Unpreparing");
gst::debug!(CAT, imp: self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
@ -603,20 +604,20 @@ impl Queue {
*self.last_res.lock().unwrap() = Ok(gst::FlowSuccess::Ok);
gst::debug!(CAT, imp = self, "Unprepared");
gst::debug!(CAT, imp: self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Stopping");
gst::debug!(CAT, imp: self, "Stopping");
self.task.stop().await_maybe_on_context()?;
gst::debug!(CAT, imp = self, "Stopped");
gst::debug!(CAT, imp: self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Starting");
gst::debug!(CAT, imp: self, "Starting");
self.task.start().await_maybe_on_context()?;
gst::debug!(CAT, imp = self, "Started");
gst::debug!(CAT, imp: self, "Started");
Ok(())
}
}
@ -777,7 +778,7 @@ impl ElementImpl for Queue {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {

View file
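The queue hunks above (like the proxysink ones earlier) handle backpressure with a pending queue plus a one-shot "more queue space" notification: when the dataqueue is full, the incoming item is parked and the caller awaits a signal that the consumer fires after draining an item. Below is a stripped-down sketch of that handshake using futures primitives; the names and structure are illustrative, not the element's own code.

```rust
// Sketch (not project code) of the "wait for more queue space" handshake:
// a parked producer awaits a oneshot that the consumer fires after draining.
use futures::channel::oneshot;
use futures::executor::LocalPool;
use futures::task::LocalSpawnExt;

fn main() {
    let mut pool = LocalPool::new();
    let spawner = pool.spawner();
    let (more_space_tx, more_space_rx) = oneshot::channel::<()>();

    // Producer: the queue is "full", so it parks until space is signalled.
    spawner
        .spawn_local(async move {
            println!("queue full, waiting for space");
            let _ = more_space_rx.await;
            println!("space available, pushing the pending item");
        })
        .unwrap();

    // Consumer: drains an item and notifies the parked producer.
    spawner
        .spawn_local(async move {
            println!("drained an item, notifying producer");
            let _ = more_space_tx.send(());
        })
        .unwrap();

    // Run both tasks to completion on the current thread.
    pool.run();
}
```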

@ -541,7 +541,7 @@ unsafe impl<T: IoSafe + Write> IoSafe for std::io::BufWriter<T> {}
unsafe impl<T: IoSafe + Write> IoSafe for std::io::LineWriter<T> {}
unsafe impl<T: IoSafe + ?Sized> IoSafe for &mut T {}
unsafe impl<T: IoSafe + ?Sized> IoSafe for Box<T> {}
unsafe impl<T: Clone + IoSafe> IoSafe for std::borrow::Cow<'_, T> {}
unsafe impl<T: Clone + IoSafe + ?Sized> IoSafe for std::borrow::Cow<'_, T> {}
impl<T: Read + Send + 'static> AsyncRead for Async<T> {
fn poll_read(

View file

@ -57,7 +57,7 @@ const READ: usize = 0;
const WRITE: usize = 1;
thread_local! {
static CURRENT_REACTOR: RefCell<Option<Reactor>> = const { RefCell::new(None) };
static CURRENT_REACTOR: RefCell<Option<Reactor>> = RefCell::new(None);
}
#[derive(Debug)]

View file

@ -27,7 +27,7 @@ use super::{CallOnDrop, JoinHandle, Reactor};
use crate::runtime::RUNTIME_CAT;
thread_local! {
static CURRENT_SCHEDULER: RefCell<Option<HandleWeak>> = const { RefCell::new(None) };
static CURRENT_SCHEDULER: RefCell<Option<HandleWeak>> = RefCell::new(None);
}
#[derive(Debug)]
@ -301,7 +301,9 @@ impl Scheduler {
.borrow()
.as_ref()
.and_then(HandleWeak::upgrade)
.is_some_and(|cur| std::ptr::eq(self, Arc::as_ptr(&cur.0.scheduler)))
.map_or(false, |cur| {
std::ptr::eq(self, Arc::as_ptr(&cur.0.scheduler))
})
})
}
}

View file
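The scheduler hunk above toggles between `Option::is_some_and` and the older `map_or(false, …)` spelling. The two are equivalent; `is_some_and` simply requires a newer Rust toolchain (it was stabilized around 1.70), which is presumably why a release branch may keep the older form. A standalone illustration of the equivalence:

```rust
// Standalone illustration (not project code) of the equivalence the
// scheduler hunk switches between.
fn main() {
    let current: Option<u32> = Some(42);

    // Older spelling: available on any toolchain with Option::map_or.
    let matches_old = current.map_or(false, |v| v == 42);

    // Newer spelling: same result, reads more directly.
    let matches_new = current.is_some_and(|v| v == 42);

    assert_eq!(matches_old, matches_new);
    println!("both spellings agree: {matches_old}");
}
```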

@ -24,7 +24,7 @@ use super::CallOnDrop;
use crate::runtime::RUNTIME_CAT;
thread_local! {
static CURRENT_TASK_ID: Cell<Option<TaskId>> = const { Cell::new(None) };
static CURRENT_TASK_ID: Cell<Option<TaskId>> = Cell::new(None);
}
#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)]

View file
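The three runtime files above (reactor, scheduler, task) switch their `thread_local!` initializers between the plain expression form and the `const { … }` block form. The const form requires the initializer to be a constant expression and lets the compiler skip the lazy-initialization bookkeeping, at the cost of needing a newer toolchain. A minimal sketch of both forms, using a plain counter rather than the project's types:

```rust
// Minimal sketch (not project code) of the two thread_local! initializer
// forms the runtime hunks switch between.
use std::cell::Cell;

thread_local! {
    // Plain form: the initializer runs lazily on first access in each thread.
    static COUNTER_LAZY: Cell<u32> = Cell::new(0);

    // `const` block form: the initializer must be a constant expression,
    // which avoids the lazy-init bookkeeping but needs a newer toolchain.
    static COUNTER_CONST: Cell<u32> = const { Cell::new(0) };
}

fn main() {
    COUNTER_LAZY.with(|c| c.set(c.get() + 1));
    COUNTER_CONST.with(|c| c.set(c.get() + 1));
    COUNTER_LAZY.with(|c| println!("lazy: {}", c.get()));
    COUNTER_CONST.with(|c| println!("const: {}", c.get()));
}
```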

@ -129,7 +129,7 @@ pub trait PadSrcHandler: Clone + Send + Sync + 'static {
if pad.is_active() {
gst::debug!(
RUNTIME_CAT,
obj = pad,
obj: pad,
"Already activated in {:?} mode ",
pad.mode()
);
@ -137,12 +137,7 @@ pub trait PadSrcHandler: Clone + Send + Sync + 'static {
}
pad.activate_mode(gst::PadMode::Push, true).map_err(|err| {
gst::error!(
RUNTIME_CAT,
obj = pad,
"Error in PadSrc activate: {:?}",
err
);
gst::error!(RUNTIME_CAT, obj: pad, "Error in PadSrc activate: {:?}", err);
gst::loggable_error!(RUNTIME_CAT, "Error in PadSrc activate: {:?}", err)
})
}
@ -158,7 +153,7 @@ pub trait PadSrcHandler: Clone + Send + Sync + 'static {
}
fn src_event(self, pad: &gst::Pad, imp: &Self::ElementImpl, event: gst::Event) -> bool {
gst::log!(RUNTIME_CAT, obj = pad, "Handling {:?}", event);
gst::log!(RUNTIME_CAT, obj: pad, "Handling {:?}", event);
let elem = imp.obj();
// FIXME with GAT on `Self::ElementImpl`, we should be able to
@ -183,13 +178,13 @@ pub trait PadSrcHandler: Clone + Send + Sync + 'static {
}
fn src_query(self, pad: &gst::Pad, imp: &Self::ElementImpl, query: &mut gst::QueryRef) -> bool {
gst::log!(RUNTIME_CAT, obj = pad, "Handling {:?}", query);
gst::log!(RUNTIME_CAT, obj: pad, "Handling {:?}", query);
if query.is_serialized() {
// FIXME serialized queries should be handled with the dataflow
// but we can't return a `Future` because we couldn't honor QueryRef's lifetime
false
} else {
gst::log!(RUNTIME_CAT, obj = pad, "Handling {:?}", query);
gst::log!(RUNTIME_CAT, obj: pad, "Handling {:?}", query);
let elem = imp.obj();
// FIXME with GAT on `Self::ElementImpl`, we should be able to
@ -222,61 +217,48 @@ impl PadSrcInner {
}
pub async fn push(&self, buffer: gst::Buffer) -> Result<FlowSuccess, FlowError> {
gst::log!(RUNTIME_CAT, obj = self.gst_pad, "Pushing {:?}", buffer);
gst::log!(RUNTIME_CAT, obj: self.gst_pad, "Pushing {:?}", buffer);
let success = self.gst_pad.push(buffer).map_err(|err| {
gst::error!(
RUNTIME_CAT,
obj = self.gst_pad,
gst::error!(RUNTIME_CAT,
obj: self.gst_pad,
"Failed to push Buffer to PadSrc: {:?}",
err,
);
err
})?;
gst::log!(
RUNTIME_CAT,
obj = self.gst_pad,
"Processing any pending sub tasks"
);
gst::log!(RUNTIME_CAT, obj: self.gst_pad, "Processing any pending sub tasks");
Context::drain_sub_tasks().await?;
Ok(success)
}
pub async fn push_list(&self, list: gst::BufferList) -> Result<FlowSuccess, FlowError> {
gst::log!(RUNTIME_CAT, obj = self.gst_pad, "Pushing {:?}", list);
gst::log!(RUNTIME_CAT, obj: self.gst_pad, "Pushing {:?}", list);
let success = self.gst_pad.push_list(list).map_err(|err| {
gst::error!(
RUNTIME_CAT,
obj = self.gst_pad,
obj: self.gst_pad,
"Failed to push BufferList to PadSrc: {:?}",
err,
);
err
})?;
gst::log!(
RUNTIME_CAT,
obj = self.gst_pad,
"Processing any pending sub tasks"
);
gst::log!(RUNTIME_CAT, obj: self.gst_pad, "Processing any pending sub tasks");
Context::drain_sub_tasks().await?;
Ok(success)
}
pub async fn push_event(&self, event: gst::Event) -> bool {
gst::log!(RUNTIME_CAT, obj = self.gst_pad, "Pushing {:?}", event);
gst::log!(RUNTIME_CAT, obj: self.gst_pad, "Pushing {:?}", event);
let was_handled = self.gst_pad.push_event(event);
gst::log!(
RUNTIME_CAT,
obj = self.gst_pad,
"Processing any pending sub tasks"
);
gst::log!(RUNTIME_CAT, obj: self.gst_pad, "Processing any pending sub tasks");
if Context::drain_sub_tasks().await.is_err() {
return false;
}
@ -383,7 +365,7 @@ impl PadSrc {
H::ElementImpl::catch_panic_pad_function(
parent,
|| {
gst::error!(RUNTIME_CAT, obj = gst_pad, "Panic in PadSrc activate");
gst::error!(RUNTIME_CAT, obj: gst_pad, "Panic in PadSrc activate");
Err(gst::loggable_error!(
RUNTIME_CAT,
"Panic in PadSrc activate"
@ -401,7 +383,7 @@ impl PadSrc {
H::ElementImpl::catch_panic_pad_function(
parent,
|| {
gst::error!(RUNTIME_CAT, obj = gst_pad, "Panic in PadSrc activatemode");
gst::error!(RUNTIME_CAT, obj: gst_pad, "Panic in PadSrc activatemode");
Err(gst::loggable_error!(
RUNTIME_CAT,
"Panic in PadSrc activatemode"
@ -410,7 +392,7 @@ impl PadSrc {
move |imp| {
gst::log!(
RUNTIME_CAT,
obj = gst_pad,
obj: gst_pad,
"ActivateMode {:?}, {}",
mode,
active
@ -419,7 +401,7 @@ impl PadSrc {
if mode == gst::PadMode::Pull {
gst::error!(
RUNTIME_CAT,
obj = gst_pad,
obj: gst_pad,
"Pull mode not supported by PadSrc"
);
return Err(gst::loggable_error!(
@ -460,7 +442,7 @@ impl PadSrc {
} else {
gst::fixme!(
RUNTIME_CAT,
obj = gst_pad,
obj: gst_pad,
"Serialized Query not supported"
);
false
@ -525,7 +507,7 @@ pub trait PadSinkHandler: Clone + Send + Sync + 'static {
if pad.is_active() {
gst::debug!(
RUNTIME_CAT,
obj = pad,
obj: pad,
"Already activated in {:?} mode ",
pad.mode()
);
@ -535,7 +517,7 @@ pub trait PadSinkHandler: Clone + Send + Sync + 'static {
pad.activate_mode(gst::PadMode::Push, true).map_err(|err| {
gst::error!(
RUNTIME_CAT,
obj = pad,
obj: pad,
"Error in PadSink activate: {:?}",
err
);
@ -573,7 +555,7 @@ pub trait PadSinkHandler: Clone + Send + Sync + 'static {
fn sink_event(self, pad: &gst::Pad, imp: &Self::ElementImpl, event: gst::Event) -> bool {
assert!(!event.is_serialized());
gst::log!(RUNTIME_CAT, obj = pad, "Handling {:?}", event);
gst::log!(RUNTIME_CAT, obj: pad, "Handling {:?}", event);
let elem = imp.obj();
// FIXME with GAT on `Self::ElementImpl`, we should be able to
@ -599,7 +581,7 @@ pub trait PadSinkHandler: Clone + Send + Sync + 'static {
let element = unsafe { elem.unsafe_cast::<gst::Element>() };
async move {
gst::log!(RUNTIME_CAT, obj = pad, "Handling {:?}", event);
gst::log!(RUNTIME_CAT, obj: pad, "Handling {:?}", event);
gst::Pad::event_default(&pad, Some(&element), event)
}
@ -642,12 +624,12 @@ pub trait PadSinkHandler: Clone + Send + Sync + 'static {
query: &mut gst::QueryRef,
) -> bool {
if query.is_serialized() {
gst::log!(RUNTIME_CAT, obj = pad, "Dropping {:?}", query);
gst::log!(RUNTIME_CAT, obj: pad, "Dropping {:?}", query);
// FIXME serialized queries should be handled with the dataflow
// but we can't return a `Future` because we couldn't honor QueryRef's lifetime
false
} else {
gst::log!(RUNTIME_CAT, obj = pad, "Handling {:?}", query);
gst::log!(RUNTIME_CAT, obj: pad, "Handling {:?}", query);
let elem = imp.obj();
// FIXME with GAT on `Self::ElementImpl`, we should be able to
@ -782,7 +764,7 @@ impl PadSink {
H::ElementImpl::catch_panic_pad_function(
parent,
|| {
gst::error!(RUNTIME_CAT, obj = gst_pad, "Panic in PadSink activate");
gst::error!(RUNTIME_CAT, obj: gst_pad, "Panic in PadSink activate");
Err(gst::loggable_error!(
RUNTIME_CAT,
"Panic in PadSink activate"
@ -800,11 +782,7 @@ impl PadSink {
H::ElementImpl::catch_panic_pad_function(
parent,
|| {
gst::error!(
RUNTIME_CAT,
obj = gst_pad,
"Panic in PadSink activatemode"
);
gst::error!(RUNTIME_CAT, obj: gst_pad, "Panic in PadSink activatemode");
Err(gst::loggable_error!(
RUNTIME_CAT,
"Panic in PadSink activatemode"
@ -813,7 +791,7 @@ impl PadSink {
move |imp| {
gst::log!(
RUNTIME_CAT,
obj = gst_pad,
obj: gst_pad,
"ActivateMode {:?}, {}",
mode,
active
@ -822,7 +800,7 @@ impl PadSink {
if mode == gst::PadMode::Pull {
gst::error!(
RUNTIME_CAT,
obj = gst_pad,
obj: gst_pad,
"Pull mode not supported by PadSink"
);
return Err(gst::loggable_error!(
@ -945,7 +923,7 @@ impl PadSink {
} else {
gst::fixme!(
RUNTIME_CAT,
obj = gst_pad,
obj: gst_pad,
"Serialized Query not supported"
);
false

View file

@ -33,10 +33,7 @@ use std::net::UdpSocket;
use crate::runtime::Async;
#[cfg(unix)]
use std::os::{
fd::BorrowedFd,
unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd},
};
use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd};
#[cfg(windows)]
use std::os::windows::io::{AsRawSocket, FromRawSocket, IntoRawSocket, RawSocket};
@ -77,7 +74,7 @@ impl<T: SocketRead> Socket<T> {
buffer_pool.set_active(true).map_err(|err| {
gst::error!(
SOCKET_CAT,
obj = element,
obj: element,
"Failed to prepare socket: {}",
err
);
@ -125,7 +122,7 @@ impl<T: SocketRead> Socket<T> {
pub async fn try_next(
&mut self,
) -> Result<(gst::Buffer, Option<std::net::SocketAddr>), SocketError> {
gst::log!(SOCKET_CAT, obj = self.element, "Trying to read data");
gst::log!(SOCKET_CAT, obj: self.element, "Trying to read data");
if self.mapped_buffer.is_none() {
match self.buffer_pool.acquire_buffer(None) {
@ -133,12 +130,7 @@ impl<T: SocketRead> Socket<T> {
self.mapped_buffer = Some(buffer.into_mapped_buffer_writable().unwrap());
}
Err(err) => {
gst::debug!(
SOCKET_CAT,
obj = self.element,
"Failed to acquire buffer {:?}",
err
);
gst::debug!(SOCKET_CAT, obj: self.element, "Failed to acquire buffer {:?}", err);
return Err(SocketError::Gst(err));
}
}
@ -157,7 +149,7 @@ impl<T: SocketRead> Socket<T> {
// so as to display another message
gst::debug!(
SOCKET_CAT,
obj = self.element,
obj: self.element,
"Read {} bytes at {} (clock {})",
len,
running_time.display(),
@ -165,7 +157,7 @@ impl<T: SocketRead> Socket<T> {
);
running_time
} else {
gst::debug!(SOCKET_CAT, obj = self.element, "Read {} bytes", len);
gst::debug!(SOCKET_CAT, obj: self.element, "Read {} bytes", len);
gst::ClockTime::NONE
};
@ -181,7 +173,7 @@ impl<T: SocketRead> Socket<T> {
Ok((buffer, saddr))
}
Err(err) => {
gst::debug!(SOCKET_CAT, obj = self.element, "Read error {:?}", err);
gst::debug!(SOCKET_CAT, obj: self.element, "Read error {:?}", err);
Err(SocketError::Io(err))
}
@ -192,12 +184,7 @@ impl<T: SocketRead> Socket<T> {
impl<T: SocketRead> Drop for Socket<T> {
fn drop(&mut self) {
if let Err(err) = self.buffer_pool.set_active(false) {
gst::error!(
SOCKET_CAT,
obj = self.element,
"Failed to unprepare socket: {}",
err
);
gst::error!(SOCKET_CAT, obj: self.element, "Failed to unprepare socket: {}", err);
}
}
}
@ -234,14 +221,8 @@ impl GioSocketWrapper {
}
#[cfg(any(
target_os = "macos",
target_os = "ios",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "openbsd",
target_os = "netbsd",
target_os = "linux",
target_os = "android",
bsd,
linux_like,
target_os = "aix",
target_os = "fuchsia",
target_os = "haiku",
@ -255,30 +236,18 @@ impl GioSocketWrapper {
let socket = self.as_socket();
sockopt::set_ip_tos(
unsafe { BorrowedFd::borrow_raw(socket.as_raw_fd()) },
tos as u8,
)?;
sockopt::set_ip_tos(socket, tos)?;
if socket.family() == gio::SocketFamily::Ipv6 {
sockopt::set_ipv6_tclass(
unsafe { BorrowedFd::borrow_raw(socket.as_raw_fd()) },
tos as u32,
)?;
sockopt::set_ipv6_tclass(socket, tos)?;
}
Ok(())
}
#[cfg(not(any(
target_os = "macos",
target_os = "ios",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "openbsd",
target_os = "netbsd",
target_os = "linux",
target_os = "android",
bsd,
linux_like,
target_os = "aix",
target_os = "fuchsia",
target_os = "haiku",

View file

@ -31,6 +31,8 @@ use std::io;
use std::net::{IpAddr, SocketAddr, TcpStream};
use std::sync::Mutex;
use std::time::Duration;
use std::u16;
use std::u32;
use crate::runtime::prelude::*;
use crate::runtime::task;
@ -38,8 +40,6 @@ use crate::runtime::{Context, PadSrc, Task, TaskState};
use crate::runtime::Async;
use crate::socket::{Socket, SocketError, SocketRead};
use futures::channel::mpsc::{channel, Receiver, Sender};
use futures::pin_mut;
const DEFAULT_HOST: Option<&str> = Some("127.0.0.1");
const DEFAULT_PORT: i32 = 4953;
@ -48,11 +48,6 @@ const DEFAULT_BLOCKSIZE: u32 = 4096;
const DEFAULT_CONTEXT: &str = "";
const DEFAULT_CONTEXT_WAIT: Duration = Duration::ZERO;
#[derive(Debug, Default)]
struct State {
event_sender: Option<Sender<gst::Event>>,
}
#[derive(Debug, Clone)]
struct Settings {
host: Option<String>,
@ -102,7 +97,7 @@ impl PadSrcHandler for TcpClientSrcPadHandler {
type ElementImpl = TcpClientSrc;
fn src_event(self, pad: &gst::Pad, imp: &TcpClientSrc, event: gst::Event) -> bool {
gst::log!(CAT, obj = pad, "Handling {:?}", event);
gst::log!(CAT, obj: pad, "Handling {:?}", event);
use gst::EventView;
let ret = match event.view() {
@ -114,16 +109,16 @@ impl PadSrcHandler for TcpClientSrcPadHandler {
};
if ret {
gst::log!(CAT, obj = pad, "Handled {:?}", event);
gst::log!(CAT, obj: pad, "Handled {:?}", event);
} else {
gst::log!(CAT, obj = pad, "Didn't handle {:?}", event);
gst::log!(CAT, obj: pad, "Didn't handle {:?}", event);
}
ret
}
fn src_query(self, pad: &gst::Pad, imp: &TcpClientSrc, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj = pad, "Handling {:?}", query);
gst::log!(CAT, obj: pad, "Handling {:?}", query);
use gst::QueryViewMut;
let ret = match query.view_mut() {
@ -155,9 +150,9 @@ impl PadSrcHandler for TcpClientSrcPadHandler {
};
if ret {
gst::log!(CAT, obj = pad, "Handled {:?}", query);
gst::log!(CAT, obj: pad, "Handled {:?}", query);
} else {
gst::log!(CAT, obj = pad, "Didn't handle {:?}", query);
gst::log!(CAT, obj: pad, "Didn't handle {:?}", query);
}
ret
@ -171,16 +166,10 @@ struct TcpClientSrcTask {
socket: Option<Socket<TcpClientReader>>,
need_initial_events: bool,
need_segment: bool,
event_receiver: Receiver<gst::Event>,
}
impl TcpClientSrcTask {
fn new(
element: super::TcpClientSrc,
saddr: SocketAddr,
buffer_pool: gst::BufferPool,
event_receiver: Receiver<gst::Event>,
) -> Self {
fn new(element: super::TcpClientSrc, saddr: SocketAddr, buffer_pool: gst::BufferPool) -> Self {
TcpClientSrcTask {
element,
saddr,
@ -188,7 +177,6 @@ impl TcpClientSrcTask {
socket: None,
need_initial_events: true,
need_segment: true,
event_receiver,
}
}
@ -196,12 +184,12 @@ impl TcpClientSrcTask {
&mut self,
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::log!(CAT, obj = self.element, "Handling {:?}", buffer);
gst::log!(CAT, obj: self.element, "Handling {:?}", buffer);
let tcpclientsrc = self.element.imp();
if self.need_initial_events {
gst::debug!(CAT, obj = self.element, "Pushing initial events");
gst::debug!(CAT, obj: self.element, "Pushing initial events");
let stream_id = format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
let stream_start_evt = gst::event::StreamStart::builder(&stream_id)
@ -240,20 +228,20 @@ impl TcpClientSrcTask {
let res = tcpclientsrc.src_pad.push(buffer).await;
match res {
Ok(_) => {
gst::log!(CAT, obj = self.element, "Successfully pushed buffer");
gst::log!(CAT, obj: self.element, "Successfully pushed buffer");
}
Err(gst::FlowError::Flushing) => {
gst::debug!(CAT, obj = self.element, "Flushing");
gst::debug!(CAT, obj: self.element, "Flushing");
}
Err(gst::FlowError::Eos) => {
gst::debug!(CAT, obj = self.element, "EOS");
gst::debug!(CAT, obj: self.element, "EOS");
tcpclientsrc
.src_pad
.push_event(gst::event::Eos::new())
.await;
}
Err(err) => {
gst::error!(CAT, obj = self.element, "Got error {}", err);
gst::error!(CAT, obj: self.element, "Got error {}", err);
gst::element_error!(
self.element,
gst::StreamError::Failed,
@ -272,12 +260,7 @@ impl TaskImpl for TcpClientSrcTask {
fn prepare(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(
CAT,
obj = self.element,
"Preparing task connecting to {:?}",
self.saddr
);
gst::log!(CAT, obj: self.element, "Preparing task connecting to {:?}", self.saddr);
let socket = Async::<TcpStream>::connect(self.saddr)
.await
@ -302,7 +285,7 @@ impl TaskImpl for TcpClientSrcTask {
})?,
);
gst::log!(CAT, obj = self.element, "Task prepared");
gst::log!(CAT, obj: self.element, "Task prepared");
Ok(())
}
.boxed()
@ -330,58 +313,34 @@ impl TaskImpl for TcpClientSrcTask {
fn try_next(&mut self) -> BoxFuture<'_, Result<gst::Buffer, gst::FlowError>> {
async move {
let event_fut = self.event_receiver.next().fuse();
let socket_fut = self.socket.as_mut().unwrap().try_next().fuse();
pin_mut!(event_fut);
pin_mut!(socket_fut);
futures::select! {
event_res = event_fut => match event_res {
Some(event) => {
gst::debug!(CAT, obj = self.element, "Handling element level event {event:?}");
match event.view() {
gst::EventView::Eos(_) => Err(gst::FlowError::Eos),
ev => {
gst::error!(CAT, obj = self.element, "Unexpected event {ev:?} on channel");
Err(gst::FlowError::Error)
}
self.socket
.as_mut()
.unwrap()
.try_next()
.await
.map(|(buffer, _saddr)| buffer)
.map_err(|err| {
gst::error!(CAT, obj: self.element, "Got error {:?}", err);
match err {
SocketError::Gst(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {}", err]
);
}
SocketError::Io(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("I/O error"),
["streaming stopped, I/O error {}", err]
);
}
}
None => {
gst::error!(CAT, obj = self.element, "Unexpected return on event channel");
Err(gst::FlowError::Error)
}
},
socket_res = socket_fut => match socket_res {
Ok((buffer, _saddr)) => Ok(buffer),
Err(err) => {
gst::error!(CAT, obj = self.element, "Got error {err:#}");
match err {
SocketError::Gst(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {err}"]
);
}
SocketError::Io(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("I/O error"),
["streaming stopped, I/O error {err}"]
);
}
}
Err(gst::FlowError::Error)
}
},
}
gst::FlowError::Error
})
}
.boxed()
}
@ -392,9 +351,9 @@ impl TaskImpl for TcpClientSrcTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.element, "Stopping task");
gst::log!(CAT, obj: self.element, "Stopping task");
self.need_initial_events = true;
gst::log!(CAT, obj = self.element, "Task stopped");
gst::log!(CAT, obj: self.element, "Task stopped");
Ok(())
}
.boxed()
@ -402,47 +361,13 @@ impl TaskImpl for TcpClientSrcTask {
fn flush_stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.element, "Stopping task flush");
gst::log!(CAT, obj: self.element, "Stopping task flush");
self.need_initial_events = true;
gst::log!(CAT, obj = self.element, "Task flush stopped");
gst::log!(CAT, obj: self.element, "Task flush stopped");
Ok(())
}
.boxed()
}
fn handle_loop_error(&mut self, err: gst::FlowError) -> BoxFuture<'_, task::Trigger> {
async move {
match err {
gst::FlowError::Flushing => {
gst::debug!(CAT, obj = self.element, "Flushing");
task::Trigger::FlushStart
}
gst::FlowError::Eos => {
gst::debug!(CAT, obj = self.element, "EOS");
self.element
.imp()
.src_pad
.push_event(gst::event::Eos::new())
.await;
task::Trigger::Stop
}
err => {
gst::error!(CAT, obj = self.element, "Got error {err}");
gst::element_error!(
&self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {}", err]
);
task::Trigger::Error
}
}
}
.boxed()
}
}
pub struct TcpClientSrc {
@ -450,7 +375,6 @@ pub struct TcpClientSrc {
task: Task,
configured_caps: Mutex<Option<gst::Caps>>,
settings: Mutex<Settings>,
state: Mutex<State>,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
@ -463,7 +387,7 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
impl TcpClientSrc {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Preparing");
gst::debug!(CAT, imp: self, "Preparing");
let settings = self.settings.lock().unwrap().clone();
let context =
@ -507,59 +431,49 @@ impl TcpClientSrc {
let saddr = SocketAddr::new(host, port as u16);
let (sender, receiver) = channel(1);
// Don't block on `prepare` as the socket connection takes time.
// This will be performed in the background and we'll block on
// `start` which will also ensure `prepare` completed successfully.
let fut = self
.task
.prepare(
TcpClientSrcTask::new(self.obj().clone(), saddr, buffer_pool, receiver),
TcpClientSrcTask::new(self.obj().clone(), saddr, buffer_pool),
context,
)
.check()?;
drop(fut);
let mut state = self.state.lock().unwrap();
state.event_sender = Some(sender);
drop(state);
gst::debug!(CAT, imp = self, "Preparing asynchronously");
gst::debug!(CAT, imp: self, "Preparing asynchronously");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp = self, "Unpreparing");
gst::debug!(CAT, imp: self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
gst::debug!(CAT, imp = self, "Unprepared");
gst::debug!(CAT, imp: self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Stopping");
gst::debug!(CAT, imp: self, "Stopping");
self.task.stop().block_on()?;
gst::debug!(CAT, imp = self, "Stopped");
gst::debug!(CAT, imp: self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Starting");
gst::debug!(CAT, imp: self, "Starting");
self.task.start().block_on()?;
gst::debug!(CAT, imp = self, "Started");
gst::debug!(CAT, imp: self, "Started");
Ok(())
}
fn pause(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Pausing");
gst::debug!(CAT, imp: self, "Pausing");
self.task.pause().block_on()?;
gst::debug!(CAT, imp = self, "Paused");
gst::debug!(CAT, imp: self, "Paused");
Ok(())
}
fn state(&self) -> TaskState {
self.task.state()
}
}
#[glib::object_subclass]
@ -577,7 +491,6 @@ impl ObjectSubclass for TcpClientSrc {
task: Task::default(),
configured_caps: Default::default(),
settings: Default::default(),
state: Default::default(),
}
}
}
@ -713,7 +626,7 @@ impl ElementImpl for TcpClientSrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {
@ -751,31 +664,4 @@ impl ElementImpl for TcpClientSrc {
Ok(success)
}
fn send_event(&self, event: gst::Event) -> bool {
use gst::EventView;
gst::debug!(CAT, imp = self, "Handling element level event {event:?}");
match event.view() {
EventView::Eos(_) => {
if self.state() != TaskState::Started {
if let Err(err) = self.start() {
gst::error!(CAT, imp = self, "Failed to start task thread {err:?}");
}
}
if self.state() == TaskState::Started {
let mut state = self.state.lock().unwrap();
if let Some(event_tx) = state.event_sender.as_mut() {
return event_tx.try_send(event.clone()).is_ok();
}
}
false
}
_ => self.parent_send_event(event),
}
}
}

View file
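The tcpclientsrc hunks above add (on one side of the compare) an element-level event channel that is polled together with the socket via `futures::select!`, so an EOS sent through `send_event` can interrupt a pending read. Below is a standalone sketch of that pattern, not the element's actual code; the names, the stand-in "socket" future, and the `"eos"` control message are all illustrative.

```rust
// Sketch: race an mpsc control channel against an I/O future with
// futures::select!, so a control message (e.g. EOS) can interrupt a read.
use futures::channel::mpsc;
use futures::{pin_mut, FutureExt, StreamExt};

enum Item {
    Data(Vec<u8>),
    Eos,
}

async fn next_item(events: &mut mpsc::Receiver<&'static str>) -> Item {
    // Stand-in for `socket.try_next()`; a real source would await the network.
    let socket_fut = async { Item::Data(vec![0u8; 4]) }.fuse();
    let event_fut = events.next().fuse();
    pin_mut!(socket_fut, event_fut);

    futures::select! {
        ev = event_fut => match ev {
            Some("eos") | None => Item::Eos,
            Some(other) => {
                eprintln!("unexpected control message: {other}");
                Item::Eos
            }
        },
        item = socket_fut => item,
    }
}

fn main() {
    // Keep the sender alive so the channel stays open and the "socket"
    // branch wins the select in this toy run.
    let (_tx, mut rx) = mpsc::channel::<&'static str>(1);
    match futures::executor::block_on(next_item(&mut rx)) {
        Item::Data(data) => println!("pushed {} bytes downstream", data.len()),
        Item::Eos => println!("got EOS"),
    }
}
```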

@ -37,6 +37,8 @@ use std::collections::BTreeSet;
use std::net::{IpAddr, Ipv4Addr, SocketAddr, UdpSocket};
use std::sync::{Arc, Mutex};
use std::time::Duration;
use std::u16;
use std::u8;
const DEFAULT_HOST: Option<&str> = Some("127.0.0.1");
const DEFAULT_PORT: i32 = 5004;
@ -200,17 +202,17 @@ impl UdpSinkPadHandler {
futures::executor::block_on(async move {
let mut inner = self.0.lock().await;
if inner.clients.contains(&addr) {
gst::warning!(CAT, imp = imp, "Not adding client {addr:?} again");
gst::warning!(CAT, imp: imp, "Not adding client {addr:?} again");
return;
}
match inner.configure_client(&addr) {
Ok(()) => {
gst::info!(CAT, imp = imp, "Added client {addr:?}");
gst::info!(CAT, imp: imp, "Added client {addr:?}");
inner.clients.insert(addr);
}
Err(err) => {
gst::error!(CAT, imp = imp, "Failed to add client {addr:?}: {err}");
gst::error!(CAT, imp: imp, "Failed to add client {addr:?}: {err}");
imp.obj().post_error_message(err);
}
}
@ -221,16 +223,16 @@ impl UdpSinkPadHandler {
futures::executor::block_on(async move {
let mut inner = self.0.lock().await;
if inner.clients.take(&addr).is_none() {
gst::warning!(CAT, imp = imp, "Not removing unknown client {addr:?}");
gst::warning!(CAT, imp: imp, "Not removing unknown client {addr:?}");
return;
}
match inner.unconfigure_client(&addr) {
Ok(()) => {
gst::info!(CAT, imp = imp, "Removed client {addr:?}");
gst::info!(CAT, imp: imp, "Removed client {addr:?}");
}
Err(err) => {
gst::error!(CAT, imp = imp, "Failed to remove client {addr:?}: {err}");
gst::error!(CAT, imp: imp, "Failed to remove client {addr:?}: {err}");
imp.obj().post_error_message(err);
}
}
@ -241,9 +243,9 @@ impl UdpSinkPadHandler {
futures::executor::block_on(async move {
let mut inner = self.0.lock().await;
if new_clients.is_empty() {
gst::info!(CAT, imp = imp, "Clearing clients");
gst::info!(CAT, imp: imp, "Clearing clients");
} else {
gst::info!(CAT, imp = imp, "Replacing clients");
gst::info!(CAT, imp: imp, "Replacing clients");
}
let old_clients = std::mem::take(&mut inner.clients);
@ -255,19 +257,19 @@ impl UdpSinkPadHandler {
// client is already configured
inner.clients.insert(*addr);
} else if let Err(err) = inner.unconfigure_client(addr) {
gst::error!(CAT, imp = imp, "Failed to remove client {addr:?}: {err}");
gst::error!(CAT, imp: imp, "Failed to remove client {addr:?}: {err}");
res = Err(err);
} else {
gst::info!(CAT, imp = imp, "Removed client {addr:?}");
gst::info!(CAT, imp: imp, "Removed client {addr:?}");
}
}
for addr in new_clients.into_iter() {
if let Err(err) = inner.configure_client(&addr) {
gst::error!(CAT, imp = imp, "Failed to add client {addr:?}: {err}");
gst::error!(CAT, imp: imp, "Failed to add client {addr:?}: {err}");
res = Err(err);
} else {
gst::info!(CAT, imp = imp, "Added client {addr:?}");
gst::info!(CAT, imp: imp, "Added client {addr:?}");
inner.clients.insert(addr);
}
}
@ -319,7 +321,7 @@ impl PadSinkHandler for UdpSinkPadHandler {
event: gst::Event,
) -> BoxFuture<'static, bool> {
async move {
gst::debug!(CAT, obj = elem, "Handling {event:?}");
gst::debug!(CAT, obj: elem, "Handling {event:?}");
match event.view() {
EventView::Eos(_) => {
@ -343,7 +345,7 @@ impl PadSinkHandler for UdpSinkPadHandler {
}
fn sink_event(self, _pad: &gst::Pad, imp: &UdpSink, event: gst::Event) -> bool {
gst::debug!(CAT, imp = imp, "Handling {event:?}");
gst::debug!(CAT, imp: imp, "Handling {event:?}");
if let EventView::FlushStart(..) = event.view() {
block_on_or_add_sub_task(async move {
@ -554,7 +556,7 @@ impl UdpSinkPadHandlerInner {
};
if let Some(socket) = socket.as_mut() {
gst::log!(CAT, obj = elem, "Sending to {client:?}");
gst::log!(CAT, obj: elem, "Sending to {client:?}");
socket.send_to(&data, *client).await.map_err(|err| {
gst::element_error!(
elem,
@ -575,7 +577,7 @@ impl UdpSinkPadHandlerInner {
}
}
gst::log!(CAT, obj = elem, "Sent buffer {buffer:?} to all clients");
gst::log!(CAT, obj: elem, "Sent buffer {buffer:?} to all clients");
Ok(gst::FlowSuccess::Ok)
}
@ -585,7 +587,7 @@ impl UdpSinkPadHandlerInner {
let now = elem.current_running_time();
if let Ok(Some(delay)) = running_time.opt_checked_sub(now) {
gst::trace!(CAT, obj = elem, "sync: waiting {delay}");
gst::trace!(CAT, obj: elem, "sync: waiting {delay}");
runtime::timer::delay_for(delay.into()).await;
}
}
@ -596,7 +598,7 @@ impl UdpSinkPadHandlerInner {
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
if self.is_flushing {
gst::info!(CAT, obj = elem, "Discarding {buffer:?} (flushing)");
gst::info!(CAT, obj: elem, "Discarding {buffer:?} (flushing)");
return Err(gst::FlowError::Flushing);
}
@ -612,14 +614,14 @@ impl UdpSinkPadHandlerInner {
self.sync(elem, rtime).await;
if self.is_flushing {
gst::info!(CAT, obj = elem, "Discarding {buffer:?} (flushing)");
gst::info!(CAT, obj: elem, "Discarding {buffer:?} (flushing)");
return Err(gst::FlowError::Flushing);
}
}
}
gst::debug!(CAT, obj = elem, "Handling {buffer:?}");
gst::debug!(CAT, obj: elem, "Handling {buffer:?}");
self.render(elem, buffer).await.map_err(|err| {
element_error!(
@ -698,7 +700,7 @@ impl UdpSink {
};
let saddr = SocketAddr::new(bind_addr, bind_port as u16);
gst::debug!(CAT, imp = self, "Binding to {:?}", saddr);
gst::debug!(CAT, imp: self, "Binding to {:?}", saddr);
let socket = match family {
SocketFamily::Ipv4 => socket2::Socket::new(
@ -718,7 +720,7 @@ impl UdpSink {
Err(err) => {
gst::warning!(
CAT,
imp = self,
imp: self,
"Failed to create {} socket: {}",
match family {
SocketFamily::Ipv4 => "IPv4",
@ -771,7 +773,7 @@ impl UdpSink {
}
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Preparing");
gst::debug!(CAT, imp: self, "Preparing");
let mut settings = self.settings.lock().unwrap();
@ -789,36 +791,36 @@ impl UdpSink {
.prepare(self, socket, socket_v6, &settings)?;
*self.ts_ctx.lock().unwrap() = Some(ts_ctx);
gst::debug!(CAT, imp = self, "Started preparation");
gst::debug!(CAT, imp: self, "Started preparation");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp = self, "Unpreparing");
gst::debug!(CAT, imp: self, "Unpreparing");
self.sink_pad_handler.unprepare();
*self.ts_ctx.lock().unwrap() = None;
gst::debug!(CAT, imp = self, "Unprepared");
gst::debug!(CAT, imp: self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Stopping");
gst::debug!(CAT, imp: self, "Stopping");
self.sink_pad_handler.stop();
gst::debug!(CAT, imp = self, "Stopped");
gst::debug!(CAT, imp: self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Starting");
gst::debug!(CAT, imp: self, "Starting");
self.sink_pad_handler.start();
gst::debug!(CAT, imp = self, "Started");
gst::debug!(CAT, imp: self, "Started");
Ok(())
}
fn try_into_socket_addr(&self, host: &str, port: i32) -> Result<SocketAddr, ()> {
let addr: IpAddr = match host.parse() {
Err(err) => {
gst::error!(CAT, imp = self, "Failed to parse host {}: {}", host, err);
gst::error!(CAT, imp: self, "Failed to parse host {}: {}", host, err);
return Err(());
}
Ok(addr) => addr,
@ -826,7 +828,7 @@ impl UdpSink {
let port: u16 = match port.try_into() {
Err(err) => {
gst::error!(CAT, imp = self, "Invalid port {}: {}", port, err);
gst::error!(CAT, imp: self, "Invalid port {}: {}", port, err);
return Err(());
}
Ok(port) => port,
@ -1088,19 +1090,19 @@ impl ObjectImpl for UdpSink {
Err(()) => {
gst::error!(
CAT,
imp = self,
imp: self,
"Invalid socket address {addr}:{port}"
);
None
}
},
Err(err) => {
gst::error!(CAT, imp = self, "Invalid port {err}");
gst::error!(CAT, imp: self, "Invalid port {err}");
None
}
}
} else {
gst::error!(CAT, imp = self, "Invalid client {client}");
gst::error!(CAT, imp: self, "Invalid client {client}");
None
}
});
@ -1215,7 +1217,7 @@ impl ElementImpl for UdpSink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {

View file

@ -27,17 +27,17 @@ use gst_net::*;
use once_cell::sync::Lazy;
use std::i32;
use std::io;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, UdpSocket};
use std::sync::Mutex;
use std::time::Duration;
use std::u16;
use crate::runtime::prelude::*;
use crate::runtime::{task, Async, Context, PadSrc, Task, TaskState};
use crate::runtime::{Async, Context, PadSrc, Task};
use crate::socket::{wrap_socket, GioSocketWrapper, Socket, SocketError, SocketRead};
use futures::channel::mpsc::{channel, Receiver, Sender};
use futures::pin_mut;
const DEFAULT_ADDRESS: Option<&str> = Some("0.0.0.0");
const DEFAULT_PORT: i32 = 5004;
@ -49,13 +49,6 @@ const DEFAULT_USED_SOCKET: Option<GioSocketWrapper> = None;
const DEFAULT_CONTEXT: &str = "";
const DEFAULT_CONTEXT_WAIT: Duration = Duration::ZERO;
const DEFAULT_RETRIEVE_SENDER_ADDRESS: bool = true;
const DEFAULT_MULTICAST_LOOP: bool = true;
const DEFAULT_BUFFER_SIZE: u32 = 0;
#[derive(Debug, Default)]
struct State {
event_sender: Option<Sender<gst::Event>>,
}
#[derive(Debug, Clone)]
struct Settings {
@ -69,8 +62,6 @@ struct Settings {
context: String,
context_wait: Duration,
retrieve_sender_address: bool,
multicast_loop: bool,
buffer_size: u32,
}
impl Default for Settings {
@ -86,8 +77,6 @@ impl Default for Settings {
context: DEFAULT_CONTEXT.into(),
context_wait: DEFAULT_CONTEXT_WAIT,
retrieve_sender_address: DEFAULT_RETRIEVE_SENDER_ADDRESS,
multicast_loop: DEFAULT_MULTICAST_LOOP,
buffer_size: DEFAULT_BUFFER_SIZE,
}
}
}
@ -125,7 +114,7 @@ impl PadSrcHandler for UdpSrcPadHandler {
type ElementImpl = UdpSrc;
fn src_event(self, pad: &gst::Pad, imp: &UdpSrc, event: gst::Event) -> bool {
gst::log!(CAT, obj = pad, "Handling {:?}", event);
gst::log!(CAT, obj: pad, "Handling {:?}", event);
use gst::EventView;
let ret = match event.view() {
@ -137,16 +126,16 @@ impl PadSrcHandler for UdpSrcPadHandler {
};
if ret {
gst::log!(CAT, obj = pad, "Handled {:?}", event);
gst::log!(CAT, obj: pad, "Handled {:?}", event);
} else {
gst::log!(CAT, obj = pad, "Didn't handle {:?}", event);
gst::log!(CAT, obj: pad, "Didn't handle {:?}", event);
}
ret
}
fn src_query(self, pad: &gst::Pad, imp: &UdpSrc, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj = pad, "Handling {:?}", query);
gst::log!(CAT, obj: pad, "Handling {:?}", query);
use gst::QueryViewMut;
let ret = match query.view_mut() {
@ -178,9 +167,9 @@ impl PadSrcHandler for UdpSrcPadHandler {
};
if ret {
gst::log!(CAT, obj = pad, "Handled {:?}", query);
gst::log!(CAT, obj: pad, "Handled {:?}", query);
} else {
gst::log!(CAT, obj = pad, "Didn't handle {:?}", query);
gst::log!(CAT, obj: pad, "Didn't handle {:?}", query);
}
ret
@ -193,18 +182,16 @@ struct UdpSrcTask {
retrieve_sender_address: bool,
need_initial_events: bool,
need_segment: bool,
event_receiver: Receiver<gst::Event>,
}
impl UdpSrcTask {
fn new(element: super::UdpSrc, event_receiver: Receiver<gst::Event>) -> Self {
fn new(element: super::UdpSrc) -> Self {
UdpSrcTask {
element,
socket: None,
retrieve_sender_address: DEFAULT_RETRIEVE_SENDER_ADDRESS,
need_initial_events: true,
need_segment: true,
event_receiver,
}
}
}
@ -217,7 +204,7 @@ impl TaskImpl for UdpSrcTask {
let udpsrc = self.element.imp();
let mut settings = udpsrc.settings.lock().unwrap();
gst::debug!(CAT, obj = self.element, "Preparing Task");
gst::debug!(CAT, obj: self.element, "Preparing Task");
self.retrieve_sender_address = settings.retrieve_sender_address;
@ -263,7 +250,7 @@ impl TaskImpl for UdpSrcTask {
};
let port = settings.port;
// TODO: TTL etc
// TODO: TTL, multicast loopback, etc
let saddr = if addr.is_multicast() {
let bind_addr = if addr.is_ipv4() {
IpAddr::V4(Ipv4Addr::UNSPECIFIED)
@ -274,7 +261,7 @@ impl TaskImpl for UdpSrcTask {
let saddr = SocketAddr::new(bind_addr, port as u16);
gst::debug!(
CAT,
obj = self.element,
obj: self.element,
"Binding to {:?} for multicast group {:?}",
saddr,
addr
@ -283,7 +270,7 @@ impl TaskImpl for UdpSrcTask {
saddr
} else {
let saddr = SocketAddr::new(addr, port as u16);
gst::debug!(CAT, obj = self.element, "Binding to {:?}", saddr);
gst::debug!(CAT, obj: self.element, "Binding to {:?}", saddr);
saddr
};
@ -315,29 +302,6 @@ impl TaskImpl for UdpSrcTask {
)
})?;
gst::debug!(
CAT,
obj = self.element,
"socket recv buffer size is {:?}",
socket.recv_buffer_size()
);
if settings.buffer_size != 0 {
gst::debug!(
CAT,
obj = self.element,
"changing the socket recv buffer size to {}",
settings.buffer_size
);
socket
.set_recv_buffer_size(settings.buffer_size as usize)
.map_err(|err| {
gst::error_msg!(
gst::ResourceError::OpenRead,
["Failed to set buffer_size: {}", err]
)
})?;
}
#[cfg(unix)]
{
socket.set_reuse_port(settings.reuse).map_err(|err| {
@ -375,20 +339,6 @@ impl TaskImpl for UdpSrcTask {
["Failed to join multicast group: {}", err]
)
})?;
socket
.as_ref()
.set_multicast_loop_v4(settings.multicast_loop)
.map_err(|err| {
gst::error_msg!(
gst::ResourceError::OpenWrite,
[
"Failed to set multicast loop to {}: {}",
settings.multicast_loop,
err
]
)
})?;
}
IpAddr::V6(addr) => {
socket.as_ref().join_multicast_v6(&addr, 0).map_err(|err| {
@ -397,20 +347,6 @@ impl TaskImpl for UdpSrcTask {
["Failed to join multicast group: {}", err]
)
})?;
socket
.as_ref()
.set_multicast_loop_v6(settings.multicast_loop)
.map_err(|err| {
gst::error_msg!(
gst::ResourceError::OpenWrite,
[
"Failed to set multicast loop to {}: {}",
settings.multicast_loop,
err
]
)
})?;
}
}
}
@ -464,7 +400,7 @@ impl TaskImpl for UdpSrcTask {
fn unprepare(&mut self) -> BoxFuture<'_, ()> {
async move {
gst::debug!(CAT, obj = self.element, "Unpreparing Task");
gst::debug!(CAT, obj: self.element, "Unpreparing Task");
let udpsrc = self.element.imp();
udpsrc.settings.lock().unwrap().used_socket = None;
self.element.notify("used-socket");
@ -474,12 +410,12 @@ impl TaskImpl for UdpSrcTask {
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.element, "Starting task");
gst::log!(CAT, obj: self.element, "Starting task");
self.socket
.as_mut()
.unwrap()
.set_clock(self.element.clock(), self.element.base_time());
gst::log!(CAT, obj = self.element, "Task started");
gst::log!(CAT, obj: self.element, "Task started");
Ok(())
}
.boxed()
@ -487,80 +423,55 @@ impl TaskImpl for UdpSrcTask {
fn try_next(&mut self) -> BoxFuture<'_, Result<gst::Buffer, gst::FlowError>> {
async move {
let event_fut = self.event_receiver.next().fuse();
let socket_fut = self.socket.as_mut().unwrap().try_next().fuse();
pin_mut!(event_fut);
pin_mut!(socket_fut);
futures::select! {
event_res = event_fut => match event_res {
Some(event) => {
gst::debug!(CAT, obj = self.element, "Handling element level event {event:?}");
match event.view() {
gst::EventView::Eos(_) => Err(gst::FlowError::Eos),
ev => {
gst::error!(CAT, obj = self.element, "Unexpected event {ev:?} on channel");
Err(gst::FlowError::Error)
}
self.socket
.as_mut()
.unwrap()
.try_next()
.await
.map(|(mut buffer, saddr)| {
if let Some(saddr) = saddr {
if self.retrieve_sender_address {
NetAddressMeta::add(
buffer.get_mut().unwrap(),
&gio::InetSocketAddress::from(saddr),
);
}
}
None => {
gst::error!(CAT, obj = self.element, "Unexpected return on event channel");
Err(gst::FlowError::Error)
}
},
socket_res = socket_fut => match socket_res {
Ok((mut buffer, saddr)) => {
if let Some(saddr) = saddr {
if self.retrieve_sender_address {
NetAddressMeta::add(
buffer.get_mut().unwrap(),
&gio::InetSocketAddress::from(saddr),
);
}
buffer
})
.map_err(|err| {
gst::error!(CAT, obj: self.element, "Got error {:?}", err);
match err {
SocketError::Gst(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {}", err]
);
}
Ok(buffer)
},
Err(err) => {
gst::error!(CAT, obj = self.element, "Got error {err:#}");
match err {
SocketError::Gst(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {err}"]
);
}
SocketError::Io(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("I/O error"),
["streaming stopped, I/O error {err}"]
);
}
SocketError::Io(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("I/O error"),
["streaming stopped, I/O error {}", err]
);
}
Err(gst::FlowError::Error)
}
},
}
gst::FlowError::Error
})
}
.boxed()
}
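One side of this hunk drives try_next() from two sources at once, racing the element-level event channel against the socket read with futures::select!. A minimal, self-contained sketch of that pattern, using hypothetical stand-in futures rather than the element's real receiver and socket:

use futures::{pin_mut, FutureExt};

// Wait on both sources and return whichever completes first; the losing
// future is simply dropped. `event` stands in for `event_receiver.next()`
// and `socket` for `socket.try_next()` in the code above.
async fn race<E, S, T>(event: E, socket: S) -> Result<T, ()>
where
    E: std::future::Future<Output = Option<T>>,
    S: std::future::Future<Output = Result<T, ()>>,
{
    let event_fut = event.fuse();
    let socket_fut = socket.fuse();
    pin_mut!(event_fut);
    pin_mut!(socket_fut);
    futures::select! {
        ev = event_fut => ev.ok_or(()),  // event channel item, or channel closed
        res = socket_fut => res,         // received buffer, or I/O error
    }
}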
fn handle_item(&mut self, buffer: gst::Buffer) -> BoxFuture<'_, Result<(), gst::FlowError>> {
async {
gst::log!(CAT, obj = self.element, "Handling {:?}", buffer);
gst::log!(CAT, obj: self.element, "Handling {:?}", buffer);
let udpsrc = self.element.imp();
if self.need_initial_events {
gst::debug!(CAT, obj = self.element, "Pushing initial events");
gst::debug!(CAT, obj: self.element, "Pushing initial events");
let stream_id =
format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
@ -591,14 +502,14 @@ impl TaskImpl for UdpSrcTask {
let res = udpsrc.src_pad.push(buffer).await.map(drop);
match res {
Ok(_) => gst::log!(CAT, obj = self.element, "Successfully pushed buffer"),
Err(gst::FlowError::Flushing) => gst::debug!(CAT, obj = self.element, "Flushing"),
Ok(_) => gst::log!(CAT, obj: self.element, "Successfully pushed buffer"),
Err(gst::FlowError::Flushing) => gst::debug!(CAT, obj: self.element, "Flushing"),
Err(gst::FlowError::Eos) => {
gst::debug!(CAT, obj = self.element, "EOS");
gst::debug!(CAT, obj: self.element, "EOS");
udpsrc.src_pad.push_event(gst::event::Eos::new()).await;
}
Err(err) => {
gst::error!(CAT, obj = self.element, "Got error {}", err);
gst::error!(CAT, obj: self.element, "Got error {}", err);
gst::element_error!(
self.element,
gst::StreamError::Failed,
@ -615,10 +526,10 @@ impl TaskImpl for UdpSrcTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.element, "Stopping task");
gst::log!(CAT, obj: self.element, "Stopping task");
self.need_initial_events = true;
self.need_segment = true;
gst::log!(CAT, obj = self.element, "Task stopped");
gst::log!(CAT, obj: self.element, "Task stopped");
Ok(())
}
.boxed()
@ -626,47 +537,13 @@ impl TaskImpl for UdpSrcTask {
fn flush_stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj = self.element, "Stopping task flush");
gst::log!(CAT, obj: self.element, "Stopping task flush");
self.need_segment = true;
gst::log!(CAT, obj = self.element, "Stopped task flush");
gst::log!(CAT, obj: self.element, "Stopped task flush");
Ok(())
}
.boxed()
}
fn handle_loop_error(&mut self, err: gst::FlowError) -> BoxFuture<'_, task::Trigger> {
async move {
match err {
gst::FlowError::Flushing => {
gst::debug!(CAT, obj = self.element, "Flushing");
task::Trigger::FlushStart
}
gst::FlowError::Eos => {
gst::debug!(CAT, obj = self.element, "EOS");
self.element
.imp()
.src_pad
.push_event(gst::event::Eos::new())
.await;
task::Trigger::Stop
}
err => {
gst::error!(CAT, obj = self.element, "Got error {err}");
gst::element_error!(
&self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {}", err]
);
task::Trigger::Error
}
}
}
.boxed()
}
}
pub struct UdpSrc {
@ -674,7 +551,6 @@ pub struct UdpSrc {
task: Task,
configured_caps: Mutex<Option<gst::Caps>>,
settings: Mutex<Settings>,
state: Mutex<State>,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
@ -687,7 +563,7 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
impl UdpSrc {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Preparing");
gst::debug!(CAT, imp: self, "Preparing");
let settings = self.settings.lock().unwrap();
let context =
@ -699,52 +575,42 @@ impl UdpSrc {
})?;
drop(settings);
let (sender, receiver) = channel(1);
*self.configured_caps.lock().unwrap() = None;
self.task
.prepare(UdpSrcTask::new(self.obj().clone(), receiver), context)
.prepare(UdpSrcTask::new(self.obj().clone()), context)
.block_on()?;
let mut state = self.state.lock().unwrap();
state.event_sender = Some(sender);
drop(state);
gst::debug!(CAT, imp = self, "Prepared");
gst::debug!(CAT, imp: self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp = self, "Unpreparing");
gst::debug!(CAT, imp: self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
gst::debug!(CAT, imp = self, "Unprepared");
gst::debug!(CAT, imp: self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Stopping");
gst::debug!(CAT, imp: self, "Stopping");
self.task.stop().block_on()?;
gst::debug!(CAT, imp = self, "Stopped");
gst::debug!(CAT, imp: self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Starting");
gst::debug!(CAT, imp: self, "Starting");
self.task.start().block_on()?;
gst::debug!(CAT, imp = self, "Started");
gst::debug!(CAT, imp: self, "Started");
Ok(())
}
fn pause(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp = self, "Pausing");
gst::debug!(CAT, imp: self, "Pausing");
self.task.pause().block_on()?;
gst::debug!(CAT, imp = self, "Paused");
gst::debug!(CAT, imp: self, "Paused");
Ok(())
}
fn state(&self) -> TaskState {
self.task.state()
}
}
#[glib::object_subclass]
@ -762,7 +628,6 @@ impl ObjectSubclass for UdpSrc {
task: Task::default(),
configured_caps: Default::default(),
settings: Default::default(),
state: Default::default(),
}
}
}
@ -814,18 +679,6 @@ impl ObjectImpl for UdpSrc {
.blurb("Whether to retrieve the sender address and add it to buffers as meta. Disabling this might result in minor performance improvements in certain scenarios")
.default_value(DEFAULT_RETRIEVE_SENDER_ADDRESS)
.build(),
glib::ParamSpecBoolean::builder("loop")
.nick("Loop")
.blurb("Set the multicast loop parameter")
.default_value(DEFAULT_MULTICAST_LOOP)
.build(),
glib::ParamSpecUInt::builder("buffer-size")
.nick("Buffer Size")
.blurb("Size of the kernel receive buffer in bytes, 0=default")
.maximum(u32::MAX)
.default_value(DEFAULT_BUFFER_SIZE)
.build(),
];
#[cfg(not(windows))]
@ -892,12 +745,6 @@ impl ObjectImpl for UdpSrc {
"retrieve-sender-address" => {
settings.retrieve_sender_address = value.get().expect("type checked upstream");
}
"loop" => {
settings.multicast_loop = value.get().expect("type checked upstream");
}
"buffer-size" => {
settings.buffer_size = value.get().expect("type checked upstream");
}
_ => unimplemented!(),
}
}
@ -923,8 +770,6 @@ impl ObjectImpl for UdpSrc {
"context" => settings.context.to_value(),
"context-wait" => (settings.context_wait.as_millis() as u32).to_value(),
"retrieve-sender-address" => settings.retrieve_sender_address.to_value(),
"loop" => settings.multicast_loop.to_value(),
"buffer-size" => settings.buffer_size.to_value(),
_ => unimplemented!(),
}
}
@ -975,7 +820,7 @@ impl ElementImpl for UdpSrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {
@ -1013,31 +858,4 @@ impl ElementImpl for UdpSrc {
Ok(success)
}
fn send_event(&self, event: gst::Event) -> bool {
use gst::EventView;
gst::debug!(CAT, imp = self, "Handling element level event {event:?}");
match event.view() {
EventView::Eos(_) => {
if self.state() != TaskState::Started {
if let Err(err) = self.start() {
gst::error!(CAT, imp = self, "Failed to start task thread {err:?}");
}
}
if self.state() == TaskState::Started {
let mut state = self.state.lock().unwrap();
if let Some(event_tx) = state.event_sender.as_mut() {
return event_tx.try_send(event.clone()).is_ok();
}
}
false
}
_ => self.parent_send_event(event),
}
}
}


@ -88,7 +88,7 @@ mod imp_src {
type ElementImpl = ElementSrcTest;
fn src_event(self, pad: &gst::Pad, imp: &ElementSrcTest, event: gst::Event) -> bool {
gst::log!(SRC_CAT, obj = pad, "Handling {:?}", event);
gst::log!(SRC_CAT, obj: pad, "Handling {:?}", event);
let ret = match event.view() {
EventView::FlushStart(..) => {
@ -100,9 +100,9 @@ mod imp_src {
};
if ret {
gst::log!(SRC_CAT, obj = pad, "Handled {:?}", event);
gst::log!(SRC_CAT, obj: pad, "Handled {:?}", event);
} else {
gst::log!(SRC_CAT, obj = pad, "Didn't handle {:?}", event);
gst::log!(SRC_CAT, obj: pad, "Didn't handle {:?}", event);
}
ret
@ -127,7 +127,7 @@ mod imp_src {
while let Ok(Some(_item)) = self.receiver.try_next() {}
}
async fn push_item(&self, item: Item) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::debug!(SRC_CAT, obj = self.element, "Handling {:?}", item);
gst::debug!(SRC_CAT, obj: self.element, "Handling {:?}", item);
let elementsrctest = self.element.imp();
match item {
@ -148,7 +148,7 @@ mod imp_src {
fn try_next(&mut self) -> BoxFuture<'_, Result<Item, gst::FlowError>> {
async move {
self.receiver.next().await.ok_or_else(|| {
gst::log!(SRC_CAT, obj = self.element, "SrcPad channel aborted");
gst::log!(SRC_CAT, obj: self.element, "SrcPad channel aborted");
gst::FlowError::Eos
})
}
@ -159,9 +159,9 @@ mod imp_src {
async move {
let res = self.push_item(item).await.map(drop);
match res {
Ok(_) => gst::log!(SRC_CAT, obj = self.element, "Successfully pushed item"),
Ok(_) => gst::log!(SRC_CAT, obj: self.element, "Successfully pushed item"),
Err(gst::FlowError::Flushing) => {
gst::debug!(SRC_CAT, obj = self.element, "Flushing")
gst::debug!(SRC_CAT, obj: self.element, "Flushing")
}
Err(err) => panic!("Got error {err}"),
}
@ -173,9 +173,9 @@ mod imp_src {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(SRC_CAT, obj = self.element, "Stopping task");
gst::log!(SRC_CAT, obj: self.element, "Stopping task");
self.flush();
gst::log!(SRC_CAT, obj = self.element, "Task stopped");
gst::log!(SRC_CAT, obj: self.element, "Task stopped");
Ok(())
}
.boxed()
@ -183,9 +183,9 @@ mod imp_src {
fn flush_start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(SRC_CAT, obj = self.element, "Starting task flush");
gst::log!(SRC_CAT, obj: self.element, "Starting task flush");
self.flush();
gst::log!(SRC_CAT, obj = self.element, "Task flush started");
gst::log!(SRC_CAT, obj: self.element, "Task flush started");
Ok(())
}
.boxed()
@ -219,7 +219,7 @@ mod imp_src {
}
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(SRC_CAT, imp = self, "Preparing");
gst::debug!(SRC_CAT, imp: self, "Preparing");
let settings = self.settings.lock().unwrap().clone();
let context =
@ -240,36 +240,36 @@ mod imp_src {
)
.block_on()?;
gst::debug!(SRC_CAT, imp = self, "Prepared");
gst::debug!(SRC_CAT, imp: self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(SRC_CAT, imp = self, "Unpreparing");
gst::debug!(SRC_CAT, imp: self, "Unpreparing");
*self.sender.lock().unwrap() = None;
self.task.unprepare().block_on().unwrap();
gst::debug!(SRC_CAT, imp = self, "Unprepared");
gst::debug!(SRC_CAT, imp: self, "Unprepared");
}
fn stop(&self) {
gst::debug!(SRC_CAT, imp = self, "Stopping");
gst::debug!(SRC_CAT, imp: self, "Stopping");
self.task.stop().await_maybe_on_context().unwrap();
gst::debug!(SRC_CAT, imp = self, "Stopped");
gst::debug!(SRC_CAT, imp: self, "Stopped");
}
fn start(&self) {
gst::debug!(SRC_CAT, imp = self, "Starting");
gst::debug!(SRC_CAT, imp: self, "Starting");
self.task.start().await_maybe_on_context().unwrap();
gst::debug!(SRC_CAT, imp = self, "Started");
gst::debug!(SRC_CAT, imp: self, "Started");
}
fn pause(&self) {
gst::debug!(SRC_CAT, imp = self, "Pausing");
gst::debug!(SRC_CAT, imp: self, "Pausing");
self.task.pause().block_on().unwrap();
gst::debug!(SRC_CAT, imp = self, "Paused");
gst::debug!(SRC_CAT, imp: self, "Paused");
}
}
@ -366,7 +366,7 @@ mod imp_src {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::log!(SRC_CAT, imp = self, "Changing state {:?}", transition);
gst::log!(SRC_CAT, imp: self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {
@ -464,7 +464,7 @@ mod imp_sink {
}
fn sink_event(self, pad: &gst::Pad, imp: &ElementSinkTest, event: gst::Event) -> bool {
gst::debug!(SINK_CAT, obj = pad, "Handling non-serialized {:?}", event);
gst::debug!(SINK_CAT, obj: pad, "Handling non-serialized {:?}", event);
match event.view() {
EventView::FlushStart(..) => {
@ -482,7 +482,7 @@ mod imp_sink {
event: gst::Event,
) -> BoxFuture<'static, bool> {
async move {
gst::log!(SINK_CAT, obj = pad, "Handling serialized {:?}", event);
gst::log!(SINK_CAT, obj: pad, "Handling serialized {:?}", event);
let imp = elem.imp();
if let EventView::FlushStop(..) = event.view() {
@ -505,7 +505,7 @@ mod imp_sink {
impl ElementSinkTest {
async fn forward_item(&self, item: Item) -> Result<gst::FlowSuccess, gst::FlowError> {
if !self.flushing.load(Ordering::SeqCst) {
gst::debug!(SINK_CAT, imp = self, "Forwarding {:?}", item);
gst::debug!(SINK_CAT, imp: self, "Forwarding {:?}", item);
let mut sender = self
.sender
.lock()
@ -521,7 +521,7 @@ mod imp_sink {
} else {
gst::debug!(
SINK_CAT,
imp = self,
imp: self,
"Not forwarding {:?} due to flushing",
item
);
@ -530,31 +530,31 @@ mod imp_sink {
}
fn start(&self) {
gst::debug!(SINK_CAT, imp = self, "Starting");
gst::debug!(SINK_CAT, imp: self, "Starting");
self.flushing.store(false, Ordering::SeqCst);
gst::debug!(SINK_CAT, imp = self, "Started");
gst::debug!(SINK_CAT, imp: self, "Started");
}
fn stop(&self) {
gst::debug!(SINK_CAT, imp = self, "Stopping");
gst::debug!(SINK_CAT, imp: self, "Stopping");
self.flushing.store(true, Ordering::SeqCst);
gst::debug!(SINK_CAT, imp = self, "Stopped");
gst::debug!(SINK_CAT, imp: self, "Stopped");
}
pub fn push_flush_start(&self) {
gst::debug!(SINK_CAT, imp = self, "Pushing FlushStart");
gst::debug!(SINK_CAT, imp: self, "Pushing FlushStart");
self.sink_pad
.gst_pad()
.push_event(gst::event::FlushStart::new());
gst::debug!(SINK_CAT, imp = self, "FlushStart pushed");
gst::debug!(SINK_CAT, imp: self, "FlushStart pushed");
}
pub fn push_flush_stop(&self) {
gst::debug!(SINK_CAT, imp = self, "Pushing FlushStop");
gst::debug!(SINK_CAT, imp: self, "Pushing FlushStop");
self.sink_pad
.gst_pad()
.push_event(gst::event::FlushStop::new(true));
gst::debug!(SINK_CAT, imp = self, "FlushStop pushed");
gst::debug!(SINK_CAT, imp: self, "FlushStop pushed");
}
}
@ -657,7 +657,7 @@ mod imp_sink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::log!(SINK_CAT, imp = self, "Changing state {:?}", transition);
gst::log!(SINK_CAT, imp: self, "Changing state {:?}", transition);
if let gst::StateChange::PausedToReady = transition {
self.stop();


@ -218,7 +218,7 @@ fn multiple_contexts_proxy() {
.name(format!("proxysrc-{pipeline_index}").as_str())
.property(
"context",
format!("context-{}", (pipeline_index as u32) % CONTEXT_NB),
&format!("context-{}", (pipeline_index as u32) % CONTEXT_NB),
)
.property("proxy-context", format!("proxy-{pipeline_index}"))
.build()
@ -364,7 +364,7 @@ fn eos() {
sink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
.new_sample(move |appsink| {
gst::debug!(CAT, obj = appsink, "eos: pulling sample");
gst::debug!(CAT, obj: appsink, "eos: pulling sample");
let _ = appsink.pull_sample().unwrap();
sample_notifier.send(()).unwrap();
@ -376,7 +376,7 @@ fn eos() {
);
fn push_buffer(src: &gst::Element) -> bool {
gst::debug!(CAT, obj = src, "eos: pushing buffer");
gst::debug!(CAT, obj: src, "eos: pushing buffer");
src.emit_by_name::<bool>("push-buffer", &[&gst::Buffer::from_slice(vec![0; 1024])])
}
@ -498,7 +498,7 @@ fn premature_shutdown() {
sink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
.new_sample(move |appsink| {
gst::debug!(CAT, obj = appsink, "premature_shutdown: pulling sample");
gst::debug!(CAT, obj: appsink, "premature_shutdown: pulling sample");
let _sample = appsink.pull_sample().unwrap();
appsink_sender.send(()).unwrap();
@ -511,7 +511,7 @@ fn premature_shutdown() {
fn push_buffer(src: &gst::Element, intent: &str) -> bool {
gst::debug!(
CAT,
obj = src,
obj: src,
"premature_shutdown: pushing buffer {}",
intent
);
@ -609,8 +609,6 @@ fn premature_shutdown() {
}
#[test]
// FIXME: racy: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/250
#[ignore]
fn socket_play_null_play() {
use gio::{
prelude::SocketExt, InetAddress, InetSocketAddress, SocketFamily, SocketProtocol,


@ -76,8 +76,6 @@ fn test_client_management() {
}
#[test]
// FIXME: racy: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/250
#[ignore]
fn test_chain() {
init();


@ -1,7 +1,7 @@
project('gst-plugins-rs',
'rust',
'c',
version: '0.14.0-alpha.1',
version: '0.12.1',
meson_version : '>= 1.1')
# dependencies.py needs a toml parsing module
@ -86,7 +86,7 @@ if get_option('tests').allowed()
deps += [['gstreamer-check-1.0', 'gstreamer', 'gst_check_dep', 'gst_check']]
endif
if get_option('gtk4').allowed()
deps += [['gstreamer-gl-1.0', 'gst-plugins-base', 'gstgl_dep', 'gstgl', get_option('gtk4')]]
deps += [['gstreamer-gl-1.0', 'gst-plugins-base', 'gst_gl_dep', 'gstgl']]
endif
if get_option('threadshare').allowed() or get_option('rtsp').allowed()
deps += [['gstreamer-net-1.0', 'gstreamer', 'gst_net_dep', 'gst_net']]
@ -97,7 +97,7 @@ deps_cache += {'glib-2.0': glib_dep}
foreach d: deps
dep = dependency(d[0], version: gst_req,
fallback : [d[1], d[2]], required: d.get(4, true))
fallback : [d[1], d[2]])
set_variable(d[2], dep)
deps_cache += {d[0]: dep}
if dep.type_name() == 'internal'
@ -118,7 +118,6 @@ plugins = {
'spotify': {'library': 'libgstspotify'},
'file': {'library': 'libgstrsfile'},
'originalbuffer': {'library': 'libgstoriginalbuffer'},
# sodium can have an external dependency, see below
'threadshare': {
'library': 'libgstthreadshare',
@ -145,7 +144,6 @@ plugins = {
'library': 'libgstaws',
'extra-deps': {'openssl': ['>=1.1']},
},
'mpegtslive': {'library': 'libgstmpegtslive'},
'hlssink3': {'library': 'libgsthlssink3'},
'ndi': {'library': 'libgstndi'},
'onvif': {
@ -172,7 +170,6 @@ plugins = {
'library': 'libgsturiplaylistbin',
'examples': ['playlist'],
'features': ['clap'],
'gst-version': '>=1.23.90',
},
'cdg': {'library': 'libgstcdg'},
@ -186,7 +183,7 @@ plugins = {
},
'dav1d': {
'library': 'libgstdav1d',
'extra-deps': {'dav1d': ['>=1.3']},
'extra-deps': {'dav1d': ['>=1.0', '<1.3']},
},
'ffv1': {'library': 'libgstffv1'},
'flavors': {'library': 'libgstrsflv'},
@ -205,11 +202,34 @@ plugins = {
'library': 'libgstrsvideofx',
'extra-deps': {'cairo-gobject': []},
},
'gopbuffer': {'library': 'libgstgopbuffer'},
'quinn': {'library': 'libgstquinn'},
'speechmatics': {'library': 'libgstspeechmatics'},
}
if get_option('examples').allowed()
plugins += {
'fallbackswitch': {
'library': 'libgstfallbackswitch',
'examples': ['gtk-fallbackswitch'],
'features': ['gtk', 'gio', 'gst-plugin-gtk4'],
},
'livesync': {
'library': 'libgstlivesync',
'examples': ['gtk-livesync'],
'features': ['gtk', 'gio', 'gst-plugin-gtk4'],
},
'togglerecord': {
'library': 'libgsttogglerecord',
'examples': ['gtk-recording'],
'features': ['gtk', 'gio', 'gst-plugin-gtk4'],
},
}
else
plugins += {
'fallbackswitch': { 'library': 'libgstfallbackswitch'},
'livesync': { 'library': 'libgstlivesync'},
'togglerecord': { 'library': 'libgsttogglerecord'},
}
endif
# Won't build on platforms where it bundles the sources because of:
# https://github.com/qnighy/libwebp-sys2-rs/issues/12
# the fix is:
@ -264,8 +284,8 @@ endif
if get_option('gtk4').allowed()
gtk4_features = []
gl_winsys = gstgl_dep.get_variable('gl_winsys').split()
gl_platforms = gstgl_dep.get_variable('gl_platforms').split()
gl_winsys = gst_gl_dep.get_variable('gl_winsys').split()
gl_platforms = gst_gl_dep.get_variable('gl_platforms').split()
if 'wayland' in gl_winsys
gtk4_features += 'wayland'
endif
@ -281,61 +301,13 @@ if get_option('gtk4').allowed()
gtk4_features += 'winegl'
endif
endif
gst_allocators_dep = dependency('gstreamer-allocators-1.0', version: '>=1.24', required: false)
gtk_dep = dependency('gtk4', version: '>=4.6', required: get_option('gtk4'))
if gtk_dep.found()
if host_system == 'linux' and gtk_dep.version().version_compare('>=4.14') and \
gst_allocators_dep.found() and 'wayland' in gtk4_features
gtk4_features += 'dmabuf'
endif
if gtk_dep.version().version_compare('>=4.14')
gtk4_features += 'gtk_v4_14'
elif gtk_dep.version().version_compare('>=4.12')
gtk4_features += 'gtk_v4_12'
elif gtk_dep.version().version_compare('>=4.10')
gtk4_features += 'gtk_v4_10'
endif
plugins += {
'gtk4': {
'library': 'libgstgtk4',
'examples': ['gtksink'],
'extra-deps': {'gtk4': ['>=4.6']},
'features': gtk4_features,
},
}
endif
endif
examples_opt = get_option('examples')
if examples_opt.allowed() and 'gtk4' in plugins
plugins += {
'fallbackswitch': {
'library': 'libgstfallbackswitch',
'examples_features': {
'gtk-fallbackswitch': ['gtk', 'gio', 'gst-plugin-gtk4'],
},
'gtk4': {
'library': 'libgstgtk4',
'examples': ['gtksink'],
'extra-deps': {'gtk4': ['>=4.6']},
'features': gtk4_features,
},
'livesync': {
'library': 'libgstlivesync',
'examples_features': {
'gtk-livesync': ['gtk', 'gio', 'gst-plugin-gtk4'],
}
},
'togglerecord': {
'library': 'libgsttogglerecord',
'examples_features': {
'gtk-recording': ['gtk', 'gio', 'gst-plugin-gtk4'],
}
},
}
else
plugins += {
'fallbackswitch': { 'library': 'libgstfallbackswitch'},
'livesync': { 'library': 'libgstlivesync'},
'togglerecord': { 'library': 'libgsttogglerecord'},
}
endif
@ -401,107 +373,51 @@ endif
foreach plugin_name, details: plugins
plugin_opt = get_variable(f'@plugin_name@_option', get_option(plugin_name))
if not plugin_opt.allowed()
debug(f'@plugin_name@ is disabled')
continue
endif
plugin_deps_found = true
# Check whether we have all needed deps
foreach dep_name, dep_ver: details.get('extra-deps', {})
if dep_ver.length() != 0
dep = dependency(dep_name, version: dep_ver, required: plugin_opt)
else
dep = dependency(dep_name, required: plugin_opt)
endif
deps_cache += {dep_name: dep}
if not dep.found()
if plugin_opt.allowed()
plugin_deps_found = true
foreach dep_name, dep_ver: details.get('extra-deps', {})
if dep_ver.length() != 0
dep_ver_msg = ' '.join(dep_ver)
debug(f'@plugin_name@ dependency @dep_name@ @dep_ver_msg@ not found, skipping')
dep = dependency(dep_name, version: dep_ver, required: plugin_opt)
else
debug(f'@plugin_name@ dependency @dep_name@ not found, skipping')
dep = dependency(dep_name, required: plugin_opt)
endif
plugin_deps_found = false
break
endif
endforeach
if not plugin_deps_found
continue
endif
# Validate gst-plugin features
plugin_features = details.get('features', [])
foreach feature: plugin_features
if feature.startswith('gst-plugin') and not packages.contains(feature)
msg = f'@plugin_name@ required feature @feature@ not found'
if plugin_opt.enabled()
error(msg)
endif
message(msg + ', skipping')
plugin_deps_found = false
break
endif
endforeach
if not plugin_deps_found
continue
endif
# Check if we have the required GStreamer version
if details.has_key('gst-version') and not \
deps_cache['gstreamer-1.0'].version().version_compare(details['gst-version'])
msg = '@0@ requires gstreamer version @1@'.format(plugin_name, details['gst-version'])
if plugin_opt.enabled()
error(msg)
endif
message(msg + ', skipping')
continue
endif
# Parse and enable examples
plugin_examples = details.get('examples', [])
foreach example: plugin_examples
examples += example
endforeach
plugin_examples_features = details.get('examples_features', {})
foreach example, examples_features: plugin_examples_features
example_deps_found = true
foreach feature: examples_features
if feature.startswith('gst-plugin') and not packages.contains(feature)
msg = f'@plugin_name@ example @example@ required feature @feature@ not found'
if plugin_opt.enabled() and examples_opt.enabled()
error(msg)
endif
message(msg + ', skipping')
example_deps_found = false
deps_cache += {dep_name: dep}
if not dep.found()
plugin_deps_found = false
break
endif
endforeach
features += examples_features
if example_deps_found
examples += example
plugin_features = details.get('features', [])
if plugin_deps_found
# Validate gst-plugin features
foreach feature: plugin_features
if feature.startswith('gst-plugin') and not packages.contains(feature)
plugin_deps_found = false
break
endif
endforeach
endif
endforeach
if plugin_deps_found
packages += f'gst-plugin-@plugin_name@'
features += plugin_features
extra_features = run_command('dependencies.py', meson.current_source_dir(), plugin_name,
'--feature', '--gst-version', gst_dep.version(), capture: true, check: true).stdout().strip()
if extra_features != ''
features += extra_features.split(',')
endif
packages += f'gst-plugin-@plugin_name@'
features += plugin_features
extra_features = run_command('dependencies.py', meson.current_source_dir(), plugin_name,
'--feature', '--gst-version', gst_dep.version(), capture: true, check: true).stdout().strip()
if extra_features != ''
features += extra_features.split(',')
endif
lib = details.get('library')
# No 'lib' suffix with MSVC
if cc.get_argument_syntax() == 'msvc'
lib = lib.substring(3)
endif
if default_library in ['shared', 'both']
output += [lib + '.' + ext_dynamic]
endif
if default_library in ['static', 'both']
output += [lib + '.' + ext_static]
lib = details.get('library')
# No 'lib' suffix with MSVC
if cc.get_argument_syntax() == 'msvc'
lib = lib.substring(3)
endif
if default_library in ['shared', 'both']
output += [lib + '.' + ext_dynamic]
endif
if default_library in ['static', 'both']
output += [lib + '.' + ext_static]
endif
endif
endif
endforeach
@ -573,16 +489,6 @@ foreach plugin : plugins
plugin_name = plugin_name.substring(3)
endif
plugin_display_name = plugin_name
if plugin_name.startswith('gst')
plugin_display_name = plugin_name.substring(3)
endif
if plugin_display_name in plugin_names
# When default_library=both plugins are duplicated.
continue
endif
plugin_names += plugin_display_name
option_name = plugin_name.substring(3)
if option_name.startswith('rs')
option_name = option_name.substring(2)
@ -627,7 +533,13 @@ foreach plugin : plugins
warning('Static plugin @0@ is known to fail. It will not be included in libgstreamer-full.'.format(plugin_name))
else
gst_plugins += dep
pc_files += [plugin_name + '.pc']
if plugin_name.startswith('gst')
plugin_names += [plugin_name.substring(3)]
else
plugin_names += [plugin_name]
endif
endif
endforeach


@ -6,12 +6,9 @@ option('claxon', type: 'feature', value: 'auto', description: 'Build claxon plug
option('csound', type: 'feature', value: 'auto', description: 'Build csound plugin')
option('lewton', type: 'feature', value: 'auto', description: 'Build lewton plugin')
option('spotify', type: 'feature', value: 'auto', description: 'Build spotify plugin')
option('speechmatics', type: 'feature', value: 'auto', description: 'Build speechmatics plugin')
# generic
option('file', type: 'feature', value: 'auto', description: 'Build file plugin')
option('originalbuffer', type: 'feature', value: 'auto', description: 'Build originalbuffer plugin')
option('gopbuffer', type: 'feature', value: 'auto', description: 'Build gopbuffer plugin')
option('sodium', type: 'feature', value: 'auto', description: 'Build sodium plugin')
option('sodium-source', type: 'combo',
choices: ['system', 'built-in'], value: 'built-in',
@ -27,7 +24,6 @@ option('mp4', type: 'feature', value: 'auto', description: 'Build mp4 plugin')
# net
option('aws', type: 'feature', value: 'auto', description: 'Build aws plugin')
option('hlssink3', type: 'feature', value: 'auto', description: 'Build hlssink3 plugin')
option('mpegtslive', type: 'feature', value: 'auto', description: 'Build mpegtslive plugin')
option('ndi', type: 'feature', value: 'auto', description: 'Build ndi plugin')
option('onvif', type: 'feature', value: 'auto', description: 'Build onvif plugin')
option('raptorq', type: 'feature', value: 'auto', description: 'Build raptorq plugin')
@ -36,7 +32,6 @@ option('rtsp', type: 'feature', value: 'auto', description: 'Build rtsp plugin')
option('rtp', type: 'feature', value: 'auto', description: 'Build rtp plugin')
option('webrtc', type: 'feature', value: 'auto', yield: true, description: 'Build webrtc plugin')
option('webrtchttp', type: 'feature', value: 'auto', description: 'Build webrtchttp plugin')
option('quinn', type: 'feature', value: 'auto', description: 'Build quinn plugin')
# text
option('textahead', type: 'feature', value: 'auto', description: 'Build textahead plugin')


@ -11,7 +11,6 @@
pub use byteorder::{BigEndian, LittleEndian, ReadBytesExt, WriteBytesExt};
use std::io;
#[allow(unused)]
pub trait ReadBytesExtShort: io::Read {
fn read_u16le(&mut self) -> io::Result<u16> {
self.read_u16::<LittleEndian>()
@ -77,7 +76,6 @@ pub trait ReadBytesExtShort: io::Read {
impl<T> ReadBytesExtShort for T where T: ReadBytesExt {}
#[allow(unused)]
pub trait WriteBytesExtShort: WriteBytesExt {
fn write_u16le(&mut self, n: u16) -> io::Result<()> {
self.write_u16::<LittleEndian>(n)


@ -311,10 +311,10 @@ impl FlvDemux {
// gst::SchedulingFlags::SEEKABLE,
// )
// {
// gst::debug!(CAT, obj = pad, "Activating in Pull mode");
// gst::debug!(CAT, obj: pad, "Activating in Pull mode");
// gst::PadMode::Pull
// } else {
gst::debug!(CAT, obj = pad, "Activating in Push mode");
gst::debug!(CAT, obj: pad, "Activating in Push mode");
gst::PadMode::Push
// }
};
@ -366,7 +366,7 @@ impl FlvDemux {
fn sink_event(&self, pad: &gst::Pad, event: gst::Event) -> bool {
use gst::EventView;
gst::log!(CAT, obj = pad, "Handling event {:?}", event);
gst::log!(CAT, obj: pad, "Handling event {:?}", event);
match event.view() {
EventView::Eos(..) => {
// TODO implement
@ -453,7 +453,7 @@ impl FlvDemux {
pad: &gst::Pad,
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::log!(CAT, obj = pad, "Handling buffer {:?}", buffer);
gst::log!(CAT, obj: pad, "Handling buffer {:?}", buffer);
let mut adapter = self.adapter.lock().unwrap();
adapter.push(buffer);
@ -466,7 +466,7 @@ impl FlvDemux {
let header = match self.find_header(&mut adapter) {
Ok(header) => header,
Err(_) => {
gst::trace!(CAT, imp = self, "Need more data");
gst::trace!(CAT, imp: self, "Need more data");
return Ok(gst::FlowSuccess::Ok);
}
};
@ -495,7 +495,7 @@ impl FlvDemux {
} => {
let avail = adapter.available();
if avail == 0 {
gst::trace!(CAT, imp = self, "Need more data");
gst::trace!(CAT, imp: self, "Need more data");
return Ok(gst::FlowSuccess::Ok);
}
let skip = cmp::min(avail, *skip_left as usize);
@ -507,7 +507,7 @@ impl FlvDemux {
match res {
Ok(None) => {
gst::trace!(CAT, imp = self, "Need more data");
gst::trace!(CAT, imp: self, "Need more data");
return Ok(gst::FlowSuccess::Ok);
}
Ok(Some(events)) => {
@ -534,7 +534,7 @@ impl FlvDemux {
let data = adapter.map(9).unwrap();
if let Ok((_, header)) = flavors::header(&data) {
gst::debug!(CAT, imp = self, "Found FLV header: {:?}", header);
gst::debug!(CAT, imp: self, "Found FLV header: {:?}", header);
drop(data);
adapter.flush(9);
@ -597,7 +597,7 @@ impl FlvDemux {
let res = pad.push(buffer);
gst::trace!(
CAT,
imp = self,
imp: self,
"Pushing buffer for stream {:?} returned {:?}",
stream,
res
@ -687,7 +687,7 @@ impl StreamingState {
match be_u32::<_, (_, nom::error::ErrorKind)>(&data[0..4]) {
Err(_) => unreachable!(),
Ok((_, previous_size)) => {
gst::trace!(CAT, imp = imp, "Previous tag size {}", previous_size);
gst::trace!(CAT, imp: imp, "Previous tag size {}", previous_size);
// Nothing to do here, we just consume it for now
}
}
@ -703,7 +703,7 @@ impl StreamingState {
Ok((_, tag_header)) => tag_header,
};
gst::trace!(CAT, imp = imp, "Parsed tag header {:?}", tag_header);
gst::trace!(CAT, imp: imp, "Parsed tag header {:?}", tag_header);
drop(data);
@ -715,17 +715,17 @@ impl StreamingState {
match tag_header.tag_type {
flavors::TagType::Script => {
gst::trace!(CAT, imp = imp, "Found script tag");
gst::trace!(CAT, imp: imp, "Found script tag");
Ok(self.handle_script_tag(imp, &tag_header, adapter))
}
flavors::TagType::Audio => {
gst::trace!(CAT, imp = imp, "Found audio tag");
gst::trace!(CAT, imp: imp, "Found audio tag");
self.handle_audio_tag(imp, &tag_header, adapter)
}
flavors::TagType::Video => {
gst::trace!(CAT, imp = imp, "Found video tag");
gst::trace!(CAT, imp: imp, "Found video tag");
self.handle_video_tag(imp, &tag_header, adapter)
}
@ -747,10 +747,10 @@ impl StreamingState {
match flavors::script_data(&data) {
Ok((_, ref script_data)) if script_data.name == "onMetaData" => {
gst::trace!(CAT, imp = imp, "Got script tag: {:?}", script_data);
gst::trace!(CAT, imp: imp, "Got script tag: {:?}", script_data);
let metadata = Metadata::new(script_data);
gst::debug!(CAT, imp = imp, "Got metadata: {:?}", metadata);
gst::debug!(CAT, imp: imp, "Got metadata: {:?}", metadata);
let audio_changed = self
.audio
@ -778,10 +778,10 @@ impl StreamingState {
}
}
Ok((_, ref script_data)) => {
gst::trace!(CAT, imp = imp, "Got script tag: {:?}", script_data);
gst::trace!(CAT, imp: imp, "Got script tag: {:?}", script_data);
}
Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
gst::error!(CAT, imp = imp, "Error parsing script tag: {:?}", err);
gst::error!(CAT, imp: imp, "Error parsing script tag: {:?}", err);
}
Err(nom::Err::Incomplete(_)) => {
// ignore
@ -801,7 +801,7 @@ impl StreamingState {
) -> SmallVec<[Event; 4]> {
let mut events = SmallVec::new();
gst::trace!(CAT, imp = imp, "Got audio data header: {:?}", data_header);
gst::trace!(CAT, imp: imp, "Got audio data header: {:?}", data_header);
let new_audio_format =
AudioFormat::new(data_header, &self.metadata, &self.aac_sequence_header);
@ -809,7 +809,7 @@ impl StreamingState {
if self.audio.as_ref() != Some(&new_audio_format) {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Got new audio format: {:?}",
new_audio_format
);
@ -827,7 +827,7 @@ impl StreamingState {
&& self.audio.is_some()
&& !self.got_all_streams
{
gst::debug!(CAT, imp = imp, "Have all expected streams now");
gst::debug!(CAT, imp: imp, "Have all expected streams now");
self.got_all_streams = true;
events.push(Event::HaveAllStreams);
}
@ -846,7 +846,7 @@ impl StreamingState {
adapter.flush((tag_header.data_size - 1) as usize);
gst::warning!(
CAT,
imp = imp,
imp: imp,
"Too small packet for AAC packet header {}",
tag_header.data_size
);
@ -857,14 +857,14 @@ impl StreamingState {
match flavors::aac_audio_packet_header(&data) {
Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
gst::error!(CAT, imp = imp, "Invalid AAC audio packet header: {:?}", err);
gst::error!(CAT, imp: imp, "Invalid AAC audio packet header: {:?}", err);
drop(data);
adapter.flush((tag_header.data_size - 1) as usize);
Ok(true)
}
Err(nom::Err::Incomplete(_)) => unreachable!(),
Ok((_, header)) => {
gst::trace!(CAT, imp = imp, "Got AAC packet header {:?}", header);
gst::trace!(CAT, imp: imp, "Got AAC packet header {:?}", header);
match header.packet_type {
flavors::AACPacketType::SequenceHeader => {
drop(data);
@ -872,7 +872,7 @@ impl StreamingState {
let buffer = adapter
.take_buffer((tag_header.data_size - 1 - 1) as usize)
.unwrap();
gst::debug!(CAT, imp = imp, "Got AAC sequence header {:?}", buffer,);
gst::debug!(CAT, imp: imp, "Got AAC sequence header {:?}", buffer,);
self.aac_sequence_header = Some(buffer);
Ok(true)
@ -898,7 +898,7 @@ impl StreamingState {
let data = adapter.map(1).unwrap();
let data_header = match flavors::audio_data_header(&data) {
Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
gst::error!(CAT, imp = imp, "Invalid audio data header: {:?}", err);
gst::error!(CAT, imp: imp, "Invalid audio data header: {:?}", err);
drop(data);
adapter.flush(tag_header.data_size as usize);
return Ok(SmallVec::new());
@ -943,7 +943,7 @@ impl StreamingState {
gst::trace!(
CAT,
imp = imp,
imp: imp,
"Outputting audio buffer {:?} for tag {:?}",
buffer,
tag_header,
@ -963,7 +963,7 @@ impl StreamingState {
) -> SmallVec<[Event; 4]> {
let mut events = SmallVec::new();
gst::trace!(CAT, imp = imp, "Got video data header: {:?}", data_header);
gst::trace!(CAT, imp: imp, "Got video data header: {:?}", data_header);
let new_video_format =
VideoFormat::new(data_header, &self.metadata, &self.avc_sequence_header);
@ -971,7 +971,7 @@ impl StreamingState {
if self.video.as_ref() != Some(&new_video_format) {
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Got new video format: {:?}",
new_video_format
);
@ -989,7 +989,7 @@ impl StreamingState {
&& self.video.is_some()
&& !self.got_all_streams
{
gst::debug!(CAT, imp = imp, "Have all expected streams now");
gst::debug!(CAT, imp: imp, "Have all expected streams now");
self.got_all_streams = true;
events.push(Event::HaveAllStreams);
}
@ -1008,7 +1008,7 @@ impl StreamingState {
adapter.flush((tag_header.data_size - 1) as usize);
gst::warning!(
CAT,
imp = imp,
imp: imp,
"Too small packet for AVC packet header {}",
tag_header.data_size
);
@ -1018,14 +1018,14 @@ impl StreamingState {
let data = adapter.map(4).unwrap();
match flavors::avc_video_packet_header(&data) {
Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
gst::error!(CAT, imp = imp, "Invalid AVC video packet header: {:?}", err);
gst::error!(CAT, imp: imp, "Invalid AVC video packet header: {:?}", err);
drop(data);
adapter.flush((tag_header.data_size - 1) as usize);
Ok(None)
}
Err(nom::Err::Incomplete(_)) => unreachable!(),
Ok((_, header)) => {
gst::trace!(CAT, imp = imp, "Got AVC packet header {:?}", header);
gst::trace!(CAT, imp: imp, "Got AVC packet header {:?}", header);
match header.packet_type {
flavors::AVCPacketType::SequenceHeader => {
drop(data);
@ -1035,7 +1035,7 @@ impl StreamingState {
.unwrap();
gst::debug!(
CAT,
imp = imp,
imp: imp,
"Got AVC sequence header {:?} of size {}",
buffer,
tag_header.data_size - 1 - 4
@ -1071,7 +1071,7 @@ impl StreamingState {
let data = adapter.map(1).unwrap();
let data_header = match flavors::video_data_header(&data) {
Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
gst::error!(CAT, imp = imp, "Invalid video data header: {:?}", err);
gst::error!(CAT, imp: imp, "Invalid video data header: {:?}", err);
drop(data);
adapter.flush(tag_header.data_size as usize);
return Ok(SmallVec::new());
@ -1147,7 +1147,7 @@ impl StreamingState {
gst::trace!(
CAT,
imp = imp,
imp: imp,
"Outputting video buffer {:?} for tag {:?}, keyframe: {}",
buffer,
tag_header,


@ -14,9 +14,8 @@ gst = { workspace = true, features = ["v1_18"] }
gst-base = { workspace = true, features = ["v1_18"] }
gst-audio = { workspace = true, features = ["v1_18"] }
gst-video = { workspace = true, features = ["v1_18"] }
gst-pbutils = { workspace = true, features = ["v1_20"] }
gst-pbutils = { workspace = true, features = ["v1_18"] }
once_cell.workspace = true
bitstream-io = "2.3"
[lib]
name = "gstfmp4"
@ -26,10 +25,9 @@ path = "src/lib.rs"
[dev-dependencies]
gst-app = { workspace = true, features = ["v1_18"] }
gst-check = { workspace = true, features = ["v1_18"] }
gst-pbutils = { workspace = true, features = ["v1_20"] }
m3u8-rs = "5.0"
chrono = "0.4.35"
dash-mpd = { version = "0.17", default-features = false }
chrono = "0.4"
dash-mpd = { version = "0.14", default-features = false }
quick-xml = { version = "0.31", features = ["serialize"] }
serde = "1"


@ -86,7 +86,7 @@ fn main() -> Result<(), Error> {
drop(map);
// Remove the header from the buffer list
buffer_list.make_mut().remove(0..1);
buffer_list.make_mut().remove(0, 1);
// If the list is now empty then it only contained the media header and nothing
// else.
@ -179,18 +179,19 @@ fn main() -> Result<(), Error> {
// Write the whole segment timeline out here, compressing multiple segments with
// the same duration to a repeated segment.
let mut segments = vec![];
let mut write_segment = |start: gst::ClockTime, duration: u64, repeat: usize| {
let mut s = dash_mpd::S {
t: Some(start.mseconds()),
d: duration,
..Default::default()
};
if repeat > 0 {
s.r = Some(repeat as i64);
}
let mut write_segment =
|start: gst::ClockTime, duration: gst::ClockTime, repeat: usize| {
let mut s = dash_mpd::S {
t: Some(start.mseconds() as i64),
d: duration.mseconds() as i64,
..Default::default()
};
if repeat > 0 {
s.r = Some(repeat as i64);
}
segments.push(s);
};
segments.push(s);
};
let mut start = None;
let mut num_segments = 0;
@ -200,15 +201,15 @@ fn main() -> Result<(), Error> {
start = Some(segment.start_time);
}
if last_duration.is_none() {
last_duration = Some(segment.duration.mseconds());
last_duration = Some(segment.duration);
}
// If the duration of this segment is different from the previous one then we
// have to write out the segment now.
if last_duration != Some(segment.duration.mseconds()) {
if last_duration != Some(segment.duration) {
write_segment(start.unwrap(), last_duration.unwrap(), num_segments - 1);
start = Some(segment.start_time);
last_duration = Some(segment.duration.mseconds());
last_duration = Some(segment.duration);
num_segments = 1;
} else {
num_segments += 1;
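For reference, the t/d/r compression performed by write_segment above matches DASH SegmentTimeline semantics: t is the start time of the first segment, d the duration shared by the run, and r the number of additional repetitions. A small illustration (the exact integer types of t and d differ between the two dash-mpd versions shown in this diff):

// Three consecutive 2000 ms segments starting at t = 0 collapse into a single
// <S> entry: this segment plus two repetitions of the same duration.
let s = dash_mpd::S {
    t: Some(0),  // start of the first segment, in timescale units
    d: 2000,     // duration shared by every repeated segment
    r: Some(2),  // two more segments with the same duration follow
    ..Default::default()
};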


@ -153,7 +153,7 @@ fn trim_segments(state: &mut StreamState) {
// safe side
removal_time: segment
.date_time
.checked_add_signed(Duration::try_seconds(20).unwrap())
.checked_add_signed(Duration::seconds(20))
.unwrap(),
path: segment.path.clone(),
});
@ -267,7 +267,7 @@ fn setup_appsink(appsink: &gst_app::AppSink, name: &str, path: &Path, is_video:
drop(map);
// Remove the header from the buffer list
buffer_list.make_mut().remove(0..1);
buffer_list.make_mut().remove(0, 1);
// If the list is now empty then it only contained the media header and nothing
// else.
@ -422,6 +422,7 @@ impl VideoStream {
.build()?;
let mux = gst::ElementFactory::make("cmafmux")
.property("fragment-duration", 2500.mseconds())
.property_from_str("header-update-mode", "update")
.property("write-mehd", true)
.build()?;
let appsink = gst_app::AppSink::builder().buffer_list(true).build();


@ -170,7 +170,7 @@ fn setup_appsink(appsink: &gst_app::AppSink, name: &str, path: &Path, is_video:
drop(map);
// Remove the header from the buffer list
buffer_list.make_mut().remove(0..1);
buffer_list.make_mut().remove(0, 1);
// If the list is now empty then it only contained the media header and nothing
// else.
@ -360,10 +360,6 @@ impl AudioStream {
.property("samplesperbuffer", 4410)
.property_from_str("wave", &self.wave)
.build()?;
let taginject = gst::ElementFactory::make("taginject")
.property_from_str("tags", &format!("language-code={}", self.lang))
.property_from_str("scope", "stream")
.build()?;
let raw_capsfilter = gst::ElementFactory::make("capsfilter")
.property(
"caps",
@ -378,23 +374,9 @@ impl AudioStream {
.build()?;
let appsink = gst_app::AppSink::builder().buffer_list(true).build();
pipeline.add_many([
&src,
&taginject,
&raw_capsfilter,
&enc,
&mux,
appsink.upcast_ref(),
])?;
pipeline.add_many([&src, &raw_capsfilter, &enc, &mux, appsink.upcast_ref()])?;
gst::Element::link_many([
&src,
&taginject,
&raw_capsfilter,
&enc,
&mux,
appsink.upcast_ref(),
])?;
gst::Element::link_many([&src, &raw_capsfilter, &enc, &mux, appsink.upcast_ref()])?;
probe_encoder(state, enc);
@ -434,7 +416,7 @@ fn main() -> Result<(), Error> {
},
AudioStream {
name: "audio_1".to_string(),
lang: "fra".to_string(),
lang: "fre".to_string(),
default: false,
wave: "white-noise".to_string(),
},


@ -9,9 +9,8 @@
use gst::prelude::*;
use anyhow::{anyhow, bail, Context, Error};
use std::convert::TryFrom;
use super::{Buffer, ImageOrientation, IDENTITY_MATRIX};
use super::Buffer;
fn write_box<T, F: FnOnce(&mut Vec<u8>) -> Result<T, Error>>(
vec: &mut Vec<u8>,
@ -161,13 +160,6 @@ fn cmaf_brands_from_caps(caps: &gst::CapsRef, compatible_brands: &mut Vec<&'stat
"audio/mpeg" => {
compatible_brands.push(b"caac");
}
"audio/x-opus" => {
compatible_brands.push(b"opus");
}
"video/x-av1" => {
compatible_brands.push(b"av01");
compatible_brands.push(b"cmf2");
}
"video/x-h265" => {
let width = s.get::<i32>("width").ok();
let height = s.get::<i32>("height").ok();
@ -585,7 +577,7 @@ fn write_trak(
fn write_tkhd(
v: &mut Vec<u8>,
cfg: &super::HeaderConfiguration,
_cfg: &super::HeaderConfiguration,
idx: usize,
stream: &super::HeaderStream,
creation_time: u64,
@ -612,8 +604,9 @@ fn write_tkhd(
// Volume
let s = stream.caps.structure(0).unwrap();
match s.name().as_str() {
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => v.extend((1u16 << 8).to_be_bytes()),
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
v.extend((1u16 << 8).to_be_bytes())
}
_ => v.extend(0u16.to_be_bytes()),
}
@ -621,15 +614,21 @@ fn write_tkhd(
v.extend([0u8; 2]);
// Matrix
let matrix = match s.name().as_str() {
x if x.starts_with("video/") || x.starts_with("image/") => cfg
.orientation
.unwrap_or(ImageOrientation::Rotate0)
.transform_matrix(),
_ => &IDENTITY_MATRIX,
};
v.extend(matrix.iter().flatten());
v.extend(
[
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(16384u32 << 16).to_be_bytes(),
]
.into_iter()
.flatten(),
);
// Width/height
match s.name().as_str() {
@ -701,6 +700,7 @@ fn write_tref(
fn language_code(lang: impl std::borrow::Borrow<[u8; 3]>) -> u16 {
let lang = lang.borrow();
// TODO: Need to relax this once we get the language code from tags
assert!(lang.iter().all(u8::is_ascii_lowercase));
(((lang[0] as u16 - 0x60) & 0x1F) << 10)
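The packing above follows the mdhd language-field convention: each of the three ISO-639-2/T letters is reduced to 5 bits by subtracting 0x60, and the three values occupy bits 14-10, 9-5 and 4-0 of a 16-bit integer (only the first shift is visible in this hunk; the remaining two terms are assumed to pack lang[1] and lang[2] into the lower bit groups). A worked example:

// "und" (undetermined): 'u' - 0x60 = 21, 'n' - 0x60 = 14, 'd' - 0x60 = 4
// (21 << 10) | (14 << 5) | 4 = 21504 + 448 + 4 = 21956 = 0x55C4
assert_eq!(language_code(b"und"), 0x55C4);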
@ -710,7 +710,7 @@ fn language_code(lang: impl std::borrow::Borrow<[u8; 3]>) -> u16 {
fn write_mdhd(
v: &mut Vec<u8>,
cfg: &super::HeaderConfiguration,
_cfg: &super::HeaderConfiguration,
stream: &super::HeaderStream,
creation_time: u64,
) -> Result<(), Error> {
@ -724,11 +724,8 @@ fn write_mdhd(
v.extend(0u64.to_be_bytes());
// Language as ISO-639-2/T
if let Some(lang) = cfg.language_code {
v.extend(language_code(lang).to_be_bytes());
} else {
v.extend(language_code(b"und").to_be_bytes());
}
// TODO: get actual language from the tags
v.extend(language_code(b"und").to_be_bytes());
// Pre-defined
v.extend([0u8; 2]);
@ -748,8 +745,9 @@ fn write_hdlr(
let (handler_type, name) = match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => (b"vide", b"VideoHandler\0".as_slice()),
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => (b"soun", b"SoundHandler\0".as_slice()),
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
(b"soun", b"SoundHandler\0".as_slice())
}
"application/x-onvif-metadata" => (b"meta", b"MetadataHandler\0".as_slice()),
_ => unreachable!(),
};
@ -779,8 +777,7 @@ fn write_minf(
// Flags are always 1 for unspecified reasons
write_full_box(v, b"vmhd", FULL_BOX_VERSION_0, 1, |v| write_vmhd(v, cfg))?
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => {
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
write_full_box(v, b"smhd", FULL_BOX_VERSION_0, FULL_BOX_FLAGS_NONE, |v| {
write_smhd(v, cfg)
})?
@ -889,8 +886,9 @@ fn write_stsd(
match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => write_visual_sample_entry(v, cfg, stream)?,
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => write_audio_sample_entry(v, cfg, stream)?,
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
write_audio_sample_entry(v, cfg, stream)?
}
"application/x-onvif-metadata" => write_xml_meta_data_sample_entry(v, cfg, stream)?,
_ => unreachable!(),
}
@ -1100,9 +1098,9 @@ fn write_visual_sample_entry(
"professional" => 2,
_ => unreachable!(),
};
// TODO: Use `gst_codec_utils_av1_get_seq_level_idx` when exposed in bindings
let level = av1_seq_level_idx(s.get::<&str>("level").ok());
let tier = av1_tier(s.get::<&str>("tier").ok());
let level = 1; // FIXME
let tier = 0; // FIXME
let (high_bitdepth, twelve_bit) =
match s.get::<u32>("bit-depth-luma").unwrap() {
8 => (false, false),
@ -1147,10 +1145,6 @@ fn write_visual_sample_entry(
v.extend_from_slice(&codec_data);
}
if let Some(extra_data) = &stream.extra_header_data {
// configOBUs
v.extend_from_slice(extra_data.as_slice());
}
Ok(())
})?;
}
@ -1259,44 +1253,6 @@ fn write_visual_sample_entry(
Ok(())
}
fn av1_seq_level_idx(level: Option<&str>) -> u8 {
match level {
Some("2.0") => 0,
Some("2.1") => 1,
Some("2.2") => 2,
Some("2.3") => 3,
Some("3.0") => 4,
Some("3.1") => 5,
Some("3.2") => 6,
Some("3.3") => 7,
Some("4.0") => 8,
Some("4.1") => 9,
Some("4.2") => 10,
Some("4.3") => 11,
Some("5.0") => 12,
Some("5.1") => 13,
Some("5.2") => 14,
Some("5.3") => 15,
Some("6.0") => 16,
Some("6.1") => 17,
Some("6.2") => 18,
Some("6.3") => 19,
Some("7.0") => 20,
Some("7.1") => 21,
Some("7.2") => 22,
Some("7.3") => 23,
_ => 1,
}
}
fn av1_tier(tier: Option<&str>) -> u8 {
match tier {
Some("main") => 0,
Some("high") => 1,
_ => 0,
}
}
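The table above is equivalent to the AV1 rule seq_level_idx = (major - 2) * 4 + minor for levels 2.0 through 7.3, with 1 (level 2.1) as the fallback, while av1_tier maps "main"/"high" to 0/1. For instance:

// "5.2" -> (5 - 2) * 4 + 2 = 14, matching the table entry above
assert_eq!(av1_seq_level_idx(Some("5.2")), 14);
assert_eq!(av1_tier(Some("high")), 1);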
fn write_audio_sample_entry(
v: &mut Vec<u8>,
_cfg: &super::HeaderConfiguration,
@ -1306,7 +1262,6 @@ fn write_audio_sample_entry(
let fourcc = match s.name().as_str() {
"audio/mpeg" => b"mp4a",
"audio/x-opus" => b"Opus",
"audio/x-flac" => b"fLaC",
"audio/x-alaw" => b"alaw",
"audio/x-mulaw" => b"ulaw",
"audio/x-adpcm" => {
@ -1325,10 +1280,6 @@ fn write_audio_sample_entry(
let bitrate = s.get::<i32>("bitrate").context("no ADPCM bitrate field")?;
(bitrate / 8000) as u16
}
"audio/x-flac" => with_flac_metadata(&stream.caps, |streaminfo, _| {
1 + (u16::from_be_bytes([streaminfo[16], streaminfo[17]]) >> 4 & 0b11111)
})
.context("FLAC metadata error")?,
_ => 16u16,
};
@ -1371,9 +1322,6 @@ fn write_audio_sample_entry(
"audio/x-opus" => {
write_dops(v, &stream.caps)?;
}
"audio/x-flac" => {
write_dfla(v, &stream.caps)?;
}
"audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
// Nothing to do here
}
@ -1568,35 +1516,6 @@ fn write_dops(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
})
}
fn with_flac_metadata<R>(
caps: &gst::Caps,
cb: impl FnOnce(&[u8], &[gst::glib::SendValue]) -> R,
) -> Result<R, Error> {
let caps = caps.structure(0).unwrap();
let header = caps.get::<gst::ArrayRef>("streamheader").unwrap();
let (streaminfo, remainder) = header.as_ref().split_first().unwrap();
let streaminfo = streaminfo.get::<&gst::BufferRef>().unwrap();
let streaminfo = streaminfo.map_readable().unwrap();
// 13 bytes for the Ogg/FLAC prefix and 38 for the streaminfo itself.
match <&[_; 13 + 38]>::try_from(streaminfo.as_slice()) {
Ok(i) if i.starts_with(b"\x7FFLAC\x01\x00") => Ok(cb(&i[13..], remainder)),
Ok(_) | Err(_) => bail!("Unknown streamheader format"),
}
}
fn write_dfla(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
write_full_box(v, b"dfLa", 0, 0, move |v| {
with_flac_metadata(caps, |streaminfo, remainder| {
v.extend(streaminfo);
for metadata in remainder {
let metadata = metadata.get::<&gst::BufferRef>().unwrap();
let metadata = metadata.map_readable().unwrap();
v.extend(&metadata[..]);
}
})
})
}
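The offsets hard-coded in with_flac_metadata come from the streamheader layout the code above checks for; stated here as an assumption for readers of this diff rather than taken from the caps documentation:

// streamheader[0]: 13-byte Ogg/FLAC mapping header (0x7F, "FLAC", version 1.0,
//                  header count, "fLaC"), then a 4-byte METADATA_BLOCK_HEADER plus
//                  the 34-byte STREAMINFO block (13 + 38 bytes, matching the try_from above).
// streamheader[1..]: any further metadata blocks; write_dfla() copies them verbatim
//                    into the dfLa box right after the STREAMINFO.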
fn write_xml_meta_data_sample_entry(
v: &mut Vec<u8>,
_cfg: &super::HeaderConfiguration,

File diff suppressed because it is too large.


@ -12,8 +12,6 @@ use gst::prelude::*;
mod boxes;
mod imp;
mod obu;
glib::wrapper! {
pub(crate) struct FMP4MuxPad(ObjectSubclass<imp::FMP4MuxPad>) @extends gst_base::AggregatorPad, gst::Pad, gst::Object;
}
@ -73,80 +71,6 @@ pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
Ok(())
}
#[derive(Debug, Copy, Clone, PartialEq)]
pub(crate) enum ImageOrientation {
Rotate0,
Rotate90,
Rotate180,
Rotate270,
// TODO:
// FlipRotate0,
// FlipRotate90,
// FlipRotate180,
// FlipRotate270,
}
type TransformMatrix = [[u8; 4]; 9];
const IDENTITY_MATRIX: TransformMatrix = [
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
const ROTATE_90_MATRIX: TransformMatrix = [
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
const ROTATE_180_MATRIX: TransformMatrix = [
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
const ROTATE_270_MATRIX: TransformMatrix = [
0u32.to_be_bytes(),
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
impl ImageOrientation {
pub(crate) fn transform_matrix(&self) -> &'static TransformMatrix {
match self {
ImageOrientation::Rotate0 => &IDENTITY_MATRIX,
ImageOrientation::Rotate90 => &ROTATE_90_MATRIX,
ImageOrientation::Rotate180 => &ROTATE_180_MATRIX,
ImageOrientation::Rotate270 => &ROTATE_270_MATRIX,
}
}
}
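These 3x3 matrices follow the ISO BMFF tkhd layout {a, b, u, c, d, v, x, y, w}, where a, b, c, d, x, y are 16.16 fixed point, u, v, w are 2.30, and a point (p, q) maps to (a*p + c*q + x, b*p + d*q + y). Reading ROTATE_90_MATRIX that way (a = 0, b = 1, c = -1, d = 0) gives a quarter-turn:

// (p, q) -> (a*p + c*q, b*p + d*q) = (-q, p) for the 90-degree matrix
let (p, q) = (64i64, 16i64);
let (a, b, c, d) = (0i64, 1, -1, 0);
assert_eq!((a * p + c * q, b * p + d * q), (-16, 64));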
#[derive(Debug)]
pub(crate) struct HeaderConfiguration {
variant: Variant,
@ -161,8 +85,6 @@ pub(crate) struct HeaderConfiguration {
write_mehd: bool,
duration: Option<gst::ClockTime>,
language_code: Option<[u8; 3]>,
orientation: Option<ImageOrientation>,
/// Start UTC time in ONVIF mode.
/// Since Jan 1 1601 in 100ns units.
@ -179,9 +101,6 @@ pub(crate) struct HeaderStream {
/// Pre-defined trak timescale if not 0.
trak_timescale: u32,
// More data to be included in the fragmented stream header
extra_header_data: Option<Vec<u8>>,
}
#[derive(Debug)]


@ -1,303 +0,0 @@
//
// Copyright (C) 2022 Vivienne Watermeier <vwatermeier@igalia.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(non_camel_case_types)]
use bitstream_io::{BigEndian, BitRead, BitReader, Endianness};
use std::io::{self, Cursor, Read, Seek, SeekFrom};
pub fn parse_leb128<R, E>(reader: &mut BitReader<R, E>) -> io::Result<(u32, u32)>
where
R: Read + Seek,
E: Endianness,
{
let mut value = 0;
let mut num_bytes = 0;
for i in 0..8 {
let byte = reader.read::<u32>(8)?;
value |= (byte & 0x7f) << (i * 7);
num_bytes += 1;
if byte & 0x80 == 0 {
break;
}
}
reader.byte_align();
Ok((value, num_bytes))
}
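parse_leb128 reads up to eight bytes of little-endian base-128 data: each byte contributes its low 7 bits and a clear top bit terminates the value. A quick worked example against this implementation:

// 0x96 -> low 7 bits = 0x16 (22), continuation bit set
// 0x01 -> 1 << 7 = 128, continuation bit clear
// => value 150, encoded in 2 bytes
let data = [0x96u8, 0x01];
let mut reader = BitReader::endian(Cursor::new(&data), BigEndian);
assert_eq!(parse_leb128(&mut reader).unwrap(), (150, 2));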
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct SizedObu {
pub obu_type: ObuType,
pub has_extension: bool,
/// If the OBU header is followed by a leb128 size field.
pub has_size_field: bool,
pub temporal_id: u8,
pub spatial_id: u8,
/// size of the OBU payload in bytes.
/// This may refer to different sizes in different contexts, not always
/// to the entire OBU payload as it is in the AV1 bitstream.
pub size: u32,
/// the number of bytes the leb128 size field will take up
/// when written with write_leb128().
/// This does not imply `has_size_field`, and does not necessarily match with
/// the length of the internal size field if present.
pub leb_size: u32,
pub header_len: u32,
/// indicates that only part of this OBU has been processed so far
pub is_fragment: bool,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ObuType {
Reserved,
SequenceHeader,
TemporalDelimiter,
FrameHeader,
TileGroup,
Metadata,
Frame,
RedundantFrameHeader,
TileList,
Padding,
}
impl Default for ObuType {
fn default() -> Self {
Self::Reserved
}
}
impl SizedObu {
/// Parse an OBU header and size field. If the OBU is not expected to contain
/// a size field, but the size is known from external information,
/// parse as an `UnsizedObu` and use `to_sized`.
pub fn parse<R, E>(reader: &mut BitReader<R, E>) -> io::Result<Self>
where
R: Read + Seek,
E: Endianness,
{
// check the forbidden bit
if reader.read_bit()? {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"forbidden bit in OBU header is set",
));
}
let obu_type = reader.read::<u8>(4)?.into();
let has_extension = reader.read_bit()?;
// require a size field
if !reader.read_bit()? {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"expected a size field",
));
}
// ignore the reserved bit
let _ = reader.read_bit()?;
let (temporal_id, spatial_id) = if has_extension {
(reader.read::<u8>(3)?, reader.read::<u8>(2)?)
} else {
(0, 0)
};
reader.byte_align();
let (size, leb_size) = parse_leb128(reader)?;
Ok(Self {
obu_type,
has_extension,
has_size_field: true,
temporal_id,
spatial_id,
size,
leb_size,
header_len: has_extension as u32 + 1,
is_fragment: false,
})
}
/// The total number of bytes this OBU will take up, including its header and
/// the space needed for its leb128 size field.
pub fn full_size(&self) -> u32 {
self.size + self.leb_size + self.header_len
}
}
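// For instance, the 8-byte SequenceHeader test vector in the tests below has
// header_len == 2 (header byte + extension byte), leb_size == 1 and size == 5,
// so full_size() == 2 + 1 + 5 == 8, matching the length of its raw bytes.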
pub fn read_seq_header_obu_bytes(data: &[u8]) -> io::Result<Option<Vec<u8>>> {
let mut cursor = Cursor::new(data);
while cursor.position() < data.len() as u64 {
let obu_start = cursor.position();
let Ok(obu) = SizedObu::parse(&mut BitReader::endian(&mut cursor, BigEndian)) else {
break;
};
// set reader to the beginning of the OBU
cursor.seek(SeekFrom::Start(obu_start))?;
if obu.obu_type != ObuType::SequenceHeader {
// Skip the full OBU
cursor.seek(SeekFrom::Current(obu.full_size() as i64))?;
continue;
};
// read the full OBU
let mut bytes = vec![0; obu.full_size() as usize];
cursor.read_exact(&mut bytes)?;
return Ok(Some(bytes));
}
Ok(None)
}
impl From<u8> for ObuType {
fn from(n: u8) -> Self {
assert!(n < 16);
match n {
1 => Self::SequenceHeader,
2 => Self::TemporalDelimiter,
3 => Self::FrameHeader,
4 => Self::TileGroup,
5 => Self::Metadata,
6 => Self::Frame,
7 => Self::RedundantFrameHeader,
8 => Self::TileList,
15 => Self::Padding,
_ => Self::Reserved,
}
}
}
impl From<ObuType> for u8 {
fn from(ty: ObuType) -> Self {
match ty {
ObuType::Reserved => 0,
ObuType::SequenceHeader => 1,
ObuType::TemporalDelimiter => 2,
ObuType::FrameHeader => 3,
ObuType::TileGroup => 4,
ObuType::Metadata => 5,
ObuType::Frame => 6,
ObuType::RedundantFrameHeader => 7,
ObuType::TileList => 8,
ObuType::Padding => 15,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use bitstream_io::{BigEndian, BitReader};
use once_cell::sync::Lazy;
use std::io::Cursor;
#[allow(clippy::type_complexity)]
static OBUS: Lazy<Vec<(SizedObu, Vec<u8>)>> = Lazy::new(|| {
vec![
(
SizedObu {
obu_type: ObuType::TemporalDelimiter,
has_extension: false,
has_size_field: true,
temporal_id: 0,
spatial_id: 0,
size: 0,
leb_size: 1,
header_len: 1,
is_fragment: false,
},
vec![0b0001_0010, 0b0000_0000],
),
(
SizedObu {
obu_type: ObuType::Padding,
has_extension: false,
has_size_field: true,
temporal_id: 0,
spatial_id: 0,
size: 10,
leb_size: 1,
header_len: 1,
is_fragment: false,
},
vec![0b0111_1010, 0b0000_1010, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
),
(
SizedObu {
obu_type: ObuType::SequenceHeader,
has_extension: true,
has_size_field: true,
temporal_id: 4,
spatial_id: 3,
size: 5,
leb_size: 1,
header_len: 2,
is_fragment: false,
},
vec![0b0000_1110, 0b1001_1000, 0b0000_0101, 1, 2, 3, 4, 5],
),
(
SizedObu {
obu_type: ObuType::Frame,
has_extension: true,
has_size_field: true,
temporal_id: 4,
spatial_id: 3,
size: 5,
leb_size: 1,
header_len: 2,
is_fragment: false,
},
vec![0b0011_0110, 0b1001_1000, 0b0000_0101, 1, 2, 3, 4, 5],
),
]
});
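// Decoding the first vector by hand (descriptive note): the header byte 0b0001_0010 is
// forbidden_bit=0, obu_type=0b0010 (TemporalDelimiter), extension_flag=0,
// has_size_field=1, reserved=0, and the following 0b0000_0000 is the leb128 size (0).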
#[test]
fn test_parse_rtp_obu() {
for (idx, (sized_obu, raw_bytes)) in (*OBUS).iter().enumerate() {
println!("running test {idx}...");
let mut reader = BitReader::endian(Cursor::new(&raw_bytes), BigEndian);
let obu_parsed = SizedObu::parse(&mut reader).unwrap();
assert_eq!(&obu_parsed, sized_obu);
if let Some(seq_header_obu_bytes) = read_seq_header_obu_bytes(raw_bytes).unwrap() {
println!("validation of sequence header obu read/write...");
assert_eq!(&seq_header_obu_bytes, raw_bytes);
}
}
}
#[test]
fn test_read_seq_header_from_bitstream() {
let mut bitstream = Vec::new();
let mut seq_header_bytes_raw = None;
for (obu, raw_bytes) in (*OBUS).iter() {
bitstream.extend(raw_bytes);
if obu.obu_type == ObuType::SequenceHeader {
seq_header_bytes_raw = Some(raw_bytes.clone());
}
}
let seq_header_obu_bytes = read_seq_header_obu_bytes(&bitstream).unwrap().unwrap();
assert_eq!(seq_header_obu_bytes, seq_header_bytes_raw.unwrap());
}
}

View file

@ -19,33 +19,6 @@ fn init() {
});
}
fn to_completion(pipeline: &gst::Pipeline) {
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in pipeline.bus().unwrap().iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
panic!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
fn test_buffer_flags_single_stream(cmaf: bool, set_dts: bool, caps: gst::Caps) {
let mut h = if cmaf {
gst_check::Harness::new("cmafmux")
@ -236,26 +209,6 @@ fn test_buffer_flags_single_vp9_stream_iso() {
test_buffer_flags_single_stream(false, false, caps);
}
#[test]
fn test_buffer_flags_single_av1_stream_cmaf() {
init();
let caps = gst::Caps::builder("video/x-av1")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("profile", "main")
.field("tier", "main")
.field("level", "4.1")
.field("chroma-format", "4:2:0")
.field("bit-depth-luma", 8u32)
.field("bit-depth-chroma", 8u32)
.field("colorimetry", "bt709")
.build();
test_buffer_flags_single_stream(true, false, caps);
}
#[test]
fn test_buffer_flags_multi_stream() {
init();
@ -1334,328 +1287,6 @@ fn test_buffer_multi_stream_short_gops() {
assert_eq!(ev.type_(), gst::EventType::Eos);
}
#[test]
fn test_single_stream_manual_fragment() {
init();
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::with_size(1).unwrap())
.build();
let mut h = gst_check::Harness::new("cmafmux");
// fragment duration long enough to be ignored
h.element()
.unwrap()
.set_property("fragment-duration", 1.hours());
h.set_src_caps(caps);
h.play();
// request fragment at 4 seconds, should be created at 11th buffer
h.element()
.unwrap()
.emit_by_name::<()>("split-at-running-time", &[&4.seconds()]);
// Push 20 buffers of 0.5s each; the 1st, 11th and 16th buffer without DELTA_UNIT flag
for i in 0..20 {
let mut buffer = gst::Buffer::with_size(1).unwrap();
{
let buffer = buffer.get_mut().unwrap();
buffer.set_pts(i * 500.mseconds());
buffer.set_dts(i * 500.mseconds());
buffer.set_duration(500.mseconds());
if i != 0 && i != 10 && i != 15 {
buffer.set_flags(gst::BufferFlags::DELTA_UNIT);
}
}
assert_eq!(h.push(buffer), Ok(gst::FlowSuccess::Ok));
if i == 2 {
let ev = loop {
let ev = h.pull_upstream_event().unwrap();
if ev.type_() != gst::EventType::Reconfigure
&& ev.type_() != gst::EventType::Latency
{
break ev;
}
};
assert_eq!(ev.type_(), gst::EventType::CustomUpstream);
assert_eq!(
gst_video::UpstreamForceKeyUnitEvent::parse(&ev).unwrap(),
gst_video::UpstreamForceKeyUnitEvent {
running_time: Some(4.seconds()),
all_headers: true,
count: 0
}
);
}
}
// Crank the clock: this should bring us to the end of the first fragment
h.crank_single_clock_wait().unwrap();
let header = h.pull().unwrap();
assert_eq!(
header.flags(),
gst::BufferFlags::HEADER | gst::BufferFlags::DISCONT
);
assert_eq!(header.pts(), Some(gst::ClockTime::ZERO));
assert_eq!(header.dts(), Some(gst::ClockTime::ZERO));
// first fragment
let fragment_header = h.pull().unwrap();
assert_eq!(fragment_header.flags(), gst::BufferFlags::HEADER);
assert_eq!(fragment_header.pts(), Some(gst::ClockTime::ZERO));
assert_eq!(fragment_header.dts(), Some(gst::ClockTime::ZERO));
assert_eq!(fragment_header.duration(), Some(5.seconds()));
for buffer_idx in 0..10 {
let buffer = h.pull().unwrap();
if buffer_idx == 9 {
assert_eq!(
buffer.flags(),
gst::BufferFlags::DELTA_UNIT | gst::BufferFlags::MARKER
);
} else {
assert_eq!(buffer.flags(), gst::BufferFlags::DELTA_UNIT);
}
assert_eq!(buffer.pts(), Some(buffer_idx * 500.mseconds()));
assert_eq!(buffer.dts(), Some(buffer_idx * 500.mseconds()));
assert_eq!(buffer.duration(), Some(500.mseconds()));
}
// second manual fragment
let fragment_header = h.pull().unwrap();
assert_eq!(fragment_header.flags(), gst::BufferFlags::HEADER);
assert_eq!(fragment_header.pts(), Some(5.seconds()));
assert_eq!(fragment_header.dts(), Some(5.seconds()));
assert_eq!(fragment_header.duration(), Some(2500.mseconds()));
for buffer_idx in 0..5 {
let buffer = h.pull().unwrap();
if buffer_idx == 4 {
assert_eq!(
buffer.flags(),
gst::BufferFlags::DELTA_UNIT | gst::BufferFlags::MARKER
);
} else {
assert_eq!(buffer.flags(), gst::BufferFlags::DELTA_UNIT);
}
assert_eq!(
buffer.pts(),
Some(5.seconds() + buffer_idx * 500.mseconds())
);
assert_eq!(
buffer.dts(),
Some(5.seconds() + buffer_idx * 500.mseconds())
);
assert_eq!(buffer.duration(), Some(500.mseconds()));
}
h.push_event(gst::event::Eos::new());
// There should be the last fragment now
let fragment_header = h.pull().unwrap();
assert_eq!(fragment_header.flags(), gst::BufferFlags::HEADER);
assert_eq!(fragment_header.pts(), Some(7500.mseconds()));
assert_eq!(fragment_header.dts(), Some(7500.mseconds()));
assert_eq!(fragment_header.duration(), Some(2500.mseconds()));
for buffer_idx in 0..5 {
let buffer = h.pull().unwrap();
if buffer_idx == 4 {
assert_eq!(
buffer.flags(),
gst::BufferFlags::DELTA_UNIT | gst::BufferFlags::MARKER
);
} else {
assert_eq!(buffer.flags(), gst::BufferFlags::DELTA_UNIT);
}
assert_eq!(
buffer.pts(),
Some(7500.mseconds() + buffer_idx * 500.mseconds())
);
assert_eq!(
buffer.dts(),
Some(7500.mseconds() + buffer_idx * 500.mseconds())
);
assert_eq!(buffer.duration(), Some(500.mseconds()));
}
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::StreamStart);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Caps);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Segment);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Eos);
}
#[test]
fn test_chunking_single_stream_manual_fragment() {
init();
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::with_size(1).unwrap())
.build();
let mut h = gst_check::Harness::new("cmafmux");
// fragment duration long enough to be ignored, 1s chunk duration
h.element()
.unwrap()
.set_property("fragment-duration", 1.hours());
h.element()
.unwrap()
.set_property("chunk-duration", 1.seconds());
h.set_src_caps(caps);
h.play();
// request fragment at 4 seconds, should be created at 11th buffer
h.element()
.unwrap()
.emit_by_name::<()>("split-at-running-time", &[&4.seconds()]);
// Push 15 buffers of 0.5s each, 1st and 11th buffer without DELTA_UNIT flag
for i in 0..15 {
let mut buffer = gst::Buffer::with_size(1).unwrap();
{
let buffer = buffer.get_mut().unwrap();
buffer.set_pts(i * 500.mseconds());
buffer.set_dts(i * 500.mseconds());
buffer.set_duration(500.mseconds());
if i != 0 && i != 10 {
buffer.set_flags(gst::BufferFlags::DELTA_UNIT);
}
}
assert_eq!(h.push(buffer), Ok(gst::FlowSuccess::Ok));
if i == 2 {
let ev = loop {
let ev = h.pull_upstream_event().unwrap();
if ev.type_() != gst::EventType::Reconfigure
&& ev.type_() != gst::EventType::Latency
{
break ev;
}
};
assert_eq!(ev.type_(), gst::EventType::CustomUpstream);
assert_eq!(
gst_video::UpstreamForceKeyUnitEvent::parse(&ev).unwrap(),
gst_video::UpstreamForceKeyUnitEvent {
running_time: Some(4.seconds()),
all_headers: true,
count: 0
}
);
}
}
// Crank the clock: this should bring us to the end of the first fragment
h.crank_single_clock_wait().unwrap();
let header = h.pull().unwrap();
assert_eq!(
header.flags(),
gst::BufferFlags::HEADER | gst::BufferFlags::DISCONT
);
assert_eq!(header.pts(), Some(gst::ClockTime::ZERO));
assert_eq!(header.dts(), Some(gst::ClockTime::ZERO));
// There should be 7 chunks now, and the 1st and 6th are starting a fragment.
// Each chunk should have two buffers.
for chunk in 0..7 {
let chunk_header = h.pull().unwrap();
if chunk == 0 || chunk == 5 {
assert_eq!(chunk_header.flags(), gst::BufferFlags::HEADER);
} else {
assert_eq!(
chunk_header.flags(),
gst::BufferFlags::HEADER | gst::BufferFlags::DELTA_UNIT
);
}
assert_eq!(chunk_header.pts(), Some(chunk * 1.seconds()));
assert_eq!(chunk_header.dts(), Some(chunk * 1.seconds()));
assert_eq!(chunk_header.duration(), Some(1.seconds()));
for buffer_idx in 0..2 {
let buffer = h.pull().unwrap();
if buffer_idx == 1 {
assert_eq!(
buffer.flags(),
gst::BufferFlags::DELTA_UNIT | gst::BufferFlags::MARKER
);
} else {
assert_eq!(buffer.flags(), gst::BufferFlags::DELTA_UNIT);
}
assert_eq!(
buffer.pts(),
Some((chunk * 2 + buffer_idx) * 500.mseconds())
);
assert_eq!(
buffer.dts(),
Some((chunk * 2 + buffer_idx) * 500.mseconds())
);
assert_eq!(buffer.duration(), Some(500.mseconds()));
}
}
h.push_event(gst::event::Eos::new());
// There should be the remaining chunk now, containing one 500ms buffer.
for chunk in 7..8 {
let chunk_header = h.pull().unwrap();
assert_eq!(
chunk_header.flags(),
gst::BufferFlags::HEADER | gst::BufferFlags::DELTA_UNIT
);
assert_eq!(chunk_header.pts(), Some(chunk * 1.seconds()));
assert_eq!(chunk_header.dts(), Some(chunk * 1.seconds()));
assert_eq!(chunk_header.duration(), Some(500.mseconds()));
for buffer_idx in 0..1 {
let buffer = h.pull().unwrap();
assert_eq!(
buffer.flags(),
gst::BufferFlags::DELTA_UNIT | gst::BufferFlags::MARKER
);
assert_eq!(
buffer.pts(),
Some((chunk * 2 + buffer_idx) * 500.mseconds())
);
assert_eq!(
buffer.dts(),
Some((chunk * 2 + buffer_idx) * 500.mseconds())
);
assert_eq!(buffer.duration(), Some(500.mseconds()));
}
}
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::StreamStart);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Caps);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Segment);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Eos);
}
#[test]
fn test_chunking_single_stream() {
init();
@ -2362,466 +1993,3 @@ fn test_chunking_single_stream_gops_after_fragment_end_after_next_chunk_end() {
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Eos);
}
#[test]
fn test_early_eos() {
init();
let mut h = gst_check::Harness::with_padnames("isofmp4mux", Some("sink_0"), Some("src"));
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::with_size(1).unwrap())
.build();
h.element()
.unwrap()
.set_property("fragment-duration", 1.seconds());
h.set_src_caps(caps);
h.play();
for i in 0..5 {
let mut buffer = gst::Buffer::with_size(1).unwrap();
{
let buffer = buffer.get_mut().unwrap();
buffer.set_pts(i * 100.mseconds());
buffer.set_dts(i * 100.mseconds());
buffer.set_duration(100.mseconds());
buffer.set_flags(gst::BufferFlags::DELTA_UNIT);
}
assert_eq!(h.push(buffer), Ok(gst::FlowSuccess::Ok));
}
h.push_event(gst::event::Eos::new());
assert_eq!(h.buffers_in_queue(), 0);
}
#[test]
fn test_roundtrip_vp9_flac() {
init();
let pipeline = gst::parse::launch(
r#"
videotestsrc num-buffers=99 ! vp9enc ! vp9parse ! mux.
audiotestsrc num-buffers=149 ! flacenc ! flacparse ! mux.
isofmp4mux name=mux ! qtdemux name=demux
demux.audio_0 ! queue ! flacdec ! fakesink
demux.video_0 ! queue ! vp9dec ! fakesink
"#,
)
.unwrap();
let pipeline = pipeline.downcast().unwrap();
to_completion(&pipeline);
}
#[track_caller]
fn test_caps_changed_verify(
h: &mut gst_check::Harness,
num_bufs: usize,
caps_changed: bool,
chunk: bool,
) {
for i in 0..num_bufs {
let b = h.pull().unwrap();
// FIXME: Rust 1.71 does not detect that the match is exhaustive so a `_` pattern has to be
// added, but newer Rust warns (correctly) about that pattern being unreachable.
#[allow(unreachable_patterns)]
match (caps_changed, i, chunk) {
(true, 0, _) => assert_eq!(
b.flags(),
gst::BufferFlags::HEADER | gst::BufferFlags::DISCONT
),
(false, 0, false) | (true, 1, false) => assert_eq!(b.flags(), gst::BufferFlags::HEADER),
(false, 0, true) | (true, 1, true) => assert_eq!(
b.flags(),
gst::BufferFlags::HEADER | gst::BufferFlags::DELTA_UNIT
),
(false, 1, _) | (_, 2.., _) => {
if i == num_bufs - 1 {
assert_eq!(
b.flags(),
gst::BufferFlags::MARKER | gst::BufferFlags::DELTA_UNIT
);
} else {
assert_eq!(b.flags(), gst::BufferFlags::DELTA_UNIT);
}
}
_ => unreachable!(),
}
}
}
#[track_caller]
fn test_caps_changed_buffers(
h: &mut gst_check::Harness,
num_bufs: u64,
gop_size: u64,
caps_change: u64,
duration: u64,
key_frame_on_caps_change: bool,
drop_first_buffer: bool,
) {
for i in 0..num_bufs {
let mut buffer = gst::Buffer::with_size(1).unwrap();
{
let buffer = buffer.get_mut().unwrap();
buffer.set_pts(i * duration.mseconds());
buffer.set_dts(i * duration.mseconds());
buffer.set_duration(duration.mseconds());
if i % gop_size != 0 && (i != caps_change || !key_frame_on_caps_change) {
buffer.set_flags(gst::BufferFlags::DELTA_UNIT);
}
}
if i == 0 && drop_first_buffer {
continue;
}
if i == caps_change {
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1280i32)
.field("height", 720i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::from_slice([1, 2, 3, 4]))
.build();
h.push_event(gst::event::Caps::new(&caps));
}
assert_eq!(h.push(buffer), Ok(gst::FlowSuccess::Ok));
}
}
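// How to read the buffer counts passed to test_caps_changed_verify() in the tests below
// (descriptive note): `1 + 1 + N` is an init-section header plus a fragment/chunk header
// plus N samples (after a caps change), while `1 + N` is a fragment/chunk header plus
// N samples.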
#[test]
fn test_caps_change_at_gop_boundary() {
init();
let mut h = gst_check::Harness::with_padnames("isofmp4mux", Some("sink_0"), Some("src"));
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::from_slice([1, 2, 3, 4]))
.build();
h.element()
.unwrap()
.set_property("fragment-duration", 1.seconds());
h.set_src_caps(caps);
h.play();
test_caps_changed_buffers(&mut h, 30, 10, 10, 100, true, false);
h.crank_single_clock_wait().unwrap();
// Initial fragment with HEADER and DISCONT
test_caps_changed_verify(&mut h, 1 + 1 + 10, true, false);
h.crank_single_clock_wait().unwrap();
// Full GOP with HEADER and DISCONT due to caps change
test_caps_changed_verify(&mut h, 1 + 1 + 10, true, false);
h.crank_single_clock_wait().unwrap();
h.push_event(gst::event::Eos::new());
// Full GOP with HEADER but no DISCONT because no caps change
test_caps_changed_verify(&mut h, 1 + 10, false, false);
assert_eq!(h.buffers_in_queue(), 0);
}
#[test]
fn test_caps_change_at_gop_boundary_compatible() {
init();
let mut h = gst_check::Harness::with_padnames("isofmp4mux", Some("sink_0"), Some("src"));
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1280i32)
.field("height", 720i32)
.field("framerate", gst::Fraction::new(10, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::from_slice([1, 2, 3, 4]))
.build();
h.element()
.unwrap()
.set_property("fragment-duration", 1.seconds());
h.set_src_caps(caps);
h.play();
test_caps_changed_buffers(&mut h, 30, 10, 10, 100, true, false);
h.crank_single_clock_wait().unwrap();
// Initial fragment with HEADER and DISCONT
test_caps_changed_verify(&mut h, 1 + 1 + 10, true, false);
h.crank_single_clock_wait().unwrap();
// Full GOP with HEADER but no DISCONT because compatible caps
// change
test_caps_changed_verify(&mut h, 1 + 10, false, false);
h.crank_single_clock_wait().unwrap();
h.push_event(gst::event::Eos::new());
// Full GOP with HEADER but no DISCONT because no caps change
test_caps_changed_verify(&mut h, 1 + 10, false, false);
assert_eq!(h.buffers_in_queue(), 0);
}
#[test]
fn test_caps_change_at_gop_boundary_not_allowed() {
init();
let mut h = gst_check::Harness::with_padnames("isofmp4mux", Some("sink_0"), Some("src"));
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::from_slice([1, 2, 3, 4]))
.build();
h.element()
.unwrap()
.set_property("fragment-duration", 1.seconds());
h.element()
.unwrap()
.set_property_from_str("header-update-mode", "rewrite");
h.set_src_caps(caps);
h.play();
test_caps_changed_buffers(&mut h, 30, 10, 10, 100, true, false);
h.crank_single_clock_wait().unwrap();
// Initial fragment with HEADER and DISCONT
test_caps_changed_verify(&mut h, 1 + 1 + 10, true, false);
h.crank_single_clock_wait().unwrap();
// Full GOP with HEADER but no DISCONT because caps change not
// allowed by header-update-mode
test_caps_changed_verify(&mut h, 1 + 10, false, false);
h.crank_single_clock_wait().unwrap();
h.push_event(gst::event::Eos::new());
// Full GOP with HEADER but no DISCONT because no caps change
test_caps_changed_verify(&mut h, 1 + 10, false, false);
assert_eq!(h.buffers_in_queue(), 0);
}
#[test]
fn test_caps_change_within_gop() {
init();
let mut h = gst_check::Harness::with_padnames("isofmp4mux", Some("sink_0"), Some("src"));
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::from_slice([1, 2, 3, 4]))
.build();
h.element()
.unwrap()
.set_property("fragment-duration", 1.seconds());
h.set_src_caps(caps);
h.play();
test_caps_changed_buffers(&mut h, 20, 10, 5, 100, true, false);
h.crank_single_clock_wait().unwrap();
// Initial fragment with HEADER and DISCONT
test_caps_changed_verify(&mut h, 1 + 1 + 5, true, false);
h.crank_single_clock_wait().unwrap();
// Reduced GOP with HEADER and DISCONT due to caps change
test_caps_changed_verify(&mut h, 1 + 1 + 5, true, false);
h.crank_single_clock_wait().unwrap();
h.push_event(gst::event::Eos::new());
// Full GOP with HEADER but no DISCONT because no caps change
test_caps_changed_verify(&mut h, 1 + 10, false, false);
assert_eq!(h.buffers_in_queue(), 0);
}
#[test]
fn test_caps_change_within_gop_start_without_key() {
init();
let mut h = gst_check::Harness::with_padnames("isofmp4mux", Some("sink_0"), Some("src"));
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::from_slice([1, 2, 3, 4]))
.build();
h.element()
.unwrap()
.set_property("fragment-duration", 1.seconds());
h.set_src_caps(caps);
h.play();
test_caps_changed_buffers(&mut h, 20, 10, 5, 100, true, true);
// Same as test_caps_change_within_gop() but without the first
// fragment since all frames are dropped due to missing key frame
h.crank_single_clock_wait().unwrap();
// Reduced GOP with HEADER and DISCONT due to caps change
test_caps_changed_verify(&mut h, 1 + 1 + 5, true, false);
h.crank_single_clock_wait().unwrap();
h.push_event(gst::event::Eos::new());
// Full GOP with HEADER but no DISCONT because no caps change
test_caps_changed_verify(&mut h, 1 + 10, false, false);
assert_eq!(h.buffers_in_queue(), 0);
}
#[test]
fn test_caps_change_within_gop_chunked() {
init();
let mut h = gst_check::Harness::with_padnames("isofmp4mux", Some("sink_0"), Some("src"));
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::from_slice([1, 2, 3, 4]))
.build();
h.element()
.unwrap()
.set_property("fragment-duration", 1.seconds());
h.element()
.unwrap()
.set_property("chunk-duration", 300.mseconds());
h.set_src_caps(caps);
h.play();
test_caps_changed_buffers(&mut h, 22, 10, 5, 30, true, false);
h.crank_single_clock_wait().unwrap();
// Initial fragment with HEADER and DISCONT
test_caps_changed_verify(&mut h, 1 + 1 + 5, true, false);
h.crank_single_clock_wait().unwrap();
// Fragment with HEADER and DISCONT due to caps change
test_caps_changed_verify(&mut h, 1 + 1 + 10, true, false);
h.crank_single_clock_wait().unwrap();
// Reduced chunk due to the GOP ending in between
test_caps_changed_verify(&mut h, 1 + 5, false, true);
h.crank_single_clock_wait().unwrap();
h.push_event(gst::event::Eos::new());
// Everything left until EOS
test_caps_changed_verify(&mut h, 1 + 2, false, true);
assert_eq!(h.buffers_in_queue(), 0);
}
#[test]
fn test_caps_change_within_gop_no_key() {
init();
let mut h = gst_check::Harness::with_padnames("isofmp4mux", Some("sink_0"), Some("src"));
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::from_slice([1, 2, 3, 4]))
.build();
h.element()
.unwrap()
.set_property("fragment-duration", 1.seconds());
h.set_src_caps(caps);
h.play();
test_caps_changed_buffers(&mut h, 22, 10, 5, 100, false, false);
h.crank_single_clock_wait().unwrap();
// Initial fragment with HEADER and DISCONT
test_caps_changed_verify(&mut h, 1 + 1 + 5, true, false);
h.crank_single_clock_wait().unwrap();
// Reduced GOP with HEADER and DISCONT due to caps change
test_caps_changed_verify(&mut h, 1 + 1 + 10, true, false);
h.crank_single_clock_wait().unwrap();
h.push_event(gst::event::Eos::new());
// Everything left until EOS
test_caps_changed_verify(&mut h, 1 + 2, false, false);
assert_eq!(h.buffers_in_queue(), 0);
}
#[test]
fn test_caps_change_before_first_frame() {
init();
let mut h = gst_check::Harness::with_padnames("isofmp4mux", Some("sink_0"), Some("src"));
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::from_slice([1, 2, 3, 4]))
.build();
h.element()
.unwrap()
.set_property("fragment-duration", 1.seconds());
h.set_src_caps(caps);
h.play();
test_caps_changed_buffers(&mut h, 22, 10, 0, 100, true, false);
h.crank_single_clock_wait().unwrap();
// Initial fragment with HEADER and DISCONT
test_caps_changed_verify(&mut h, 1 + 1 + 10, true, false);
h.crank_single_clock_wait().unwrap();
// 2nd fragment with HEADER
test_caps_changed_verify(&mut h, 1 + 10, false, false);
assert_eq!(h.buffers_in_queue(), 0);
}

View file

@ -16,7 +16,6 @@ gst-audio = { workspace = true, features = ["v1_18"] }
gst-video = { workspace = true, features = ["v1_18"] }
gst-pbutils = { workspace = true, features = ["v1_18"] }
once_cell.workspace = true
bitstream-io = "2.3"
[lib]
name = "gstmp4"

View file

@ -9,10 +9,8 @@
use gst::prelude::*;
use anyhow::{anyhow, bail, Context, Error};
use std::convert::TryFrom;
use std::str::FromStr;
use super::{ImageOrientation, IDENTITY_MATRIX};
use std::str::FromStr;
fn write_box<T, F: FnOnce(&mut Vec<u8>) -> Result<T, Error>>(
vec: &mut Vec<u8>,
@ -58,31 +56,18 @@ fn write_full_box<T, F: FnOnce(&mut Vec<u8>) -> Result<T, Error>>(
}
/// Creates `ftyp` box
pub(super) fn create_ftyp(
variant: super::Variant,
content_caps: &[&gst::CapsRef],
) -> Result<gst::Buffer, Error> {
pub(super) fn create_ftyp(variant: super::Variant) -> Result<gst::Buffer, Error> {
let mut v = vec![];
let mut minor_version = 0u32;
let (brand, mut compatible_brands) = match variant {
let (brand, compatible_brands) = match variant {
super::Variant::ISO | super::Variant::ONVIF => (b"iso4", vec![b"mp41", b"mp42", b"isom"]),
};
for caps in content_caps {
let s = caps.structure(0).unwrap();
if let (super::Variant::ISO, "video/x-av1") = (variant, s.name().as_str()) {
minor_version = 1;
compatible_brands = vec![b"iso4", b"av01"];
break;
}
}
write_box(&mut v, b"ftyp", |v| {
// major brand
v.extend(brand);
// minor version
v.extend(minor_version.to_be_bytes());
v.extend(0u32.to_be_bytes());
// compatible brands
v.extend(compatible_brands.into_iter().flatten());
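// Resulting layout for Variant::ISO (illustrative, assuming write_box() prefixes the
// usual 4-byte size plus fourcc): a 28-byte box
//   00 00 00 1c  'f' 't' 'y' 'p'                       size + type
//   'i' 's' 'o' '4'                                    major brand
//   00 00 00 00                                        minor version
//   'm' 'p' '4' '1'  'm' 'p' '4' '2'  'i' 's' 'o' 'm'  compatible brands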
@ -397,8 +382,9 @@ fn write_tkhd(
// Volume
let s = stream.caps.structure(0).unwrap();
match s.name().as_str() {
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => v.extend((1u16 << 8).to_be_bytes()),
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
v.extend((1u16 << 8).to_be_bytes())
}
_ => v.extend(0u16.to_be_bytes()),
}
@ -406,14 +392,21 @@ fn write_tkhd(
v.extend([0u8; 2]);
// Matrix
let matrix = match s.name().as_str() {
x if x.starts_with("video/") || x.starts_with("image/") => stream
.orientation
.unwrap_or(ImageOrientation::Rotate0)
.transform_matrix(),
_ => &IDENTITY_MATRIX,
};
v.extend(matrix.iter().flatten());
v.extend(
[
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(16384u32 << 16).to_be_bytes(),
]
.into_iter()
.flatten(),
);
// Width/height
match s.name().as_str() {
@ -467,6 +460,7 @@ fn write_mdia(
fn language_code(lang: impl std::borrow::Borrow<[u8; 3]>) -> u16 {
let lang = lang.borrow();
// TODO: Need to relax this once we get the language code from tags
assert!(lang.iter().all(u8::is_ascii_lowercase));
(((lang[0] as u16 - 0x60) & 0x1F) << 10)
@ -476,7 +470,7 @@ fn language_code(lang: impl std::borrow::Borrow<[u8; 3]>) -> u16 {
fn write_mdhd(
v: &mut Vec<u8>,
header: &super::Header,
_header: &super::Header,
stream: &super::Stream,
creation_time: u64,
) -> Result<(), Error> {
@ -499,11 +493,8 @@ fn write_mdhd(
v.extend(duration.to_be_bytes());
// Language as ISO-639-2/T
if let Some(lang) = header.language_code {
v.extend(language_code(lang).to_be_bytes());
} else {
v.extend(language_code(b"und").to_be_bytes());
}
// TODO: get actual language from the tags
v.extend(language_code(b"und").to_be_bytes());
// Pre-defined
v.extend([0u8; 2]);
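// Worked example (descriptive note): language_code() packs each letter minus 0x60 into
// 5 bits, so b"und" yields ('u': 21 << 10) | ('n': 14 << 5) | ('d': 4) == 0x55C4, the
// value written above for the "undetermined" language.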
@ -523,8 +514,9 @@ fn write_hdlr(
let (handler_type, name) = match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => (b"vide", b"VideoHandler\0".as_slice()),
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => (b"soun", b"SoundHandler\0".as_slice()),
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
(b"soun", b"SoundHandler\0".as_slice())
}
"application/x-onvif-metadata" => (b"meta", b"MetadataHandler\0".as_slice()),
_ => unreachable!(),
};
@ -554,8 +546,7 @@ fn write_minf(
// Flags are always 1 for unspecified reasons
write_full_box(v, b"vmhd", FULL_BOX_VERSION_0, 1, |v| write_vmhd(v, header))?
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => {
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
write_full_box(v, b"smhd", FULL_BOX_VERSION_0, FULL_BOX_FLAGS_NONE, |v| {
write_smhd(v, header)
})?
@ -712,8 +703,9 @@ fn write_stsd(
match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => write_visual_sample_entry(v, header, stream)?,
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => write_audio_sample_entry(v, header, stream)?,
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
write_audio_sample_entry(v, header, stream)?
}
"application/x-onvif-metadata" => write_xml_meta_data_sample_entry(v, header, stream)?,
_ => unreachable!(),
}
@ -924,9 +916,8 @@ fn write_visual_sample_entry(
_ => unreachable!(),
};
// TODO: Use `gst_codec_utils_av1_get_seq_level_idx` when exposed in bindings
let level = av1_seq_level_idx(s.get::<&str>("level").ok());
let tier = av1_tier(s.get::<&str>("tier").ok());
let level = 1; // FIXME
let tier = 0; // FIXME
let (high_bitdepth, twelve_bit) =
match s.get::<u32>("bit-depth-luma").unwrap() {
8 => (false, false),
@ -971,10 +962,6 @@ fn write_visual_sample_entry(
v.extend_from_slice(&codec_data);
}
if let Some(extra_data) = &stream.extra_header_data {
// unsigned int(8) configOBUs[];
v.extend_from_slice(extra_data.as_slice());
}
Ok(())
})?;
}
@ -1083,44 +1070,6 @@ fn write_visual_sample_entry(
Ok(())
}
fn av1_seq_level_idx(level: Option<&str>) -> u8 {
match level {
Some("2.0") => 0,
Some("2.1") => 1,
Some("2.2") => 2,
Some("2.3") => 3,
Some("3.0") => 4,
Some("3.1") => 5,
Some("3.2") => 6,
Some("3.3") => 7,
Some("4.0") => 8,
Some("4.1") => 9,
Some("4.2") => 10,
Some("4.3") => 11,
Some("5.0") => 12,
Some("5.1") => 13,
Some("5.2") => 14,
Some("5.3") => 15,
Some("6.0") => 16,
Some("6.1") => 17,
Some("6.2") => 18,
Some("6.3") => 19,
Some("7.0") => 20,
Some("7.1") => 21,
Some("7.2") => 22,
Some("7.3") => 23,
_ => 1,
}
}
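// The table above follows the AV1 convention seq_level_idx = (major - 2) * 4 + minor,
// e.g. "4.1" maps to (4 - 2) * 4 + 1 == 9; unknown levels fall back to 1 ("2.1").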
fn av1_tier(tier: Option<&str>) -> u8 {
match tier {
Some("main") => 0,
Some("high") => 1,
_ => 0,
}
}
fn write_audio_sample_entry(
v: &mut Vec<u8>,
_header: &super::Header,
@ -1130,7 +1079,6 @@ fn write_audio_sample_entry(
let fourcc = match s.name().as_str() {
"audio/mpeg" => b"mp4a",
"audio/x-opus" => b"Opus",
"audio/x-flac" => b"fLaC",
"audio/x-alaw" => b"alaw",
"audio/x-mulaw" => b"ulaw",
"audio/x-adpcm" => {
@ -1149,10 +1097,6 @@ fn write_audio_sample_entry(
let bitrate = s.get::<i32>("bitrate").context("no ADPCM bitrate field")?;
(bitrate / 8000) as u16
}
"audio/x-flac" => with_flac_metadata(&stream.caps, |streaminfo, _| {
1 + (u16::from_be_bytes([streaminfo[16], streaminfo[17]]) >> 4 & 0b11111)
})
.context("FLAC metadata error")?,
_ => 16u16,
};
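// Descriptive note on the FLAC arm above: streaminfo[16..=17] are STREAMINFO bytes 12-13
// (the slice still carries the 4-byte metadata block header), and bits 8..4 of that
// big-endian u16 hold the 5-bit "bits per sample - 1" field, hence `>> 4 & 0b11111`
// followed by the `1 +`.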
@ -1195,9 +1139,6 @@ fn write_audio_sample_entry(
"audio/x-opus" => {
write_dops(v, &stream.caps)?;
}
"audio/x-flac" => {
write_dfla(v, &stream.caps)?;
}
"audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
// Nothing to do here
}
@ -1392,35 +1333,6 @@ fn write_dops(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
})
}
fn with_flac_metadata<R>(
caps: &gst::Caps,
cb: impl FnOnce(&[u8], &[gst::glib::SendValue]) -> R,
) -> Result<R, Error> {
let caps = caps.structure(0).unwrap();
let header = caps.get::<gst::ArrayRef>("streamheader").unwrap();
let (streaminfo, remainder) = header.as_ref().split_first().unwrap();
let streaminfo = streaminfo.get::<&gst::BufferRef>().unwrap();
let streaminfo = streaminfo.map_readable().unwrap();
// 13 bytes for the Ogg/FLAC prefix and 38 for the streaminfo itself.
match <&[_; 13 + 38]>::try_from(streaminfo.as_slice()) {
Ok(i) if i.starts_with(b"\x7FFLAC\x01\x00") => Ok(cb(&i[13..], remainder)),
Ok(_) | Err(_) => bail!("Unknown streamheader format"),
}
}
fn write_dfla(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
write_full_box(v, b"dfLa", 0, 0, move |v| {
with_flac_metadata(caps, |streaminfo, remainder| {
v.extend(streaminfo);
for metadata in remainder {
let metadata = metadata.get::<&gst::BufferRef>().unwrap();
let metadata = metadata.map_readable().unwrap();
v.extend(&metadata[..]);
}
})
})
}
fn write_xml_meta_data_sample_entry(
v: &mut Vec<u8>,
_header: &super::Header,

View file

@ -15,10 +15,9 @@ use gst_base::subclass::prelude::*;
use std::collections::VecDeque;
use std::sync::Mutex;
use crate::mp4mux::obu::read_seq_header_obu_bytes;
use once_cell::sync::Lazy;
use super::{boxes, ImageOrientation};
use super::boxes;
/// Offset between NTP and UNIX epoch in seconds.
/// NTP = UNIX + NTP_UNIX_OFFSET.
@ -109,8 +108,6 @@ struct Stream {
caps: gst::Caps,
/// Whether this stream is intra-only and has frame reordering.
delta_frames: super::DeltaFrames,
/// Whether this stream might have header frames without timestamps that should be ignored.
discard_header_buffers: bool,
/// Already written out chunks with their samples for this stream
chunks: Vec<super::Chunk>,
@ -136,11 +133,6 @@ struct Stream {
/// In ONVIF mode, the mapping between running time and UTC time (UNIX)
running_time_utc_time_mapping: Option<(gst::Signed<gst::ClockTime>, gst::ClockTime)>,
extra_header_data: Option<Vec<u8>>,
/// Orientation from tags
orientation: Option<ImageOrientation>,
}
#[derive(Default)]
@ -159,9 +151,6 @@ struct State {
/// Size of the `mdat` as written so far.
mdat_size: u64,
/// Language code from tags
language_code: Option<[u8; 3]>,
}
#[derive(Default)]
@ -176,24 +165,19 @@ impl MP4Mux {
buffer: &gst::BufferRef,
sinkpad: &super::MP4MuxPad,
delta_frames: super::DeltaFrames,
discard_headers: bool,
) -> Result<(), gst::FlowError> {
if discard_headers && buffer.flags().contains(gst::BufferFlags::HEADER) {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
if delta_frames.requires_dts() && buffer.dts().is_none() {
gst::error!(CAT, obj = sinkpad, "Require DTS for video streams");
gst::error!(CAT, obj: sinkpad, "Require DTS for video streams");
return Err(gst::FlowError::Error);
}
if buffer.pts().is_none() {
gst::error!(CAT, obj = sinkpad, "Require timestamped buffers");
gst::error!(CAT, obj: sinkpad, "Require timestamped buffers");
return Err(gst::FlowError::Error);
}
if delta_frames.intra_only() && buffer.flags().contains(gst::BufferFlags::DELTA_UNIT) {
gst::error!(CAT, obj = sinkpad, "Intra-only stream with delta units");
gst::error!(CAT, obj: sinkpad, "Intra-only stream with delta units");
return Err(gst::FlowError::Error);
}
@ -204,7 +188,6 @@ impl MP4Mux {
&self,
sinkpad: &super::MP4MuxPad,
delta_frames: super::DeltaFrames,
discard_headers: bool,
pre_queue: &mut VecDeque<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>,
running_time_utc_time_mapping: &Option<(gst::Signed<gst::ClockTime>, gst::ClockTime)>,
) -> Result<Option<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>, gst::FlowError> {
@ -212,14 +195,17 @@ impl MP4Mux {
return Ok(Some((segment.clone(), buffer.clone())));
}
let Some(mut buffer) = sinkpad.peek_buffer() else {
return Ok(None);
let mut buffer = match sinkpad.peek_buffer() {
None => return Ok(None),
Some(buffer) => buffer,
};
Self::check_buffer(&buffer, sinkpad, delta_frames, discard_headers)?;
Self::check_buffer(&buffer, sinkpad, delta_frames)?;
let mut segment = match sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
None => {
gst::error!(CAT, obj = sinkpad, "Got buffer before segment");
gst::error!(CAT, obj: sinkpad, "Got buffer before segment");
return Err(gst::FlowError::Error);
}
};
@ -245,7 +231,7 @@ impl MP4Mux {
// Calculate from the mapping
running_time_to_utc_time(pts, running_time_utc_time_mapping).ok_or_else(
|| {
gst::error!(CAT, obj = sinkpad, "Stream has negative PTS UTC time");
gst::error!(CAT, obj: sinkpad, "Stream has negative PTS UTC time");
gst::FlowError::Error
},
)?
@ -255,7 +241,7 @@ impl MP4Mux {
gst::trace!(
CAT,
obj = sinkpad,
obj: sinkpad,
"Mapped PTS running time {pts} to UTC time {utc_time}"
);
@ -266,12 +252,12 @@ impl MP4Mux {
if let Some(dts) = dts {
let dts_utc_time =
running_time_to_utc_time(dts, (pts, utc_time)).ok_or_else(|| {
gst::error!(CAT, obj = sinkpad, "Stream has negative DTS UTC time");
gst::error!(CAT, obj: sinkpad, "Stream has negative DTS UTC time");
gst::FlowError::Error
})?;
gst::trace!(
CAT,
obj = sinkpad,
obj: sinkpad,
"Mapped DTS running time {dts} to UTC time {dts_utc_time}"
);
buffer.set_dts(dts_utc_time);
@ -290,20 +276,19 @@ impl MP4Mux {
fn pop_buffer(
&self,
stream: &mut Stream,
sinkpad: &super::MP4MuxPad,
delta_frames: super::DeltaFrames,
pre_queue: &mut VecDeque<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>,
running_time_utc_time_mapping: &mut Option<(gst::Signed<gst::ClockTime>, gst::ClockTime)>,
) -> Result<Option<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>, gst::FlowError> {
let Stream {
sinkpad, pre_queue, ..
} = stream;
// In ONVIF mode we need to get UTC times for each buffer and synchronize based on that.
// Queue up to 6s of data to get the first UTC time and then backdate.
if self.obj().class().as_ref().variant == super::Variant::ONVIF
&& stream.running_time_utc_time_mapping.is_none()
&& running_time_utc_time_mapping.is_none()
{
if let Some((last, first)) = Option::zip(pre_queue.back(), pre_queue.front()) {
// Existence of PTS/DTS checked below
let (last, first) = if stream.delta_frames.requires_dts() {
let (last, first) = if delta_frames.requires_dts() {
(
last.0.to_running_time_full(last.1.dts()).unwrap(),
first.0.to_running_time_full(first.1.dts()).unwrap(),
@ -320,32 +305,31 @@ impl MP4Mux {
{
gst::error!(
CAT,
obj = sinkpad,
obj: sinkpad,
"Got no UTC time in the first 6s of the stream"
);
return Err(gst::FlowError::Error);
}
}
let Some(buffer) = sinkpad.pop_buffer() else {
if sinkpad.is_eos() {
gst::error!(CAT, obj = sinkpad, "Got no UTC time before EOS");
return Err(gst::FlowError::Error);
} else {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
let buffer = match sinkpad.pop_buffer() {
None => {
if sinkpad.is_eos() {
gst::error!(CAT, obj: sinkpad, "Got no UTC time before EOS");
return Err(gst::FlowError::Error);
} else {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
}
Some(buffer) => buffer,
};
Self::check_buffer(
&buffer,
sinkpad,
stream.delta_frames,
stream.discard_header_buffers,
)?;
Self::check_buffer(&buffer, sinkpad, delta_frames)?;
let segment = match sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
None => {
gst::error!(CAT, obj = sinkpad, "Got buffer before segment");
gst::error!(CAT, obj: sinkpad, "Got buffer before segment");
return Err(gst::FlowError::Error);
}
};
@ -361,12 +345,12 @@ impl MP4Mux {
let running_time = segment.to_running_time_full(buffer.pts().unwrap()).unwrap();
gst::info!(
CAT,
obj = sinkpad,
obj: sinkpad,
"Got initial UTC time {utc_time} at PTS running time {running_time}",
);
let mapping = (running_time, utc_time);
stream.running_time_utc_time_mapping = Some(mapping);
*running_time_utc_time_mapping = Some(mapping);
// Push the buffer onto the pre-queue and re-timestamp it and all other buffers
// based on the mapping above.
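// Descriptive note: running_time_to_utc_time() (defined outside this hunk) is effectively
// `anchor_utc + (running_time - anchor_running_time)`, returning None if the result would
// be negative; with the (running_time, utc_time) anchor captured above, the queued
// buffers are simply shifted by a constant offset into UTC time.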
@ -377,12 +361,12 @@ impl MP4Mux {
let pts = segment.to_running_time_full(buffer.pts().unwrap()).unwrap();
let pts_utc_time = running_time_to_utc_time(pts, mapping).ok_or_else(|| {
gst::error!(CAT, obj = sinkpad, "Stream has negative PTS UTC time");
gst::error!(CAT, obj: sinkpad, "Stream has negative PTS UTC time");
gst::FlowError::Error
})?;
gst::trace!(
CAT,
obj = sinkpad,
obj: sinkpad,
"Mapped PTS running time {pts} to UTC time {pts_utc_time}"
);
buffer.set_pts(pts_utc_time);
@ -390,12 +374,12 @@ impl MP4Mux {
if let Some(dts) = buffer.dts() {
let dts = segment.to_running_time_full(dts).unwrap();
let dts_utc_time = running_time_to_utc_time(dts, mapping).ok_or_else(|| {
gst::error!(CAT, obj = sinkpad, "Stream has negative DTS UTC time");
gst::error!(CAT, obj: sinkpad, "Stream has negative DTS UTC time");
gst::FlowError::Error
})?;
gst::trace!(
CAT,
obj = sinkpad,
obj: sinkpad,
"Mapped DTS running time {dts} to UTC time {dts_utc_time}"
);
buffer.set_dts(dts_utc_time);
@ -407,7 +391,7 @@ impl MP4Mux {
// Fall through below and pop the first buffer finally
}
if let Some((segment, buffer)) = stream.pre_queue.pop_front() {
if let Some((segment, buffer)) = pre_queue.pop_front() {
return Ok(Some((segment, buffer)));
}
@ -416,26 +400,23 @@ impl MP4Mux {
// for calculating the duration to the previous buffer, and then put into the pre-queue
// - or this is the very first buffer and we just put it into the queue overselves above
if self.obj().class().as_ref().variant == super::Variant::ONVIF {
if stream.sinkpad.is_eos() {
if sinkpad.is_eos() {
return Ok(None);
}
unreachable!();
}
let Some(buffer) = stream.sinkpad.pop_buffer() else {
return Ok(None);
let buffer = match sinkpad.pop_buffer() {
None => return Ok(None),
Some(buffer) => buffer,
};
Self::check_buffer(
&buffer,
&stream.sinkpad,
stream.delta_frames,
stream.discard_header_buffers,
)?;
let segment = match stream.sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Self::check_buffer(&buffer, sinkpad, delta_frames)?;
let segment = match sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
None => {
gst::error!(CAT, obj = stream.sinkpad, "Got buffer before segment");
gst::error!(CAT, obj: sinkpad, "Got buffer before segment");
return Err(gst::FlowError::Error);
}
};
@ -461,12 +442,6 @@ impl MP4Mux {
Some(PendingBuffer {
duration: Some(_), ..
}) => return Ok(()),
Some(PendingBuffer { ref buffer, .. })
if stream.discard_header_buffers
&& buffer.flags().contains(gst::BufferFlags::HEADER) =>
{
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
Some(PendingBuffer {
timestamp,
pts,
@ -474,28 +449,26 @@ impl MP4Mux {
ref mut duration,
..
}) => {
let peek_outcome = self.peek_buffer(
// Already have a pending buffer but no duration, so try to get that now
let (segment, buffer) = match self.peek_buffer(
&stream.sinkpad,
stream.delta_frames,
stream.discard_header_buffers,
&mut stream.pre_queue,
&stream.running_time_utc_time_mapping,
)?;
// Already have a pending buffer but no duration, so try to get that now
let (segment, buffer) = match peek_outcome {
)? {
Some(res) => res,
None => {
if stream.sinkpad.is_eos() {
let dur = buffer.duration().unwrap_or(gst::ClockTime::ZERO);
gst::trace!(
CAT,
obj = stream.sinkpad,
obj: stream.sinkpad,
"Stream is EOS, using {dur} as duration for queued buffer",
);
let pts = pts + dur;
if stream.end_pts.map_or(true, |end_pts| end_pts < pts) {
gst::trace!(CAT, obj = stream.sinkpad, "Stream end PTS {pts}");
gst::trace!(CAT, obj: stream.sinkpad, "Stream end PTS {pts}");
stream.end_pts = Some(pts);
}
@ -503,11 +476,7 @@ impl MP4Mux {
return Ok(());
} else {
gst::trace!(
CAT,
obj = stream.sinkpad,
"Stream has no buffer queued"
);
gst::trace!(CAT, obj: stream.sinkpad, "Stream has no buffer queued");
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
}
@ -528,7 +497,7 @@ impl MP4Mux {
gst::trace!(
CAT,
obj = stream.sinkpad,
obj: stream.sinkpad,
"Stream has buffer with timestamp {next_timestamp} queued",
);
@ -538,7 +507,7 @@ impl MP4Mux {
.unwrap_or_else(|| {
gst::warning!(
CAT,
obj = stream.sinkpad,
obj: stream.sinkpad,
"Stream timestamps going backwards {next_timestamp} < {timestamp}",
);
gst::ClockTime::ZERO
@ -546,57 +515,41 @@ impl MP4Mux {
gst::trace!(
CAT,
obj = stream.sinkpad,
obj: stream.sinkpad,
"Using {dur} as duration for queued buffer",
);
let pts = pts + dur;
if stream.end_pts.map_or(true, |end_pts| end_pts < pts) {
gst::trace!(CAT, obj = stream.sinkpad, "Stream end PTS {pts}");
gst::trace!(CAT, obj: stream.sinkpad, "Stream end PTS {pts}");
stream.end_pts = Some(pts);
}
*duration = Some(dur);
// If the stream is AV1, we need to parse the SequenceHeader OBU to include in the
// extra data of the 'av1C' box. It makes the stream playable in some browsers.
let s = stream.caps.structure(0).unwrap();
if !buffer.flags().contains(gst::BufferFlags::DELTA_UNIT)
&& s.name().as_str() == "video/x-av1"
{
let buf_map = buffer.map_readable().map_err(|_| {
gst::error!(CAT, obj = stream.sinkpad, "Failed to map buffer");
gst::FlowError::Error
})?;
stream.extra_header_data = read_seq_header_obu_bytes(buf_map.as_slice())
.map_err(|_| {
gst::error!(
CAT,
obj = stream.sinkpad,
"Failed to parse AV1 SequenceHeader OBU"
);
gst::FlowError::Error
})?;
}
return Ok(());
}
None => {
// Have no buffer queued at all yet
let (segment, buffer) = match self.pop_buffer(stream)? {
let (segment, buffer) = match self.pop_buffer(
&stream.sinkpad,
stream.delta_frames,
&mut stream.pre_queue,
&mut stream.running_time_utc_time_mapping,
)? {
Some(res) => res,
None => {
if stream.sinkpad.is_eos() {
gst::trace!(CAT, obj = stream.sinkpad, "Stream is EOS",);
gst::trace!(
CAT,
obj: stream.sinkpad,
"Stream is EOS",
);
return Err(gst::FlowError::Eos);
} else {
gst::trace!(
CAT,
obj = stream.sinkpad,
"Stream has no buffer queued"
);
gst::trace!(CAT, obj: stream.sinkpad, "Stream has no buffer queued");
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
}
@ -606,16 +559,9 @@ impl MP4Mux {
let pts_position = buffer.pts().unwrap();
let dts_position = buffer.dts();
let pts = segment
.to_running_time_full(pts_position)
.unwrap()
.positive()
.unwrap_or_else(|| {
gst::error!(
CAT,
obj = stream.sinkpad,
"Stream has negative PTS running time"
);
let pts = segment.to_running_time_full(pts_position).unwrap()
.positive().unwrap_or_else(|| {
gst::error!(CAT, obj: stream.sinkpad, "Stream has negative PTS running time");
gst::ClockTime::ZERO
});
@ -627,7 +573,7 @@ impl MP4Mux {
let dts = dts.unwrap();
if stream.start_dts.is_none() {
gst::debug!(CAT, obj = stream.sinkpad, "Stream start DTS {dts}");
gst::debug!(CAT, obj: stream.sinkpad, "Stream start DTS {dts}");
stream.start_dts = Some(dts);
}
@ -640,7 +586,7 @@ impl MP4Mux {
.earliest_pts
.map_or(true, |earliest_pts| earliest_pts > pts)
{
gst::debug!(CAT, obj = stream.sinkpad, "Stream earliest PTS {pts}");
gst::debug!(CAT, obj: stream.sinkpad, "Stream earliest PTS {pts}");
stream.earliest_pts = Some(pts);
}
@ -649,7 +595,7 @@ impl MP4Mux {
let dts = dts.unwrap(); // set above
Some(i64::try_from((pts - dts).nseconds()).map_err(|_| {
gst::error!(CAT, obj = stream.sinkpad, "Too big PTS/DTS difference");
gst::error!(CAT, obj: stream.sinkpad, "Too big PTS/DTS difference");
gst::FlowError::Error
})?)
} else {
@ -658,7 +604,7 @@ impl MP4Mux {
gst::trace!(
CAT,
obj = stream.sinkpad,
obj: stream.sinkpad,
"Stream has buffer of size {} with timestamp {timestamp} pending",
buffer.size(),
);
@ -705,7 +651,7 @@ impl MP4Mux {
}))
{
gst::trace!(CAT,
obj = stream.sinkpad,
obj: stream.sinkpad,
"Continuing current chunk: single stream {single_stream}, or {} >= {} and {} >= {}",
gst::format::Bytes::from_u64(stream.queued_chunk_bytes),
settings.interleave_bytes.map(gst::format::Bytes::from_u64).display(),
@ -715,25 +661,16 @@ impl MP4Mux {
}
state.current_stream_idx = None;
gst::debug!(
CAT,
obj = stream.sinkpad,
gst::debug!(CAT,
obj: stream.sinkpad,
"Switching to next chunk: {} < {} and {} < {}",
gst::format::Bytes::from_u64(stream.queued_chunk_bytes),
settings
.interleave_bytes
.map(gst::format::Bytes::from_u64)
.display(),
stream.queued_chunk_time,
settings.interleave_time.display(),
settings.interleave_bytes.map(gst::format::Bytes::from_u64).display(),
stream.queued_chunk_time, settings.interleave_time.display(),
);
}
Err(gst::FlowError::Eos) => {
gst::debug!(
CAT,
obj = stream.sinkpad,
"Stream is EOS, switching to next stream"
);
gst::debug!(CAT, obj: stream.sinkpad, "Stream is EOS, switching to next stream");
state.current_stream_idx = None;
}
Err(err) => {
@ -762,7 +699,10 @@ impl MP4Mux {
let timestamp = stream.pending_buffer.as_ref().unwrap().timestamp;
gst::trace!(CAT, obj = stream.sinkpad, "Stream at timestamp {timestamp}",);
gst::trace!(CAT,
obj: stream.sinkpad,
"Stream at timestamp {timestamp}",
);
all_eos = false;
@ -790,21 +730,21 @@ impl MP4Mux {
}
if !all_have_data_or_eos {
gst::trace!(CAT, imp = self, "Not all streams have a buffer or are EOS");
gst::trace!(CAT, imp: self, "Not all streams have a buffer or are EOS");
Err(gst_base::AGGREGATOR_FLOW_NEED_DATA)
} else if all_eos {
gst::info!(CAT, imp = self, "All streams are EOS");
gst::info!(CAT, imp: self, "All streams are EOS");
Err(gst::FlowError::Eos)
} else if let Some((idx, stream, earliest_timestamp)) = earliest_stream {
gst::debug!(
CAT,
obj = stream.sinkpad,
obj: stream.sinkpad,
"Stream is earliest stream with timestamp {earliest_timestamp}",
);
gst::debug!(
CAT,
obj = stream.sinkpad,
obj: stream.sinkpad,
"Starting new chunk at offset {}",
state.current_offset,
);
@ -838,7 +778,7 @@ impl MP4Mux {
&& buffer.buffer.flags().contains(gst::BufferFlags::DROPPABLE)
&& buffer.buffer.size() == 0
{
gst::trace!(CAT, obj = stream.sinkpad, "Skipping gap buffer {buffer:?}");
gst::trace!(CAT, obj: stream.sinkpad, "Skipping gap buffer {buffer:?}");
// If a new chunk was just started for the gap buffer, don't bother and get rid
// of this chunk again for now and search for the next stream.
@ -856,19 +796,10 @@ impl MP4Mux {
if let Some(previous_sample) =
stream.chunks.last_mut().and_then(|c| c.samples.last_mut())
{
gst::trace!(
CAT,
obj = stream.sinkpad,
"Adding gap duration {} to previous sample",
buffer.duration.unwrap()
);
gst::trace!(CAT, obj: stream.sinkpad, "Adding gap duration {} to previous sample", buffer.duration.unwrap());
previous_sample.duration += buffer.duration.unwrap();
} else {
gst::trace!(
CAT,
obj = stream.sinkpad,
"Resetting stream start time because it started with a gap"
);
gst::trace!(CAT, obj: stream.sinkpad, "Resetting stream start time because it started with a gap");
// If there was no previous sample yet then the next sample needs to start
// earlier or alternatively we change the start PTS. We do the latter here
// as otherwise the first sample would be displayed too early.
@ -880,12 +811,7 @@ impl MP4Mux {
continue;
}
gst::trace!(
CAT,
obj = stream.sinkpad,
"Handling buffer {buffer:?} at offset {}",
state.current_offset
);
gst::trace!(CAT, obj: stream.sinkpad, "Handling buffer {buffer:?} at offset {}", state.current_offset);
let duration = buffer.duration.unwrap();
let composition_time_offset = buffer.composition_time_offset;
@ -923,7 +849,7 @@ impl MP4Mux {
}
fn create_streams(&self, state: &mut State) -> Result<(), gst::FlowError> {
gst::info!(CAT, imp = self, "Creating streams");
gst::info!(CAT, imp: self, "Creating streams");
for pad in self
.obj()
@ -934,21 +860,20 @@ impl MP4Mux {
let caps = match pad.current_caps() {
Some(caps) => caps,
None => {
gst::warning!(CAT, obj = pad, "Skipping pad without caps");
gst::warning!(CAT, obj: pad, "Skipping pad without caps");
continue;
}
};
gst::info!(CAT, obj = pad, "Configuring caps {caps:?}");
gst::info!(CAT, obj: pad, "Configuring caps {caps:?}");
let s = caps.structure(0).unwrap();
let mut delta_frames = super::DeltaFrames::IntraOnly;
let mut discard_header_buffers = false;
match s.name().as_str() {
"video/x-h264" | "video/x-h265" => {
if !s.has_field_with_type("codec_data", gst::Buffer::static_type()) {
gst::error!(CAT, obj = pad, "Received caps without codec_data");
gst::error!(CAT, obj: pad, "Received caps without codec_data");
return Err(gst::FlowError::NotNegotiated);
}
delta_frames = super::DeltaFrames::Bidirectional;
@ -958,7 +883,7 @@ impl MP4Mux {
}
"video/x-vp9" => {
if !s.has_field_with_type("colorimetry", str::static_type()) {
gst::error!(CAT, obj = pad, "Received caps without colorimetry");
gst::error!(CAT, obj: pad, "Received caps without colorimetry");
return Err(gst::FlowError::NotNegotiated);
}
delta_frames = super::DeltaFrames::PredictiveOnly;
@ -969,7 +894,7 @@ impl MP4Mux {
"image/jpeg" => (),
"audio/mpeg" => {
if !s.has_field_with_type("codec_data", gst::Buffer::static_type()) {
gst::error!(CAT, obj = pad, "Received caps without codec_data");
gst::error!(CAT, obj: pad, "Received caps without codec_data");
return Err(gst::FlowError::NotNegotiated);
}
}
@ -980,26 +905,14 @@ impl MP4Mux {
.and_then(|a| a.first().and_then(|v| v.get::<gst::Buffer>().ok()))
{
if gst_pbutils::codec_utils_opus_parse_header(&header, None).is_err() {
gst::error!(CAT, obj = pad, "Received invalid Opus header");
gst::error!(CAT, obj: pad, "Received invalid Opus header");
return Err(gst::FlowError::NotNegotiated);
}
} else if gst_pbutils::codec_utils_opus_parse_caps(&caps, None).is_err() {
gst::error!(CAT, obj = pad, "Received invalid Opus caps");
gst::error!(CAT, obj: pad, "Received invalid Opus caps");
return Err(gst::FlowError::NotNegotiated);
}
}
"audio/x-flac" => {
discard_header_buffers = true;
if let Err(e) = s.get::<gst::ArrayRef>("streamheader") {
gst::error!(
CAT,
obj = pad,
"Muxing FLAC into MP4 needs streamheader: {}",
e
);
return Err(gst::FlowError::NotNegotiated);
};
}
"audio/x-alaw" | "audio/x-mulaw" => (),
"audio/x-adpcm" => (),
"application/x-onvif-metadata" => (),
@ -1011,7 +924,6 @@ impl MP4Mux {
pre_queue: VecDeque::new(),
caps,
delta_frames,
discard_header_buffers,
chunks: Vec::new(),
pending_buffer: None,
queued_chunk_time: gst::ClockTime::ZERO,
@ -1020,13 +932,11 @@ impl MP4Mux {
earliest_pts: None,
end_pts: None,
running_time_utc_time_mapping: None,
extra_header_data: None,
orientation: None,
});
}
if state.streams.is_empty() {
gst::error!(CAT, imp = self, "No streams available");
gst::error!(CAT, imp: self, "No streams available");
return Err(gst::FlowError::Error);
}
@ -1161,7 +1071,7 @@ impl ElementImpl for MP4Mux {
if !state.streams.is_empty() {
gst::error!(
CAT,
imp = self,
imp: self,
"Can't request new pads after stream was started"
);
return None;
@ -1183,7 +1093,7 @@ impl AggregatorImpl for MP4Mux {
) -> bool {
use gst::QueryViewMut;
gst::trace!(CAT, obj = aggregator_pad, "Handling query {query:?}");
gst::trace!(CAT, obj: aggregator_pad, "Handling query {query:?}");
match query.view_mut() {
QueryViewMut::Caps(q) => {
@ -1217,14 +1127,14 @@ impl AggregatorImpl for MP4Mux {
) -> Result<gst::FlowSuccess, gst::FlowError> {
use gst::EventView;
gst::trace!(CAT, obj = aggregator_pad, "Handling event {event:?}");
gst::trace!(CAT, obj: aggregator_pad, "Handling event {event:?}");
match event.view() {
EventView::Segment(ev) => {
if ev.segment().format() != gst::Format::Time {
gst::warning!(
CAT,
obj = aggregator_pad,
obj: aggregator_pad,
"Received non-TIME segment, replacing with default TIME segment"
);
let segment = gst::FormattedSegment::<gst::ClockTime>::new();
@ -1234,57 +1144,6 @@ impl AggregatorImpl for MP4Mux {
}
self.parent_sink_event_pre_queue(aggregator_pad, event)
}
EventView::Tag(ev) => {
if let Some(tag_value) = ev.tag().get::<gst::tags::LanguageCode>() {
let lang = tag_value.get();
gst::trace!(
CAT,
imp = self,
"Received language code from tags: {:?}",
lang
);
// Language as ISO-639-2/T
if lang.len() == 3 && lang.chars().all(|c| c.is_ascii_lowercase()) {
let mut state = self.state.lock().unwrap();
let mut language_code: [u8; 3] = [0; 3];
for (out, c) in Iterator::zip(language_code.iter_mut(), lang.chars()) {
*out = c as u8;
}
state.language_code = Some(language_code);
}
} else if let Some(tag_value) = ev.tag().get::<gst::tags::ImageOrientation>() {
let orientation = tag_value.get();
gst::trace!(
CAT,
obj = aggregator_pad,
"Received image orientation from tags: {:?}",
orientation
);
let mut state = self.state.lock().unwrap();
for stream in &mut state.streams {
if &stream.sinkpad == aggregator_pad {
stream.orientation = match orientation {
"rotate-0" => Some(ImageOrientation::Rotate0),
"rotate-90" => Some(ImageOrientation::Rotate90),
"rotate-180" => Some(ImageOrientation::Rotate180),
"rotate-270" => Some(ImageOrientation::Rotate270),
// TODO:
// "flip-rotate-0" => Some(ImageOrientation::FlipRotate0),
// "flip-rotate-90" => Some(ImageOrientation::FlipRotate90),
// "flip-rotate-180" => Some(ImageOrientation::FlipRotate180),
// "flip-rotate-270" => Some(ImageOrientation::FlipRotate270),
_ => None,
};
break;
}
}
}
self.parent_sink_event_pre_queue(aggregator_pad, event)
}
_ => self.parent_sink_event_pre_queue(aggregator_pad, event),
}
}
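
The tag handling above only accepts ISO 639-2/T language codes, i.e. exactly three lowercase ASCII letters. A minimal sketch of the tag event an upstream element could push on one of the muxer sink pads so the code is picked up; the helper name is hypothetical:

// Sketch only: push a LanguageCode tag ("eng" is ISO 639-2/T) to a sink pad.
fn send_language(sinkpad: &gst::Pad) {
    use gst::prelude::*;

    let mut tags = gst::TagList::new();
    tags.get_mut()
        .unwrap()
        .add::<gst::tags::LanguageCode>(&"eng", gst::TagMergeMode::Replace);
    let _ = sinkpad.send_event(gst::event::Tag::new(tags));
}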
@ -1292,7 +1151,7 @@ impl AggregatorImpl for MP4Mux {
fn sink_event(&self, aggregator_pad: &gst_base::AggregatorPad, event: gst::Event) -> bool {
use gst::EventView;
gst::trace!(CAT, obj = aggregator_pad, "Handling event {event:?}");
gst::trace!(CAT, obj: aggregator_pad, "Handling event {event:?}");
match event.view() {
EventView::Tag(_ev) => {
@ -1307,7 +1166,7 @@ impl AggregatorImpl for MP4Mux {
fn src_query(&self, query: &mut gst::QueryRef) -> bool {
use gst::QueryViewMut;
gst::trace!(CAT, imp = self, "Handling query {query:?}");
gst::trace!(CAT, imp: self, "Handling query {query:?}");
match query.view_mut() {
QueryViewMut::Seeking(q) => {
@ -1322,7 +1181,7 @@ impl AggregatorImpl for MP4Mux {
fn src_event(&self, event: gst::Event) -> bool {
use gst::EventView;
gst::trace!(CAT, imp = self, "Handling event {event:?}");
gst::trace!(CAT, imp: self, "Handling event {event:?}");
match event.view() {
EventView::Seek(_ev) => false,
@ -1331,7 +1190,7 @@ impl AggregatorImpl for MP4Mux {
}
fn flush(&self) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::info!(CAT, imp = self, "Flushing");
gst::info!(CAT, imp: self, "Flushing");
let mut state = self.state.lock().unwrap();
for stream in &mut state.streams {
@ -1345,7 +1204,7 @@ impl AggregatorImpl for MP4Mux {
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::trace!(CAT, imp = self, "Stopping");
gst::trace!(CAT, imp: self, "Stopping");
let _ = self.parent_stop();
@ -1355,7 +1214,7 @@ impl AggregatorImpl for MP4Mux {
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::trace!(CAT, imp = self, "Starting");
gst::trace!(CAT, imp: self, "Starting");
self.parent_start()?;
@ -1396,7 +1255,7 @@ impl AggregatorImpl for MP4Mux {
}
} else {
// Can't query downstream, have to assume downstream is seekable
gst::warning!(CAT, imp = self, "Can't query downstream for seekability");
gst::warning!(CAT, imp: self, "Can't query downstream for seekability");
}
state = self.state.lock().unwrap();
@ -1411,23 +1270,15 @@ impl AggregatorImpl for MP4Mux {
gst::info!(
CAT,
imp = self,
imp: self,
"Creating ftyp box at offset {}",
state.current_offset
);
// ... and then create the ftyp box plus mdat box header so we can start outputting
// actual data
let ftyp = boxes::create_ftyp(
self.obj().class().as_ref().variant,
&state
.streams
.iter()
.map(|s| s.caps.as_ref())
.collect::<Vec<_>>(),
)
.map_err(|err| {
gst::error!(CAT, imp = self, "Failed to create ftyp box: {err}");
let ftyp = boxes::create_ftyp(self.obj().class().as_ref().variant).map_err(|err| {
gst::error!(CAT, imp: self, "Failed to create ftyp box: {err}");
gst::FlowError::Error
})?;
state.current_offset += ftyp.size() as u64;
@ -1435,13 +1286,13 @@ impl AggregatorImpl for MP4Mux {
gst::info!(
CAT,
imp = self,
imp: self,
"Creating mdat box header at offset {}",
state.current_offset
);
state.mdat_offset = Some(state.current_offset);
let mdat = boxes::create_mdat_header(None).map_err(|err| {
gst::error!(CAT, imp = self, "Failed to create mdat box header: {err}");
gst::error!(CAT, imp: self, "Failed to create mdat box header: {err}");
gst::FlowError::Error
})?;
state.current_offset += mdat.size() as u64;
@ -1462,7 +1313,7 @@ impl AggregatorImpl for MP4Mux {
gst::info!(
CAT,
imp = self,
imp: self,
"Creating moov box now, mdat ends at offset {} with size {}",
state.current_offset,
state.mdat_size
@ -1485,8 +1336,6 @@ impl AggregatorImpl for MP4Mux {
earliest_pts,
end_pts,
chunks: stream.chunks,
extra_header_data: stream.extra_header_data.clone(),
orientation: stream.orientation,
});
}
@ -1494,10 +1343,9 @@ impl AggregatorImpl for MP4Mux {
variant: self.obj().class().as_ref().variant,
movie_timescale: settings.movie_timescale,
streams,
language_code: state.language_code,
})
.map_err(|err| {
gst::error!(CAT, imp = self, "Failed to create moov box: {err}");
gst::error!(CAT, imp: self, "Failed to create moov box: {err}");
gst::FlowError::Error
})?;
state.current_offset += moov.size() as u64;
@ -1512,7 +1360,7 @@ impl AggregatorImpl for MP4Mux {
if !buffers.is_empty() {
if let Err(err) = self.obj().finish_buffer_list(buffers) {
gst::error!(CAT, imp = self, "Failed pushing buffers: {err:?}");
gst::error!(CAT, imp: self, "Failed pushing buffers: {err:?}");
return Err(err);
}
}
@ -1523,7 +1371,7 @@ impl AggregatorImpl for MP4Mux {
if let Some(mdat_offset) = state.mdat_offset {
gst::info!(
CAT,
imp = self,
imp: self,
"Rewriting mdat box header at offset {mdat_offset} with size {} now",
state.mdat_size,
);
@ -1531,7 +1379,7 @@ impl AggregatorImpl for MP4Mux {
segment.set_start(gst::format::Bytes::from_u64(mdat_offset));
state.current_offset = mdat_offset;
let mdat = boxes::create_mdat_header(Some(state.mdat_size)).map_err(|err| {
gst::error!(CAT, imp = self, "Failed to create mdat box header: {err}");
gst::error!(CAT, imp: self, "Failed to create mdat box header: {err}");
gst::FlowError::Error
})?;
drop(state);
@ -1540,7 +1388,7 @@ impl AggregatorImpl for MP4Mux {
if let Err(err) = self.obj().finish_buffer(mdat) {
gst::error!(
CAT,
imp = self,
imp: self,
"Failed pushing updated mdat box header buffer downstream: {err:?}",
);
}
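
Taken together, the code above writes ftyp and a placeholder mdat header up front, streams the sample data, and at EOS appends the moov box and then rewrites the mdat header with the real size by seeking back with a BYTES segment. A minimal sketch of that seek-back step; the helper name is hypothetical and only uses the calls visible above:

// Sketch only: byte-offset segment used to reposition downstream at the
// stored mdat header before re-emitting it with the final size.
fn seek_back_to_mdat(mdat_offset: u64) -> gst::FormattedSegment<gst::format::Bytes> {
    let mut segment = gst::FormattedSegment::<gst::format::Bytes>::new();
    segment.set_start(gst::format::Bytes::from_u64(mdat_offset));
    segment
}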
@ -1675,11 +1523,6 @@ impl ElementImpl for ISOMP4Mux {
.field("channels", gst::IntRange::new(1i32, 8))
.field("rate", gst::IntRange::new(1, i32::MAX))
.build(),
gst::Structure::builder("audio/x-flac")
.field("framed", true)
.field("channels", gst::IntRange::<i32>::new(1, 8))
.field("rate", gst::IntRange::<i32>::new(1, 10 * u16::MAX as i32))
.build(),
]
.into_iter()
.collect::<gst::Caps>(),
@ -1866,7 +1709,7 @@ impl AggregatorPadImpl for MP4MuxPad {
let mux = aggregator.downcast_ref::<super::MP4Mux>().unwrap();
let mut mux_state = mux.imp().state.lock().unwrap();
gst::info!(CAT, imp = self, "Flushing");
gst::info!(CAT, imp: self, "Flushing");
for stream in &mut mux_state.streams {
if stream.sinkpad == *self.obj() {

View file

@ -11,7 +11,6 @@ use gst::prelude::*;
mod boxes;
mod imp;
mod obu;
glib::wrapper! {
pub(crate) struct MP4MuxPad(ObjectSubclass<imp::MP4MuxPad>) @extends gst_base::AggregatorPad, gst::Pad, gst::Object;
@ -51,80 +50,6 @@ pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
Ok(())
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum ImageOrientation {
Rotate0,
Rotate90,
Rotate180,
Rotate270,
// TODO:
// FlipRotate0,
// FlipRotate90,
// FlipRotate180,
// FlipRotate270,
}
type TransformMatrix = [[u8; 4]; 9];
const IDENTITY_MATRIX: TransformMatrix = [
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
const ROTATE_90_MATRIX: TransformMatrix = [
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
const ROTATE_180_MATRIX: TransformMatrix = [
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
const ROTATE_270_MATRIX: TransformMatrix = [
0u32.to_be_bytes(),
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
impl ImageOrientation {
pub(crate) fn transform_matrix(&self) -> &'static TransformMatrix {
match self {
ImageOrientation::Rotate0 => &IDENTITY_MATRIX,
ImageOrientation::Rotate90 => &ROTATE_90_MATRIX,
ImageOrientation::Rotate180 => &ROTATE_180_MATRIX,
ImageOrientation::Rotate270 => &ROTATE_270_MATRIX,
}
}
}
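
The constants above are the 3x3 track transformation matrix stored row-major as big-endian bytes, with the first eight entries in 16.16 fixed point and the last one in 2.30 fixed point. A minimal sketch of how such a matrix can be derived from a right-angle rotation; the helper is hypothetical and only covers the four angles the enum supports:

// Sketch only: build a tkhd-style matrix {a, b, u, c, d, v, x, y, w}.
fn rotation_matrix(degrees: u32) -> [[u8; 4]; 9] {
    // cos/sin are exact integers for right-angle rotations only.
    let (sin, cos): (i32, i32) = match degrees % 360 {
        0 => (0, 1),
        90 => (1, 0),
        180 => (0, -1),
        270 => (-1, 0),
        _ => panic!("only multiples of 90 degrees map to exact fixed-point values"),
    };
    [
        (cos << 16).to_be_bytes(),    // a
        (sin << 16).to_be_bytes(),    // b
        0u32.to_be_bytes(),           // u
        ((-sin) << 16).to_be_bytes(), // c
        (cos << 16).to_be_bytes(),    // d
        0u32.to_be_bytes(),           // v
        0u32.to_be_bytes(),           // x
        0u32.to_be_bytes(),           // y
        (1u32 << 30).to_be_bytes(),   // w, 2.30 fixed point
    ]
}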
#[derive(Debug, Copy, Clone)]
pub(crate) enum DeltaFrames {
/// Only single completely decodable frames
@ -201,12 +126,6 @@ pub(crate) struct Stream {
/// All the chunks stored for this stream
chunks: Vec<Chunk>,
// More data to be included in the fragmented stream header
extra_header_data: Option<Vec<u8>>,
/// Orientation from tags
orientation: Option<ImageOrientation>,
}
#[derive(Debug)]
@ -216,7 +135,6 @@ pub(crate) struct Header {
/// Pre-defined movie timescale if not 0.
movie_timescale: u32,
streams: Vec<Stream>,
language_code: Option<[u8; 3]>,
}
#[allow(clippy::upper_case_acronyms)]

View file

@ -1,303 +0,0 @@
//
// Copyright (C) 2022 Vivienne Watermeier <vwatermeier@igalia.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(non_camel_case_types)]
use bitstream_io::{BigEndian, BitRead, BitReader, Endianness};
use std::io::{self, Cursor, Read, Seek, SeekFrom};
pub fn parse_leb128<R, E>(reader: &mut BitReader<R, E>) -> io::Result<(u32, u32)>
where
R: Read + Seek,
E: Endianness,
{
let mut value = 0;
let mut num_bytes = 0;
for i in 0..8 {
let byte = reader.read::<u32>(8)?;
value |= (byte & 0x7f) << (i * 7);
num_bytes += 1;
if byte & 0x80 == 0 {
break;
}
}
reader.byte_align();
Ok((value, num_bytes))
}
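
parse_leb128 reads the unsigned LEB128 values AV1 uses for OBU size fields: each byte contributes its low seven bits, and a set high bit means another byte follows. A small worked example, written as a test that would sit next to the function; the byte values are chosen purely for illustration:

#[test]
fn parse_leb128_example() {
    use bitstream_io::{BigEndian, BitReader};
    use std::io::Cursor;

    // 0x96 = 0b1001_0110: continuation bit set, low seven bits = 22.
    // 0x01 = 0b0000_0001: last byte, contributes 1 << 7 = 128.
    let data: &[u8] = &[0x96, 0x01];
    let mut reader = BitReader::endian(Cursor::new(data), BigEndian);
    let (value, len) = parse_leb128(&mut reader).unwrap();
    assert_eq!((value, len), (150, 2));
}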
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct SizedObu {
pub obu_type: ObuType,
pub has_extension: bool,
/// If the OBU header is followed by a leb128 size field.
pub has_size_field: bool,
pub temporal_id: u8,
pub spatial_id: u8,
/// size of the OBU payload in bytes.
/// This may refer to different sizes in different contexts, not always
/// to the entire OBU payload as it is in the AV1 bitstream.
pub size: u32,
/// the number of bytes the leb128 size field will take up
/// when written with write_leb128().
/// This does not imply `has_size_field`, and does not necessarily match with
/// the length of the internal size field if present.
pub leb_size: u32,
pub header_len: u32,
/// indicates that only part of this OBU has been processed so far
pub is_fragment: bool,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ObuType {
Reserved,
SequenceHeader,
TemporalDelimiter,
FrameHeader,
TileGroup,
Metadata,
Frame,
RedundantFrameHeader,
TileList,
Padding,
}
impl Default for ObuType {
fn default() -> Self {
Self::Reserved
}
}
impl SizedObu {
/// Parse an OBU header and size field. If the OBU is not expected to contain
/// a size field, but the size is known from external information,
/// parse as an `UnsizedObu` and use `to_sized`.
pub fn parse<R, E>(reader: &mut BitReader<R, E>) -> io::Result<Self>
where
R: Read + Seek,
E: Endianness,
{
// check the forbidden bit
if reader.read_bit()? {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"forbidden bit in OBU header is set",
));
}
let obu_type = reader.read::<u8>(4)?.into();
let has_extension = reader.read_bit()?;
// require a size field
if !reader.read_bit()? {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"expected a size field",
));
}
// ignore the reserved bit
let _ = reader.read_bit()?;
let (temporal_id, spatial_id) = if has_extension {
(reader.read::<u8>(3)?, reader.read::<u8>(2)?)
} else {
(0, 0)
};
reader.byte_align();
let (size, leb_size) = parse_leb128(reader)?;
Ok(Self {
obu_type,
has_extension,
has_size_field: true,
temporal_id,
spatial_id,
size,
leb_size,
header_len: has_extension as u32 + 1,
is_fragment: false,
})
}
/// The amount of bytes this OBU will take up, including the space needed for
/// its leb128 size field.
pub fn full_size(&self) -> u32 {
self.size + self.leb_size + self.header_len
}
}
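
full_size() is the number of bytes the whole OBU occupies in the bitstream: header, leb128 size field, and payload. A worked example based on the sequence-header OBU from the tests below, written as if it sat in this module's tests, where 2 + 1 + 5 adds up to the 8 raw bytes:

#[test]
fn full_size_example() {
    let obu = SizedObu {
        obu_type: ObuType::SequenceHeader,
        has_extension: true,
        has_size_field: true,
        temporal_id: 4,
        spatial_id: 3,
        size: 5,
        leb_size: 1,
        header_len: 2,
        is_fragment: false,
    };
    // 2 header bytes + 1 leb128 size byte + 5 payload bytes
    assert_eq!(obu.full_size(), 8);
}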
pub fn read_seq_header_obu_bytes(data: &[u8]) -> io::Result<Option<Vec<u8>>> {
let mut cursor = Cursor::new(data);
while cursor.position() < data.len() as u64 {
let obu_start = cursor.position();
let Ok(obu) = SizedObu::parse(&mut BitReader::endian(&mut cursor, BigEndian)) else {
break;
};
// set reader to the beginning of the OBU
cursor.seek(SeekFrom::Start(obu_start))?;
if obu.obu_type != ObuType::SequenceHeader {
// Skip the full OBU
cursor.seek(SeekFrom::Current(obu.full_size() as i64))?;
continue;
};
// read the full OBU
let mut bytes = vec![0; obu.full_size() as usize];
cursor.read_exact(&mut bytes)?;
return Ok(Some(bytes));
}
Ok(None)
}
impl From<u8> for ObuType {
fn from(n: u8) -> Self {
assert!(n < 16);
match n {
1 => Self::SequenceHeader,
2 => Self::TemporalDelimiter,
3 => Self::FrameHeader,
4 => Self::TileGroup,
5 => Self::Metadata,
6 => Self::Frame,
7 => Self::RedundantFrameHeader,
8 => Self::TileList,
15 => Self::Padding,
_ => Self::Reserved,
}
}
}
impl From<ObuType> for u8 {
fn from(ty: ObuType) -> Self {
match ty {
ObuType::Reserved => 0,
ObuType::SequenceHeader => 1,
ObuType::TemporalDelimiter => 2,
ObuType::FrameHeader => 3,
ObuType::TileGroup => 4,
ObuType::Metadata => 5,
ObuType::Frame => 6,
ObuType::RedundantFrameHeader => 7,
ObuType::TileList => 8,
ObuType::Padding => 15,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use bitstream_io::{BigEndian, BitReader};
use once_cell::sync::Lazy;
use std::io::Cursor;
#[allow(clippy::type_complexity)]
static OBUS: Lazy<Vec<(SizedObu, Vec<u8>)>> = Lazy::new(|| {
vec![
(
SizedObu {
obu_type: ObuType::TemporalDelimiter,
has_extension: false,
has_size_field: true,
temporal_id: 0,
spatial_id: 0,
size: 0,
leb_size: 1,
header_len: 1,
is_fragment: false,
},
vec![0b0001_0010, 0b0000_0000],
),
(
SizedObu {
obu_type: ObuType::Padding,
has_extension: false,
has_size_field: true,
temporal_id: 0,
spatial_id: 0,
size: 10,
leb_size: 1,
header_len: 1,
is_fragment: false,
},
vec![0b0111_1010, 0b0000_1010, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
),
(
SizedObu {
obu_type: ObuType::SequenceHeader,
has_extension: true,
has_size_field: true,
temporal_id: 4,
spatial_id: 3,
size: 5,
leb_size: 1,
header_len: 2,
is_fragment: false,
},
vec![0b0000_1110, 0b1001_1000, 0b0000_0101, 1, 2, 3, 4, 5],
),
(
SizedObu {
obu_type: ObuType::Frame,
has_extension: true,
has_size_field: true,
temporal_id: 4,
spatial_id: 3,
size: 5,
leb_size: 1,
header_len: 2,
is_fragment: false,
},
vec![0b0011_0110, 0b1001_1000, 0b0000_0101, 1, 2, 3, 4, 5],
),
]
});
#[test]
fn test_parse_rtp_obu() {
for (idx, (sized_obu, raw_bytes)) in (*OBUS).iter().enumerate() {
println!("running test {idx}...");
let mut reader = BitReader::endian(Cursor::new(&raw_bytes), BigEndian);
let obu_parsed = SizedObu::parse(&mut reader).unwrap();
assert_eq!(&obu_parsed, sized_obu);
if let Some(seq_header_obu_bytes) = read_seq_header_obu_bytes(raw_bytes).unwrap() {
println!("validation of sequence header obu read/write...");
assert_eq!(&seq_header_obu_bytes, raw_bytes);
}
}
}
#[test]
fn test_read_seq_header_from_bitstream() {
let mut bitstream = Vec::new();
let mut seq_header_bytes_raw = None;
for (obu, raw_bytes) in (*OBUS).iter() {
bitstream.extend(raw_bytes);
if obu.obu_type == ObuType::SequenceHeader {
seq_header_bytes_raw = Some(raw_bytes.clone());
}
}
let seq_header_obu_bytes = read_seq_header_obu_bytes(&bitstream).unwrap().unwrap();
assert_eq!(seq_header_obu_bytes, seq_header_bytes_raw.unwrap());
}
}

View file

@ -7,8 +7,6 @@
// SPDX-License-Identifier: MPL-2.0
//
use std::path::Path;
use gst::prelude::*;
use gst_pbutils::prelude::*;
@ -22,57 +20,33 @@ fn init() {
});
}
struct Pipeline(gst::Pipeline);
impl std::ops::Deref for Pipeline {
type Target = gst::Pipeline;
#[test]
fn test_basic() {
init();
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Drop for Pipeline {
fn drop(&mut self) {
let _ = self.0.set_state(gst::State::Null);
}
}
struct Pipeline(gst::Pipeline);
impl std::ops::Deref for Pipeline {
type Target = gst::Pipeline;
impl Pipeline {
fn into_completion(self) {
self.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in self.bus().unwrap().iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
panic!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
_ => (),
}
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Drop for Pipeline {
fn drop(&mut self) {
let _ = self.0.set_state(gst::State::Null);
}
self.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
}
fn test_basic_with(video_enc: &str, audio_enc: &str, cb: impl FnOnce(&Path)) {
let Ok(pipeline) = gst::parse::launch(&format!(
"videotestsrc num-buffers=99 ! {video_enc} ! mux. \
audiotestsrc num-buffers=140 ! {audio_enc} ! mux. \
isomp4mux name=mux ! filesink name=sink"
)) else {
println!("could not build encoding pipeline");
return;
let pipeline = match gst::parse::launch(
"videotestsrc num-buffers=99 ! x264enc ! mux. \
audiotestsrc num-buffers=140 ! fdkaacenc ! mux. \
isomp4mux name=mux ! filesink name=sink \
",
) {
Ok(pipeline) => Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap()),
Err(_) => return,
};
let pipeline = Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap());
let dir = tempfile::TempDir::new().unwrap();
let mut location = dir.path().to_owned();
@ -80,95 +54,73 @@ fn test_basic_with(video_enc: &str, audio_enc: &str, cb: impl FnOnce(&Path)) {
let sink = pipeline.by_name("sink").unwrap();
sink.set_property("location", location.to_str().expect("Non-UTF8 filename"));
pipeline.into_completion();
cb(&location)
}
#[test]
fn test_basic_x264_aac() {
init();
test_basic_with("x264enc", "fdkaacenc", |location| {
let discoverer = gst_pbutils::Discoverer::new(gst::ClockTime::from_seconds(5))
.expect("Failed to create discoverer");
let info = discoverer
.discover_uri(
url::Url::from_file_path(location)
.expect("Failed to convert filename to URL")
.as_str(),
)
.expect("Failed to discover MP4 file");
assert_eq!(info.duration(), Some(gst::ClockTime::from_mseconds(3_300)));
let audio_streams = info.audio_streams();
assert_eq!(audio_streams.len(), 1);
let audio_stream = &audio_streams[0];
assert_eq!(audio_stream.channels(), 1);
assert_eq!(audio_stream.sample_rate(), 44_100);
let caps = audio_stream.caps().unwrap();
assert!(
caps.can_intersect(
&gst::Caps::builder("audio/mpeg")
.any_features()
.field("mpegversion", 4i32)
.build()
),
"Unexpected audio caps {caps:?}"
);
let video_streams = info.video_streams();
assert_eq!(video_streams.len(), 1);
let video_stream = &video_streams[0];
assert_eq!(video_stream.width(), 320);
assert_eq!(video_stream.height(), 240);
assert_eq!(video_stream.framerate(), gst::Fraction::new(30, 1));
assert_eq!(video_stream.par(), gst::Fraction::new(1, 1));
assert!(!video_stream.is_interlaced());
let caps = video_stream.caps().unwrap();
assert!(
caps.can_intersect(&gst::Caps::builder("video/x-h264").any_features().build()),
"Unexpected video caps {caps:?}"
);
})
}
#[test]
fn test_roundtrip_vp9_flac() {
init();
test_basic_with("vp9enc ! vp9parse", "flacenc ! flacparse", |location| {
let Ok(pipeline) = gst::parse::launch(
"filesrc name=src ! qtdemux name=demux \
demux.audio_0 ! queue ! flacdec ! fakesink \
demux.video_0 ! queue ! vp9dec ! fakesink",
) else {
panic!("could not build decoding pipeline")
};
let pipeline = Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap());
pipeline
.by_name("src")
.unwrap()
.set_property("location", location.display().to_string());
pipeline.into_completion();
})
}
#[test]
fn test_roundtrip_av1_aac() {
init();
test_basic_with("av1enc ! av1parse", "avenc_aac ! aacparse", |location| {
let Ok(pipeline) = gst::parse::launch(
"filesrc name=src ! qtdemux name=demux \
demux.audio_0 ! queue ! avdec_aac ! fakesink \
demux.video_0 ! queue ! av1dec ! fakesink",
) else {
panic!("could not build decoding pipeline")
};
let pipeline = Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap());
pipeline
.by_name("src")
.unwrap()
.set_property("location", location.display().to_string());
pipeline.into_completion();
})
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in pipeline.bus().unwrap().iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
panic!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
drop(pipeline);
let discoverer = gst_pbutils::Discoverer::new(gst::ClockTime::from_seconds(5))
.expect("Failed to create discoverer");
let info = discoverer
.discover_uri(
url::Url::from_file_path(&location)
.expect("Failed to convert filename to URL")
.as_str(),
)
.expect("Failed to discover MP4 file");
assert_eq!(info.duration(), Some(gst::ClockTime::from_mseconds(3_300)));
let audio_streams = info.audio_streams();
assert_eq!(audio_streams.len(), 1);
let audio_stream = &audio_streams[0];
assert_eq!(audio_stream.channels(), 1);
assert_eq!(audio_stream.sample_rate(), 44_100);
let caps = audio_stream.caps().unwrap();
assert!(
caps.can_intersect(
&gst::Caps::builder("audio/mpeg")
.any_features()
.field("mpegversion", 4i32)
.build()
),
"Unexpected audio caps {caps:?}"
);
let video_streams = info.video_streams();
assert_eq!(video_streams.len(), 1);
let video_stream = &video_streams[0];
assert_eq!(video_stream.width(), 320);
assert_eq!(video_stream.height(), 240);
assert_eq!(video_stream.framerate(), gst::Fraction::new(30, 1));
assert_eq!(video_stream.par(), gst::Fraction::new(1, 1));
assert!(!video_stream.is_interlaced());
let caps = video_stream.caps().unwrap();
assert!(
caps.can_intersect(&gst::Caps::builder("video/x-h264").any_features().build()),
"Unexpected video caps {caps:?}"
);
}

Some files were not shown because too many files have changed in this diff.