Compare commits

482 commits
0.12.4 ... main

Author SHA1 Message Date
Jordan Petridis
b5e0e0713c ci: Set build jobs according to FDO_CI_CONCURRENT when available
The CI runners run multiple jobs concurrently, and as such we should
only be building with the number of jobs allocated to us when the
variable is set.

Related to https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/-/merge_requests/1545

Also move the rest of the common flags we pass to cargo together so it's
a bit easier to read.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1791>
2024-09-20 15:05:21 +03:00
Jendrik Weise
1ff761e410 fmp4: Add tests for split-at-running-time signal
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1761>
2024-09-20 12:35:24 +03:00
Jendrik Weise
d5a9c7a940 fmp4: Add split-at-running-time signal
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1761>
2024-09-20 12:35:24 +03:00
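
A rough sketch of how an application might trigger the new signal; the argument list (a single running time) and the absence of a return value are assumptions, not taken from the commits:

```
use gst::prelude::*;

fn request_split(fmp4mux: &gst::Element) {
    // Hypothetical: ask the muxer to start a new fragment at the 10 s
    // running-time mark. Check the element documentation for the real
    // signature before relying on this.
    fmp4mux.emit_by_name::<()>("split-at-running-time", &[&gst::ClockTime::from_seconds(10)]);
}
```
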
Mathieu Duponchelle
a85b0cb72e webrtcsrc: expose MSID property on source pad
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1789>
2024-09-20 09:31:57 +03:00
Sebastian Dröge
c9b370a6e4 cea708mux: Fix off-by-one in deciding whether a buffer belongs to this or the next frame
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1774>
2024-09-19 12:05:42 +00:00
Sebastian Dröge
d69d0f8738 cea708mux: Stop with EOS if all pads are EOS instead of continuing forever
Also don't drop buffers if multiple tries are needed for aggregating
because some pads are not ready yet.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1774>
2024-09-19 12:05:42 +00:00
Sebastian Dröge
29b54ed2fc cea708mux: Don't create a separate source pad and actually push gap events downstream
The aggregator base class is already creating the source pad and the
source pad that was created here internally was never added to the
element at all. As such all gap events pushed on it never went anywhere.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1774>
2024-09-19 12:05:42 +00:00
Jan Schmidt
7905626a5f onvifmetadatapay: Set output caps earlier
As soon as input caps arrive, we can set output caps. This means
upstream can send gap events earlier, before there is any actual
metadata to send.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1779>
2024-09-19 20:45:43 +10:00
Mathieu Duponchelle
6a23ae168f webrtcsink: implement mechanism to forward metas over control channel
It may be desirable for the frontend to receive ancillary information
over the control channel.

Such information includes, but is not limited to, timecode metas; support
for other metas (e.g. custom meta) might be implemented in the future, as
well as downstream events.

This patch implements a new info message, probes buffers that arrive at
nicesink to look up timecode metas and potentially forwards them to the
consumer when the `forward-metas` property is set appropriately.

Internally, a "dye" meta is used to trace the media identifier the
packet we are about to send over relates to, as rtpfunnel bundles all
packets together.

The example frontend code also gets a minor update and now logs info
messages to the console.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1749>
2024-09-19 08:41:47 +00:00
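
A hedged sketch of opting in from the application side; the accepted value syntax for `forward-metas` is a guess ("timecode" here), so the string is handed to GStreamer's generic parser via set_property_from_str():

```
use gst::prelude::*;

fn enable_meta_forwarding(webrtcsink: &gst::Element) {
    // Guessed value: ask webrtcsink to forward timecode metas to the
    // consumer over the control data channel.
    webrtcsink.set_property_from_str("forward-metas", "timecode");
}
```
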
Mathieu Duponchelle
db026ad535 gstwebrtc-api: expose API on consumer-session for munging stereo
We cannot do that by default as this is technically non-compliant,
so we need to expose API to let the user opt into it.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1754>
2024-09-19 07:37:23 +00:00
Sebastian Dröge
aae9d5c0e9 closedcaption: cea608utils: Avoid overflow when deciding which lines to retain
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1776>
2024-09-17 12:00:56 +03:00
Seungha Yang
1675e517b3 hlscmafsink: Add playlist-root-init property
Adding a property to allow setting the base path for the init fragment to be
written in the manifest file

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1773>
2024-09-11 03:36:08 +09:00
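
A minimal sketch of how the property might be used, assuming both playlist-root and playlist-root-init are plain string properties and with made-up CDN URLs:

```
use gst::prelude::*;

fn make_hls_sink() -> Result<gst::Element, Box<dyn std::error::Error>> {
    gst::init()?;
    // Media segments are referenced relative to playlist-root, while the
    // init fragment reference in the manifest uses playlist-root-init.
    let sink = gst::ElementFactory::make("hlscmafsink")
        .property("playlist-root", "https://cdn.example.com/media")
        .property("playlist-root-init", "https://cdn.example.com/init")
        .build()?;
    Ok(sink)
}
```
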
Sebastian Dröge
1d20028b00 ci: Ignore test-with update until MSRV can be updated to 1.75+
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1772>
2024-09-10 09:19:18 +03:00
Sebastian Dröge
975e4e157b deny: Add link to issue about replacing sodiumoxide
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1772>
2024-09-10 09:19:18 +03:00
Sebastian Dröge
6a7118e535 deny: Ignore proc-macro-error being unmaintained
See https://github.com/yanganto/test-with/issues/91

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1772>
2024-09-10 09:19:18 +03:00
Sebastian Dröge
7843209692 ci: Ignore bitstream-io update until MSRV can be updated to 1.80+
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1772>
2024-09-10 09:19:18 +03:00
Sebastian Dröge
c505d9a418 Update to async-tungstenite 0.28
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1772>
2024-09-10 09:19:18 +03:00
Sebastian Dröge
0c9fb369d3 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1772>
2024-09-10 09:19:18 +03:00
Jerome Colle
fef6601094 dav1ddec: add properties for film grain synthesis and in-loop filters
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1763>
2024-09-09 14:23:15 +00:00
Sebastian Dröge
24003a79f6 mpegtslivesrc: Make sure to use the object as context for all debug logs
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1767>
2024-09-09 13:29:14 +00:00
Sebastian Dröge
c32cb20906 mpegtslivesrc: Check if old compared to new PCR clock estimation is too far off
If the difference between the two estimations is more than 1s then
consider this a discontinuity too.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1767>
2024-09-09 13:29:14 +00:00
Sebastian Dröge
c5b1ebc7d8 mpegtslivesrc: Fix order of parameters passed to add_observation()
The first one should be the internal time, i.e. the monotonic clock time
in our case, and the second one the external time.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1767>
2024-09-09 13:29:14 +00:00
Sebastian Dröge
44f64fb3f6 mpegtslivesrc: Scale monotonic time on PCR disconts to allow for continuous clock times
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1767>
2024-09-09 13:29:14 +00:00
Sebastian Dröge
453b3014e6 mpegtslivesrc: Set DISCONT flag on buffers at PCR discontinuities
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1767>
2024-09-09 13:29:14 +00:00
Sebastian Dröge
8fd2b7daa4 uriplaylistbin: Properly check for stream-collection/streams-selected events
There is only a new stream-collection event if the media has actually
changed, and that's also the only time in these tests when a
streams-selected event is sent as the default stream selection is always
used and never changed.

Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/597

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1769>
2024-09-09 14:37:48 +03:00
Sebastian Dröge
2e0f52de2c uriplaylistbin: Track actual caller in test assertion functions
Makes it easier to figure out from where it is actually called when the
assertion is failing.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1769>
2024-09-09 14:35:40 +03:00
Sebastian Dröge
7a959b9e9d uriplaylistbin: Properly clean up the pipeline on panics/assertions too
Reduces log noise.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1769>
2024-09-09 14:35:10 +03:00
Sebastian Dröge
a709eb96d9 Fix new Rust 1.81 clippy warnings
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1768>
2024-09-05 22:01:01 +03:00
Sebastian Dröge
295b9f01c2 ndisrc: Use correct receive time to re-initialize time tracking on disconts
The base receive time should not be the monotonic system clock time, but
the monotonic system clock time adjusted by the current clock calibration.
For the first time this is equivalent as the clock calibration is the default,
but for further discontinuities it is not and would cause a
discontinuity in the clock times at this point.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1766>
2024-09-05 10:18:48 +00:00
Mathieu Duponchelle
bfc32cc692 net/aws: fix spurious dispatch failures
Since https://github.com/awslabs/aws-sdk-rust/discussions/956, the AWS
SDK errors out HTTP streams that do not transfer data for more than 5
seconds.

This probably should be an opt-in behavior as it is clearly not generically
useful, but as it is we need to opt out.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1760>
2024-09-05 07:43:23 +00:00
Mathieu Duponchelle
65508cfe75 net/aws: don't discard errors from transcribe loop
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1760>
2024-09-05 07:43:23 +00:00
Arun Raghavan
e72db57179 webrtc: Fix whipclientsink name in README
The element name was changed, but the documentation wasn't updated to
match.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1764>
2024-09-03 16:44:19 -04:00
Sebastian Dröge
871756bb70 ndisrc: Reset timestamp tracking if remote time goes backwards
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1727>
2024-08-29 20:53:13 +03:00
Sebastian Dröge
ee4416ee5f ndisrc: Add a clocked timestamp mode that provides a clock that follows the remote timecodes
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1727>
2024-08-29 20:53:13 +03:00
Sebastian Dröge
ab3db748be ndisrc: Get rid of unnecessary AtomicRefCell dependency
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1727>
2024-08-29 16:32:51 +00:00
Sebastian Dröge
0c4ec370cf ndisrc: Remove slope workaround in timestamping code
This was needed for an old version of the NDI HX Camera iOS application
and has been fixed for quite a while. Let's get rid of unnecessarily
complicated code.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1727>
2024-08-29 16:32:51 +00:00
Sebastian Dröge
57821cade4 ndisrc: Only calculate timecode/timestamp mappings if necessary
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1727>
2024-08-29 16:32:51 +00:00
Sebastian Dröge
04da3b2047 ndisrc: receiver: Improve debug message when receiving frames
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1727>
2024-08-29 16:32:51 +00:00
Sebastian Dröge
84fef267b5 ndisrc: receiver: Remove some code duplication
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1727>
2024-08-29 16:32:51 +00:00
Sebastian Dröge
f2658eb773 ndisrc: Move from start/stop to change_state for slight code simplification
All state change related code is in a single place now.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1727>
2024-08-29 16:32:51 +00:00
Seungha Yang
0135c4251c transcriberbin: Fix passthrough state change
Sync state of child bin appropriately when passthrough is disabled

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1758>
2024-08-28 05:51:26 +00:00
Sebastian Dröge
c57b6c9c0a Update CHANGELOG.md for 0.13.1 2024-08-27 22:00:48 +03:00
Sebastian Dröge
3c38ed7ff0 deny: Update
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1757>
2024-08-27 21:10:48 +03:00
Sebastian Dröge
fc29ff7d8b hlssink3: Update to sprintf 0.2
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1757>
2024-08-27 21:06:52 +03:00
Sebastian Dröge
52895223a8 fmp4: Update to dash-mpd 0.17
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1757>
2024-08-27 21:02:43 +03:00
Sebastian Dröge
7e9a6d6893 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1757>
2024-08-27 20:58:48 +03:00
Mathieu Duponchelle
2f9bb62b6b gstwebrtc-api: create control data channel when offering
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1755>
2024-08-27 07:52:12 +02:00
Sanchayan Maity
f0d42b88cb Add s3putobjectsink to README
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1550>
2024-08-26 19:56:34 +00:00
Sanchayan Maity
f3206c2e1a aws: Add next-file support to putobjectsink
Add `next-file` support to `awss3putobjectsink` on similar lines to
the `next-file` support in `multifilesink`.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1550>
2024-08-26 19:56:34 +00:00
Sanchayan Maity
d274caeb35 whepsrc: Fix incorrect default caps
add-transceiver needs application/x-rtp caps and not raw caps. We were
providing raw caps which is incorrect.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1748>
2024-08-26 19:44:37 +05:30
Mathieu Duponchelle
66727188cf net/aws: fix sanity check in transcribe loop
When we receive a new alternative we want to avoid iterating out of
bounds, but the comparison between the current index and the length of
the alternative should not log an error when partial_index == length, as
Vec::drain(length..) is valid, and it is completely valid for AWS to
send us a new alternative with as many items as we have already
dequeued.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1751>
2024-08-26 11:37:08 +02:00
Sebastian Dröge
7e912b0dde video/gtk4: Clean up imports a bit
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1708>
2024-08-23 08:11:15 +00:00
Sebastian Dröge
3fd3a32b6f video/gtk4: Clean up GStreamer GL context initialization
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1708>
2024-08-23 08:11:15 +00:00
Jordan Petridis
4f01421cc4 video/gtk4: Make feature "v4_6" the default assumption
It's the minimum gtk4 version we target anyway, so we
can remove some of the extra features in the toml and require
it by default.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1708>
2024-08-23 08:11:15 +00:00
Jordan Petridis
ff5032934d video/gtk4: Remove some indentation in frame.rs
We have fewer cfg features now and don't need the extra
brackets to guard the codepaths.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1708>
2024-08-23 08:11:15 +00:00
Jordan Petridis
602760d0d8 video/gtk4: Remove cfg flag for gst-gl
GTK 4 hard-depends on GL on all platforms, and now both
Windows and macOS have codepaths for the paintable sink to
produce GLTextures.

Thus we can now drop the cfg build flag we have making it optional.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1708>
2024-08-23 08:11:15 +00:00
Sanchayan Maity
320f36a462 hlssink3: Use fragment duration from splitmuxsink if available
splitmuxsink now reports fragment offset and duration in the
splitmuxsink-fragment-closed message. Use this duration value
for the MediaSegment when available.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1728>
2024-08-22 15:13:21 +00:00
Mathieu Duponchelle
4cf93ccbdb net/webrtc: Add missing npm command to README
Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/589

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1746>
2024-08-22 15:46:28 +02:00
Sebastian Dröge
87b72f768b livesync: Report latency query results when handling latency query too
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1739>
2024-08-22 07:33:47 +00:00
Jerome Colle
dee0e32dde webrtcsink: add nvv4l2av1enc support
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1735>
2024-08-22 06:41:52 +00:00
Mathieu Duponchelle
8ad882bed5 gstwebrtc-api: address issues raised by mix matrix support
1c48d7065d was mistakenly merged too
early, and there were concerns about the implementation and API design:

The fact that the frontend had to expose a text area specifically for
sending over a mix matrix, and had to manually edit floats into the
stringified JSON, was suboptimal.

Said text area was always present even when remote control was not
enabled.

The sendControlRequest API was made more complex than needed by
accepting an optional stringifier callback.

This patch addresses all those concerns:

The deserialization code in webrtcsink is now made more clever and
robust by first having it pick a numerical type to coerce to when
deserializing arrays with numbers, then making sure it doesn't allow
mixed types in arrays (or arrays of arrays as those too must share
the same inner value type).

The frontend side simply sends over strings wrapped with a request
message envelope to the backend.

The request text area is only shown when remote control is enabled.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1725>
2024-08-22 05:54:46 +00:00
Piotr Brzeziński
c4bcdea830 hlscmafsink: Add new-playlist signal
Allows you to switch output between folders without having to change state to READY to close the current playlist.
Closes the current playlist immediately and starts a new one at the currently set location.
Should be used after changing the relevant location properties.
Makes use of the send-headers signal in cmafmux.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1692>
2024-08-22 02:06:51 +00:00
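
A sketch of the flow described above, assuming new-playlist is an action signal without arguments and that location/playlist-location are the relevant string properties (the property names are not spelled out in the commit):

```
use gst::prelude::*;

fn switch_output_folder(hlscmafsink: &gst::Element) {
    // First point the sink at the new output folder...
    hlscmafsink.set_property("location", "/tmp/hls/new/segment%05d.m4s");
    hlscmafsink.set_property("playlist-location", "/tmp/hls/new/manifest.m3u8");
    // ...then close the current playlist and start a new one in place,
    // without a state change to READY.
    hlscmafsink.emit_by_name::<()>("new-playlist", &[]);
}
```
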
Piotr Brzeziński
798936afc9 cmafmux: Add send-headers signal
Forces cmafmux to output headers for the init segment again, alongside the next chunk.
Needed for hlscmafsink to support changing output paths on the fly, without going back to READY.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1692>
2024-08-22 02:06:51 +00:00
Piotr Brzeziński
ad0a23fee7 cmafmux: Add opus support
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1692>
2024-08-22 02:06:51 +00:00
Jordan Petridis
c43fc2d576 ci: Pin the windows jobs to gstreamer runners for now
There are some seemingly random failures if the
jobs get scheduled on the other windows runners,
and they need to be debugged further.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1736>
2024-08-22 01:17:18 +00:00
Jordan Petridis
b9fcb99cd4 ci: Update the .cargo/config file
```
warning: `/builds/alatiera/gst-plugins-rs/.cargo/config` is deprecated in favor of `config.toml`
```

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1736>
2024-08-22 01:17:18 +00:00
Jordan Petridis
b4f22a52ff ci: Add a default retry policy for jobs
Automatically retry if it's a system failure or similar

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1736>
2024-08-22 01:17:18 +00:00
Mathieu Duponchelle
5dc2d56c0e webrtcsink: store mids per-session instead of globally
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1730>
2024-08-21 21:20:40 +00:00
Mathieu Duponchelle
16ee51621e webrtcsink: fix segment format mismatch with remote offer
webrtcsink was starting the negotiation process on Ready and concurrently
moving the consumer pipeline to Playing, but when answering, the remote
description was set so fast that input streams were connected (and the time
format set on appsrc) before the state change to Paused had completed.

This meant gst_base_src_start was happening after that and setting the format
back to bytes; the time segment that came in next then caused:

basesrc gstbasesrc.c:4255:gst_base_src_push_segment:<video_0> segment format mismatched, ignore

And the consumer pipeline errored out.

The same issue existed in theory when webrtcsink was creating the offer,
but was much harder to trigger as it required that the remote answer
came in before the state change to Paused had completed.

This commit fixes the issue by simply waiting for the state to have
changed to Paused before negotiating.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1730>
2024-08-21 21:20:40 +00:00
Piotr Brzeziński
b6406013c5 hlssink3: Fix racy test by separating events (signals) from bus messages
Was regularly failing on the CI. Bus messages are handled async here, so they need to be tracked separately.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1737>
2024-08-21 19:49:09 +00:00
Mathieu Duponchelle
170e769812 audio: add speechmatics transcriber
Element implemented around the Speechmatics API:

<https://docs.speechmatics.com/rt-api-ref>

The element also comes with translation support, and offers a similar
interface to the one exposed by `awstranscriber`.

The Speechmatics service has good accuracy, and can be deployed on
premises, offering an advantage over AWS transcribe.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1665>
2024-08-21 17:43:02 +00:00
Jordan Petridis
4f69dcd210 ci: Remove leftover scripts
Both of these have been moved into the main image for a while now.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1733>
2024-08-21 06:44:52 +00:00
Piotr Brzeziński
982a9a9aea hlssink3: Post hls-segment-added message
Posts a simple 'hls-segment-added' message with the segment location, start running time and duration.
With hlssink2, it was possible to catch 'splitmuxsink-fragment-closed', but since hlssink3 doesn't forward that message
(and hlscmafsink doesn't even use that mux), the new one was added to allow for listening for new fragments being added.

I extended the existing tests to check whether this message is posted correctly.
They theoretically only cover hlssink3, but hlscmafsink uses the same base class so it should be alright for now.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1677>
2024-08-20 18:32:59 +00:00
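
A small sketch of listening for the new message; the structure field names are not shown in the commit, so the example just prints the whole structure:

```
fn watch_segments(bus: &gst::Bus) {
    for msg in bus.iter_timed(gst::ClockTime::NONE) {
        if let gst::MessageView::Element(elem) = msg.view() {
            if let Some(s) = elem.structure() {
                if s.has_name("hls-segment-added") {
                    // Expected to carry the segment location, start
                    // running time and duration.
                    println!("segment added: {s}");
                }
            }
        }
    }
}
```
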
Jordan Petridis
5172e8e520 ci: Use the windows specific image tags
Followup to c5dfc87953

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1731>
2024-08-20 17:21:20 +03:00
Sebastian Dröge
eb0a44fe67 ndisrc: Move timestamp handling from demuxer to source
This allows putting correct timestamps on buffers coming out of the
source already instead of leaving them unset until the demuxer.

And also calculate timestamps for metadata buffers.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1718>
2024-08-16 06:07:35 +00:00
Mathieu Duponchelle
1c48d7065d gstwebrtc-api example: add support for requesting mix matrix
This is one example of how a consumer might send over custom upstream
event requests to the producer.

As webrtcsink will deserialize numbers preferentially as integers, we need
a custom stringifying function to ensure members of the matrix array are
indeed serialized with a floating point.

An optional stringifier parameter is thus added to the
sendControlRequest API.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1711>
2024-08-15 15:42:04 +00:00
Mathieu Duponchelle
01e28ddfe2 webrtcsink: implement generic data channel control mechanism ..
.. and deprecate data channel navigation in favor of it.

A new property, "enable-data-channel-control", is exposed; when set to
TRUE, a control data channel is offered over which typed upstream events
can be sent.

This means further upstream events will be usable; for now only
navigation and custom upstream events are handled.

In addition, send response messages to notify the consumer of whether
its requests have been handled.

In the future this can also be extended to allow the consumer to send
queries or seek events.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1711>
2024-08-15 15:42:04 +00:00
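
A minimal sketch of opting in from the producer side, assuming the property is a plain boolean as the description suggests:

```
use gst::prelude::*;

fn make_webrtcsink() -> Result<gst::Element, Box<dyn std::error::Error>> {
    gst::init()?;
    // Offer the typed control data channel instead of relying on the
    // deprecated navigation data channel.
    let sink = gst::ElementFactory::make("webrtcsink")
        .property("enable-data-channel-control", true)
        .build()?;
    Ok(sink)
}
```
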
Tim-Philipp Müller
0a4dc29efe ci: tag cerbero trigger job as placeholder job 2024-08-14 17:23:59 +01:00
Jordan Petridis
086281b03d ci: Update ci-template sha
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1721>
2024-08-14 18:23:48 +03:00
Mathieu Duponchelle
0a6963f7ce gstwebrtc-api: example: use http by default
That way the webpage connects with ws:// to the signaller.

Fixes: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/589
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1704>
2024-08-14 14:10:04 +00:00
Sebastian Dröge
102185d09d mpegtslivesrc: Handle PCR discontinuities as errors for now
More work is needed to make this work seamlessly and right now it would
simply cause invalid timestamps to be created.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1717>
2024-08-14 12:34:18 +00:00
Sebastian Dröge
ede82ca5b4 hlssink3: Don't use is-live=true
This sometimes produces imperfect timestamps that cause the fragment
duration to be slightly different than expected.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1716>
2024-08-14 13:05:40 +03:00
Tim-Philipp Müller
e21f341a03 ci: set cerbero trigger job timeout to 4h
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1716>
2024-08-13 20:34:17 +01:00
Guillaume Desmottes
72e53b9f16 videofx: update image and image_hasher deps
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1707>
2024-08-13 07:21:59 +00:00
Guillaume Desmottes
ea29052c39 cdg: update to image 0.25
I just published a new cdg_renderer release depending on image 0.25.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1707>
2024-08-13 07:21:59 +00:00
Jordan Petridis
3e97fef6ce ci: Generate html and cobertura coverage with a single command
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1709>
2024-08-13 06:41:17 +00:00
Sebastian Dröge
bc930122ba webrtcsrc: Make sure to always call end_session() without the state lock
This was already done in another place for the same reason: preventing a
deadlock. It's probably not correct as hinted by the FIXME comment but
better than deadlocking at least.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1701>
2024-08-13 06:04:09 +00:00
Mathieu Duponchelle
0da1c8e9c9 webrtcsink: fix assertions when finalizing
Dumping the pipeline on state changes from an async bus handler was
triggering criticals.

Instead, dump from the sync handler.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1706>
2024-08-12 09:13:06 +02:00
Sebastian Dröge
30a5987c9e rtp: mp4gpay: Don't set seqnum-base on the caps
This is supposed to be set by another layer, e.g. rtspsrc.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1693>
2024-08-10 08:06:40 +00:00
Sebastian Dröge
de42ae432c rtp: basepay: Fix off-by-one with seqnum-offset
Setting a seqnum-offset of 1 would've caused the first packet to have a
seqnum of 2 instead of 1.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1693>
2024-08-10 08:06:40 +00:00
Sebastian Dröge
c5163a73ee rtp: basepay: Don't negotiate twice in the beginning
If srcpad caps are already set as part of sinkpad caps handling, unset
the reconfigure flag so negotiation does not happen yet another time on
the first buffer.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1693>
2024-08-10 08:06:40 +00:00
Sebastian Dröge
31e836f4d6 rtp: basepay: Negotiate SSRC and PT with downstream if not set via property
This makes the new payloaders closer to the old ones, and makes usage in
webrtcbin easier.

Also properly configure default PT of subclasses. Previously any PT that
was set for these subclasses via g_object_new() would be overridden by
the default one during construction.

Additionally, do SSRC collision handling while queueing output packets.
This is the more natural place as that's where the SSRC is actually
used; it happens potentially earlier and also allows draining any
pending packets before the SSRC change in the caps.

Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/557

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1693>
2024-08-10 08:06:40 +00:00
Sebastian Dröge
914ffc8be9 rtp: basepay: Initialize class fields
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1693>
2024-08-10 08:06:40 +00:00
Sebastian Dröge
c554a5dc76 rtp: basepay: Don't unset stats on FlushStop
They are still valid and unsetting them here would cause no stats to
ever be updated again until the next state change.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1693>
2024-08-10 08:06:40 +00:00
Sebastian Dröge
035a199109 rtp: basepay: Don't use suggested SSRC on collisions if it's the current one
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1693>
2024-08-10 08:06:40 +00:00
Mathieu Duponchelle
9080c90120 net/webrtc: add support for answering to webrtcsink
Support was added to the base class when the AWS KVS signaller was
implemented, but the default signaller still only supported the case
where the producer was creating the offer.

Also extend the JavaScript API.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1702>
2024-08-09 14:02:48 +02:00
Mathieu Duponchelle
a9ff9615ff net/webrtc: correct signaller debug category
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1702>
2024-08-08 18:28:43 +02:00
Mathieu Duponchelle
64f0b76f71 webrtc: update README with section on embedded signalling / web services
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1671>
2024-08-08 16:40:46 +02:00
Mathieu Duponchelle
9455e09d9f webrtcsink: expose properties for running web server
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1671>
2024-08-08 16:40:46 +02:00
Mathieu Duponchelle
b709c56478 webrtcsink: expose properties for running signalling server
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1671>
2024-08-07 19:55:00 +02:00
Sebastian Dröge
6c04b59454 webrtcsrc: Don't hold the state lock while removing sessions
Removing a session can drop its bin and during release of the bin its
pads are removed, but the pad-removed handler is also taking the state
lock.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1695>
2024-08-07 09:35:15 +00:00
Sebastian Dröge
ec38d416aa fmp4mux: Remove _ prefix of actually used parameter
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1694>
2024-08-07 11:16:51 +03:00
Sebastian Dröge
9006a47e9b mp4mux: added image orientation tag support
Based on a patch by sergey radionov <rsatom@gmail.com>

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1694>
2024-08-07 11:16:25 +03:00
Guillaume Desmottes
cfe9968a77 gtk4: add custom widget automatically updating the window size
Use it in the example and debug window but let's not make it public yet.
Plan is to have a proper bin on top of gtk4paintablesink at some point.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1680>
2024-08-06 10:29:41 +00:00
Guillaume Desmottes
17910dd532 gtk4: add window-{width,height} property
Allow the application to pass the actual rendering size so overlays can
be rendered accordingly.

Fix #562

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1680>
2024-08-06 10:29:41 +00:00
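
A sketch of how an application might report its render size, assuming the properties live on gtk4paintablesink; the values are passed as strings so the exact integer type does not have to be guessed:

```
use gst::prelude::*;

fn report_render_size(paintable_sink: &gst::Element, width: u32, height: u32) {
    // Tell the sink the size at which the paintable is actually rendered
    // so overlays can be composited at the right resolution.
    paintable_sink.set_property_from_str("window-width", &width.to_string());
    paintable_sink.set_property_from_str("window-height", &height.to_string());
}
```
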
Sebastian Dröge
ba0265970e deny: Update
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1691>
2024-08-06 09:10:08 +03:00
Sebastian Dröge
df330093d5 deny: Update to new configuration format
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1691>
2024-08-06 09:05:44 +03:00
Sebastian Dröge
b83b6031e5 Update etherparse and async-tungstenite dependencies
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1691>
2024-08-06 09:00:32 +03:00
Sebastian Dröge
184778d087 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1691>
2024-08-06 08:57:31 +03:00
Dave Lucia
3a949db720 net/webrtc: Fix turn-servers nick: user -> use
Noticed this typo

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1690>
2024-08-05 12:38:51 -04:00
Guillaume Desmottes
2333b241f0 gtk4: log paintable size in snapshot
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1689>
2024-08-05 15:53:19 +02:00
Sebastian Dröge
fa060b9fa0 Fix various 1.80 clippy warnings
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1688>
2024-08-05 14:14:17 +03:00
Jordan Petridis
1316b821c4 video/gtk4: Move the dmabuf cfg to the correct bracket level
This was defined one bracket above, which was causing the
gst-gl codepath below to also be disabled when the dmabuf
feature was not enabled.

This was also resulting in the following warning, as
we were never creating the MappedFrame::GL variant due to this:

```
warning: unused variable: `wrapped_context`
   --> video/gtk4/src/sink/frame.rs:541:85
    |
541 | ...", feature = "gst-gl"))] wrapped_context: Option<
    |                             ^^^^^^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_wrapped_context`
    |
    = note: `#[warn(unused_variables)]` on by default

warning: variant `GL` is never constructed
  --> video/gtk4/src/sink/frame.rs:80:5
   |
74 | enum MappedFrame {
   |      ----------- variant in this enum
...
```

Move the cfg to the appropriate place where it encapsulates only
the dmabuf related code.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1682>
2024-08-01 15:44:58 +03:00
Thibault Saunier
a05ab37b49 tracers: Add a tracer that dumps data flow into .pcap files
See documentation for more details

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/879>
2024-07-31 20:27:27 +00:00
Mathieu Duponchelle
86039dd5c1 webrtc-api example: do not rely on webpack / npm proxying websocket
Instead simply use the desired address directly from the reference
example; this makes it work out of the box without placing expectations
on the web server.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1674>
2024-07-30 16:29:54 +00:00
Mathieu Duponchelle
79657e5671 transcriberbin: fix inspect with missing elements
Relax the dependency on `awstranscriber` by still building the initial
state when it is absent; this also means an alternative transcriber can
be linked even when `awstranscriber` was not available during
construction.

Also fix property getters / setters to avoid unwrapping the pad state,
and bubble up channel bin construction errors instead of unwrapping (e.g.
when textwrap was not available).

Fixes: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/584
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1679>
2024-07-29 08:38:36 +00:00
Sebastian Dröge
380448587b gtk4: Enable GtkGraphicsOffload::black-background property when building with GTK 4.16
This allows offloading in more situations.

Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/576

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1673>
2024-07-18 12:28:20 +03:00
Loïc Le Page
5a1d12419f gstwebrtc-api: always include index file in dist for convenience
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1670>
2024-07-17 08:27:31 +00:00
François Laignel
34b791ff5e webrtc: add raw payload support
This commit adds support for raw payloads such as L24 audio to `webrtcsink` &
`webrtcsrc`.

Most changes take place within the `Codec` helper structure:

* A `Codec` can now advertise a depayloader. This also ensures that a format
  not only can be decoded when necessary, but it can also be depayloaded in the
  first place.
* It is possible to declare raw `Codec`s, meaning that their caps are compatible
  with a payloader and a depayloader without the need for an encoder and decoder.
* Previous accessor `has_decoder` was renamed as `can_be_received` to account
  for codecs which can be handled by an available depayloader with or without
  the need for a decoder.
* New codecs were added for the following formats:
  * L24, L16, L8 audio.
  * RAW video.

The `webrtc-precise-sync` examples were updated to demonstrate streaming of raw
audio or video.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1501>
2024-07-16 19:32:02 +00:00
Sebastian Dröge
9c84748fc3 gopbuffer: Use workspace dependency for gst-plugin-version-helper 2024-07-16 19:13:49 +03:00
Sebastian Dröge
d4d02d70a8 rtp: Require bitstream-io < 2.4.0
Version 2.4.0 contains a breaking change that it shouldn't, and updating
to 2.4.0 requires a newer Rust version.

See https://github.com/tuffy/bitstream-io/issues/22
2024-07-16 19:13:49 +03:00
Sebastian Dröge
d20ffd5d39 Update CHANGELOG.md for 0.13.0 2024-07-16 19:13:49 +03:00
sergey radionov
fdfa3a33d9 fmp4mux: added image orientation tag support
fix #565

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1669>
2024-07-16 18:49:07 +07:00
Taruntej Kanakamalla
3a8462367e threadshare: udpsrc: add buffer-size property
Use buffer-size to set the receive buffer size
on the socket

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1636>
2024-07-15 12:13:41 +00:00
Taruntej Kanakamalla
276ec91cb2 threadshare: udpsrc: add loop property to set multicast loopback
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1636>
2024-07-15 12:13:41 +00:00
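
A sketch of using the two new properties, assuming the thread-sharing source is registered as ts-udpsrc; the property types are not stated in the commits, so the values are passed as strings:

```
use gst::prelude::*;

fn tune_ts_udpsrc() -> Result<gst::Element, Box<dyn std::error::Error>> {
    gst::init()?;
    let src = gst::ElementFactory::make("ts-udpsrc").build()?;
    // Request a 2 MiB socket receive buffer and disable multicast loopback.
    src.set_property_from_str("buffer-size", "2097152");
    src.set_property_from_str("loop", "false");
    Ok(src)
}
```
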
François Laignel
6e9855c36b webrtcsink: fix property types for rav1enc
Fixes: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/572
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1667>
2024-07-12 18:59:20 +02:00
François Laignel
000c486568 rav1enc: document bitrate property unit
See:

e34e772e47/src/rate.rs (L365)

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1667>
2024-07-12 18:59:17 +02:00
Sanchayan Maity
12be9a24a6 net/quinn: Fix generation of self signed certificate
The certificate chain was incorrectly being passed the private key instead
of certificate. With rustls 0.23.11 version, this error was being caught
and reported. As stated in the 0.23.11 release, it has a new feature

"API for determining whether a CertifiedKey's certificate and private key
matches: keys_match(). This is called from existing fallible functions
that accept a private key and certificate (for example, with_single_cert())
so these functions now detect this misconfiguration."

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1666>
2024-07-12 12:26:54 +05:30
Sebastian Dröge
797dd3f3ca Update version to 0.14.0-alpha.1
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1663>
2024-07-11 20:00:24 +03:00
Sebastian Dröge
a8ccfe49d9 webrtc: Require livekit-protocol < 0.3.4 due to uncoordinated breaking changes
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1663>
2024-07-11 20:00:24 +03:00
Sebastian Dröge
73fa904a7b Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1663>
2024-07-11 20:00:24 +03:00
Robert Mader
c7ef8e8185 gtk4: Use scale instead of rotate where possible
In order to make it easier for GTK4 to figure out that the resulting
operation is 2D and - crucially - can get offloaded to Wayland.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1662>
2024-07-10 10:58:20 +02:00
Robert Mader
2238db2005 gtk4: Support RGBx formats in SW paths
GTK4 has matching enums and thus should handle them fine. Furthermore,
it should allow renderers to reduce memory bandwidth by applying
occlusion culling.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1660>
2024-07-09 16:53:01 +02:00
Sebastian Dröge
3609411801 gtk4: Invalidate paintable size if changing because of orientation changes
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1659>
2024-07-08 14:49:43 +03:00
Sebastian Dröge
98b28d69ce Update for new debug log macro syntax
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1658>
2024-07-08 11:25:23 +03:00
Sebastian Dröge
f88f5b03c4 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1658>
2024-07-08 10:58:14 +03:00
Sebastian Dröge
4123b5d1a1 mpegtslive: Update for gst::Clock::set_calibration() API changes
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1658>
2024-07-08 09:59:06 +03:00
Sebastian Dröge
8522c8a445 gtk4: Add support for rotations / flipping
Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/284

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1590>
2024-07-07 07:43:49 +00:00
Sanchayan Maity
2fe852166e aws/s3hlssink: Do not call abort before finishing uploads
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1653>
2024-07-06 14:44:08 +00:00
Sebastian Dröge
6e974cf4b9 gtk4: Document paintable properties correctly
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1655>
2024-07-06 11:36:55 +00:00
Sebastian Dröge
195c089f18 gtk4: Declare correct default value for force-aspect-ratio property
It's defaulting to false as generally keeping the aspect ratio is the
job of the widget layout and not the paintable.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1654>
2024-07-06 13:41:44 +03:00
Artem Martus
ac0e24b2bd tutorial-1: Fix broken links for struct references
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1652>
2024-07-04 15:01:22 +00:00
Sebastian Dröge
4ab8d92f28 mpegtslivesrc: Don't skip the first MPEG-TS packet
If every buffer contains only a single MPEG-TS packet we would otherwise
skip over everything and would never observe a PCR.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1651>
2024-07-04 17:01:43 +03:00
Sebastian Dröge
c701aa6f84 audioloudnorm: Fix limiter buffer index wraparound off-by-one for the last buffer
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1649>
2024-07-02 19:31:11 +03:00
Sebastian Dröge
bd2a039c8d livesync: Use the actual output buffer duration of gap filler buffers
Otherwise the following can happen:

  - 25fps stream
  - buffer with PTS 0ms, duration 20ms arrives, is output
  - buffer with PTS 40ms, duration 20ms arrives
  - is considered early because 20ms < 40ms
  - filler buffer with PTS 20ms and 40ms duration is output
  - buffer with PTS 40ms is output

After this change no filler would be inserted because the gap is smaller
than the duration of a filler buffer.

Also, previously the 40ms duration would be used if a filler was
previously output because in that case the cached output buffer duration
would've already been patched from 20ms to 40ms.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1647>
2024-07-02 17:15:58 +03:00
Philippe Normand
eee93aea52 rtp2: Fix typo on auto-header-extension property name
The rtp (de)pay elements use auto-header-extension so the new elements should do
the same.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1646>
2024-07-02 09:35:39 +01:00
Edward Hervey
95ae67752f net: New mpegtslive element
This element allows wrapping an existing live "mpeg-ts source" (udpsrc,
srtsrc,...) and providing a clock based on the actual PCR of the stream.

Combined with `tsdemux ignore-pcr=True` downstream of it, this allows playing
back the content at the same rate as the (remote) provider **and** not modifying
the original timestamps.

Co-authored-by: Sebastian Dröge <slomo@coaxion.net>
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1640>
2024-07-01 15:29:22 +02:00
Mathieu Duponchelle
0ef886ea16 transcriberbin: fix internal ghost pad name regression
As part of https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1593
source pad names on inner transcription bins had a suffix appended, but
other pieces of the code were not updated to account for that.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1645>
2024-07-01 11:47:39 +02:00
leonardo salvatore
f303992e0c webrtcsink: initial support for vpuenc_h264 encoder for imx8mp, default values set to cover a common streaming scenario
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1639>
2024-07-01 07:34:04 +00:00
Jordan Petridis
718e757669 video/gtk4: Dehardcode module name in the Flatpak example in the readme 2024-06-29 15:56:06 +00:00
Mathieu Duponchelle
f0df6874d8 transcriberbin: fix property proxying
As part of https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1546
the element started implementing the GstChildProxy interface in order to
expose properties on its sink pads, but the implementation was
incorrect and broke proxying to child elements.

In addition, an intermediary bin was introduced with no name, making it
hard to set the properties of the inner elements through the child
proxy interface; it is now named according to the name of the pad it
corresponds to.

Finally, the default transcriber is back to being named "transcriber".

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1642>
2024-06-28 14:24:08 +00:00
Sebastian Dröge
960529d90d livesync: Add sync property for allowing to output buffers as soon as they arrive
By default livesync will wait for each buffer on the clock. If sync is
set to false, it will output buffers immediately once they're available
and only wait on the clock for outputting gap filler buffers.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1635>
2024-06-26 16:21:42 +00:00
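
A minimal sketch, assuming sync is a plain boolean property:

```
use gst::prelude::*;

fn make_livesync_nonblocking() -> Result<gst::Element, Box<dyn std::error::Error>> {
    gst::init()?;
    // With sync=false buffers are forwarded as soon as they arrive; the
    // clock is only waited on when gap filler buffers must be produced.
    let livesync = gst::ElementFactory::make("livesync")
        .property("sync", false)
        .build()?;
    Ok(livesync)
}
```
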
Sebastian Dröge
bbf131086a livesync: Synchronize on the first buffer too
Previously the first buffer would be output immediately and
synchronization would only happen from the second buffer onwards.
This would mean that the first buffer would potentially be output too
early.

Instead, if there is no known output timestamp yet but a buffer with a
timestamp arrives, first of all take its start as the initial output
timestamp and synchronize on that buffer.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1635>
2024-06-26 16:21:42 +00:00
Sebastian Dröge
7caf6b2073 livesync: Use let-else in a few more places
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1635>
2024-06-26 16:21:41 +00:00
Sebastian Dröge
505fab2e1c livesync: Allow queueing up to latency buffers
This was already reported by the latency query, and not doing this would
require always putting a queue before livesync.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1635>
2024-06-26 16:21:41 +00:00
Guillaume Desmottes
a10577b42c aws: log error if sink failed to start
I find it confusing that the element was failing without reporting any
error in its logs.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1638>
2024-06-26 11:22:54 +02:00
Guillaume Desmottes
0ecbd3f953 aws: use DisplayErrorContext when displaying SDK errors
As suggested in the aws crate documentation, wrap SDK errors with
DisplayErrorContext so their Display implementation outputs the full
context.

Improve error display from "dispatch failure" to

"dispatch failure: io error: error trying to connect: dns error: failed
to lookup address information: Name or service not known: dns error:
failed to lookup address information: Name or service not known: failed
to lookup address information: Name or service not known
(DispatchFailure(DispatchFailure { source: ConnectorError { kind: Io,
source: hyper::Error(Connect, ConnectError(\"dns error\", Custom { kind:
Uncategorized, error: \"failed to lookup address information: Name or
service not known\" })), connection: Unknown } }))"

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1638>
2024-06-26 10:47:10 +02:00
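
A minimal sketch of the wrapping pattern, assuming the aws-smithy-types crate that the SDK builds on:

```
use aws_smithy_types::error::display::DisplayErrorContext;

fn log_sdk_error<E: std::error::Error>(err: E) {
    // DisplayErrorContext walks the error's source() chain, so the full
    // "dispatch failure: io error: ... dns error: ..." context is printed
    // instead of only the top-level message.
    eprintln!("request failed: {}", DisplayErrorContext(err));
}
```
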
Guillaume Desmottes
3b7b2cd37b aws: rely on WaitError Display implementation
The Display implementation of WaitError already displays the underlying
SDK error and the metadata, so we can just use that.

Will also be used to provide more context in the next patch.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1638>
2024-06-26 10:46:46 +02:00
Sanchayan Maity
0bd98e2c34 net/quinn: Allow dropping buffers when buffer size exceeds maximum datagram size
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1613>
2024-06-25 20:15:40 +05:30
Sanchayan Maity
e00ebca63f net/quinn: Add stats property for connection statistics
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1613>
2024-06-25 20:15:40 +05:30
Sanchayan Maity
2b35f009fb net/quinn: Update quinn to 0.11.2
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1613>
2024-06-25 20:15:40 +05:30
Sanchayan Maity
cf7172248c net/quinn: Allow setting some parameters from TransportConfig
As of now, we expose the below four properties from `TransportConfig`.
- Initial MTU
- Minimum MTU
- Datagram receive buffer size
- Datagram send buffer size

Maximum UDP payload size from `EndpointConfig` and upper bound from
`MtuDiscoveryConfig` are also exposed as properties.

See the below documentation for further details.
- https://docs.rs/quinn/latest/quinn/struct.TransportConfig.html
- https://docs.rs/quinn/latest/quinn/struct.MtuDiscoveryConfig.html
- https://docs.rs/quinn/latest/quinn/struct.EndpointConfig.html

While at it, also clean up passing function parameters to the functions
in utils.rs.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1613>
2024-06-25 20:15:40 +05:30
Sanchayan Maity
bc5ed023e4 net/quinn: Improve datagram handling
We now check if the peer actually supports Datagram and refuse to
proceed if it does not. Since the datagram size can actually change
over the lifetime of a connection according to variation in path MTU
estimate, also check buffer size before trying to send.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1613>
2024-06-25 20:15:40 +05:30
Sebastian Dröge
dbad98132f deny: Add another override for livekit 2024-06-25 10:58:56 +03:00
Matthew Waters
39b61195ad rtprecv: ensure that stopping the rtp src task does not critical
When a pad is released, we were removing the pad from an internal
list. If the pad was not already deactivated, the deactivation would
attempt to look for the pad in that list and panic if it was not there.

Fix by delaying removal of the pad from the list until after pad
deactivation occurs.

Also includes test.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1618>
2024-06-24 13:13:28 +00:00
Matthew Waters
10a31a397e rtp/recv: support pushing buffer lists from the jitterbuffer
Multiple concurrent buffers produced by the jitterbuffer will be
combined into a single buffer list which will be sent downstream.

Events or queries that interrupt the buffer flow will cause a split in
the output buffer list.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1618>
2024-06-24 13:13:28 +00:00
Matthew Waters
d036abb7d2 rtp/recv: support buffer lists on rtp sink pad
In one case, improves throughput by 25% when buffer lists are used.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1618>
2024-06-24 13:13:28 +00:00
Matthew Waters
df4a4fb2ef rtp/send: support receiving buffer lists
Can reduce processing overhead if many buffers are pushed concurrently.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1618>
2024-06-24 13:13:28 +00:00
Matthew Waters
2d1f556794 rtp/session: guard against a busy wait with no members
If the number of members is 0, then the calculated time to the next rtcp
wakeup would be 'now' and could result in a busy loop in the rtcp
processing.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1618>
2024-06-24 13:13:28 +00:00
Matthew Waters
84a9f9c61f rtp/source: use extended sequence number helper
Instead of rolling our own

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1618>
2024-06-24 13:13:28 +00:00
Sebastian Dröge
47d62b6d78 Update for new clone/closure macro syntax
Also fix various weak/strong references in the webrtc plugin, and make
sure to pass the object to debug log functions in every place.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1625>
2024-06-21 11:54:58 +03:00
Sebastian Dröge
90e926def4 deny: Add override for older system-deps version
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1625>
2024-06-21 11:07:22 +03:00
Nirbheek Chauhan
345edeb947 meson: Printing a list is only available with meson >=1.3
Fixes https://gitlab.freedesktop.org/tpm/gstreamer/-/jobs/60108579

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1632>
2024-06-20 14:54:11 +05:30
Sebastian Dröge
a7764ff033 Update CHANGELOG.md for 0.12.7
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1631>
2024-06-19 21:03:33 +03:00
Sebastian Dröge
9b323a6519 Use Option::is_some_and(...) instead of Option::map_or(false, ...)
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1630>
2024-06-19 13:03:37 +00:00
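
The mechanical change, as a tiny illustration (Option::is_some_and is stable since Rust 1.70):

```
fn has_name(name: Option<&str>, wanted: &str) -> bool {
    // Before: name.map_or(false, |n| n == wanted)
    name.is_some_and(|n| n == wanted)
}
```
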
Sebastian Dröge
23d998a1db Slightly improve code making use of element factories retrieved from an element
We can use `is_some_and(...)` instead of `map_or(false, ...)`.

Also in a few places the factory was retrieved multiple times, one time
with unwrapping and another time with handling the `None` case
correctly. Instead of unwrapping, move code to handle the `None` case.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1630>
2024-06-19 13:03:37 +00:00
Arun Raghavan
8f96509f03 aws: s3: Enable tests again
We lost the environment variable checks during the addition of the
putobjectsink tests, which caused failures on MR branches.

It would be nicer to use some other mechanism to validate the tests can
run, so we don't count on only the environment, but for now this will
have to do.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1629>
2024-06-18 11:58:43 -04:00
Sebastian Dröge
59bada0a9f deny: Add another override for hermit-abi
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1622>
2024-06-18 10:27:27 +03:00
Sebastian Dröge
743ab29ba8 Update Cargo.lock and MSRV to 1.71
cea608-types requires that now because it updated the env_logger
dependency. As a result, we can also update it here now.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1622>
2024-06-18 10:27:27 +03:00
Sebastian Dröge
970d1c9afd Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1622>
2024-06-18 10:27:27 +03:00
Sebastian Dröge
5aedcab32f Revert "aws: s3: Re-enable tests"
This reverts commit b4b56eb282.
The tests are still failing.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1624>
2024-06-18 08:50:07 +03:00
Sebastian Dröge
4677948a82 rtp: av1pay: Derive Default trait for the state instead of manual implementation
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1624>
2024-06-18 08:07:24 +03:00
Sebastian Dröge
d357a63bf9 rtp: av1pay: Correctly use N flag for marking keyframes
The "first packet of a coded video sequence" means that this should be
the first packet of a keyframe that comes together with a sequence
header, not the first packet of a new frame.

Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/558

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1624>
2024-06-18 08:06:59 +03:00
Sebastian Dröge
5cd9e34265 rtp: av1pay: Correctly skip over ignored OBUs
The reader is already after the header at this point so only the OBU
content has to be skipped.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1624>
2024-06-18 08:06:59 +03:00
Sebastian Dröge
bbe38b9599 rtp: av1: Drop padding OBUs too like Chrome does
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1624>
2024-06-18 08:06:59 +03:00
Arun Raghavan
b4b56eb282 aws: s3: Re-enable tests
These seem to have stopped working due to bad/rotated creds. Should work
fine now.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1623>
2024-06-17 06:08:18 -04:00
Sebastian Dröge
343680ffea rtp: av1depay: Don't return an error if parsing a packet fails
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1612>
2024-06-14 13:13:21 +00:00
Sebastian Dröge
477855789d rtp: av1depay: Also log warnings on errors
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1612>
2024-06-14 13:13:21 +00:00
Sebastian Dröge
93c9821cba rtp: av1depay: Drop unusable packets as early as possible
Otherwise they would pile up until a discontinuity or until we can
actually output something.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1612>
2024-06-14 13:13:21 +00:00
Sebastian Dröge
0ca4a3778a rtp: av1depay: Parse internal size fields of OBUs and handle them
The spec recommends not including them in the RTP packets, but it
is valid to include them. Pion is including them.

When parsing the size fields also make sure to only take that much of a
payload unit and to skip any trailing data (which should not exist in
the first place).

Pion is also currently storing multiple OBUs in a single payload unit,
which is not allowed by the spec but can be easily handled with this
code now.

Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/560

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1612>
2024-06-14 13:13:21 +00:00
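For context, the internal OBU size field mentioned above is a leb128-encoded byte count that follows the OBU header. A minimal sketch of decoding it (illustrative only, not the depayloader's code):

```rust
// Decode an AV1 leb128 value; returns (value, bytes consumed).
fn read_leb128(data: &[u8]) -> Option<(u64, usize)> {
    let mut value = 0u64;
    for (i, &byte) in data.iter().take(8).enumerate() {
        value |= u64::from(byte & 0x7f) << (i * 7);
        if byte & 0x80 == 0 {
            return Some((value, i + 1));
        }
    }
    None // truncated or over-long encoding
}
```

With the size known, only that many bytes of the payload unit belong to the OBU and any trailing data can be skipped, which is what the commit describes.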
Sebastian Dröge
69c3c2ae46 Fix various new clippy 1.79 warnings
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1620>
2024-06-14 08:33:49 +03:00
Sanchayan Maity
cd47bf2f04 threadshare: Handle end of stream for sources
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1581>
2024-06-12 18:15:31 +05:30
Nirbheek Chauhan
6538803cf6 meson: Handle features needed only by examples separately
Currently we incorrectly require gtk4 to build the fallbackswitch, livesync,
togglerecord plugins when the examples option is allowed.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1604>
2024-06-10 18:50:05 +00:00
Nirbheek Chauhan
4c9ed330c8 meson: Actually build plugin examples
This broke in 8b5a398135.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1604>
2024-06-10 18:50:05 +00:00
Nirbheek Chauhan
7f16fd7736 meson: Fix gtk4 plugin build on linux
dmabuf feature needs the wayland feature too.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1604>
2024-06-10 18:50:05 +00:00
Nirbheek Chauhan
3e4330686f meson: Only enable gtk4 examples when gtk4 is found
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1604>
2024-06-10 18:50:05 +00:00
Nirbheek Chauhan
3b6832724f meson: Only enable the gtk4 plugin when deps are found
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1604>
2024-06-10 18:50:05 +00:00
Nirbheek Chauhan
968e0fddb9 meson: Fix plugin requirement checking and add logging
We were silently skipping plugins that didn't find a required feature,
even if the plugin option was enabled.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1604>
2024-06-10 18:50:05 +00:00
Nirbheek Chauhan
39f466f2c6 meson: Fix typo in gstreamer-gl dep fetching
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1604>
2024-06-10 18:50:05 +00:00
Nirbheek Chauhan
4eed615871 meson: Make gstreamer-gl dependency optional
Minimal systems like docker containers may not have GL

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1604>
2024-06-10 18:50:05 +00:00
Sebastian Dröge
3d4d785a2a webrtchttp: Fix race condition when unlocking
It is possible that there is no cancellable yet when unlock() is
called; a new future executed after that would then have no
information that it is not supposed to run at all.

To solve this remember if cancellation should happen and reset this
later.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1602>
2024-06-10 07:38:29 +00:00
Sebastian Dröge
51f6d3986f aws: Fix race condition when unlocking
It is possible that there is no cancellable yet when unlock() is
called; a new future executed after that would then have no
information that it is not supposed to run at all.

To solve this remember if unlock() was called and reset this in
unlock_stop().

Also implement actual unlocking in s3hlssink.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1602>
2024-06-10 07:38:29 +00:00
Sebastian Dröge
00aaecad07 quinn: Fix race condition when unlocking
It is possible that there is no cancellable yet when unlock() is
called; a new future executed after that would then have no
information that it is not supposed to run at all.

To solve this remember if unlock() was called and reset this in
unlock_stop().

Also actually implement unlock() / unlock_stop() for the sink, and don't
cancel in stop() as unlock() / unlock_stop() would've been called before
that already.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1602>
2024-06-10 07:38:29 +00:00
Sebastian Dröge
c42040fbb8 spotifyaudiosrc: Fix race condition when unlocking
It is possible that there is no cancellable yet when unlock() is
called; the setup task started after that would then simply run and be
waited on instead of not being run at all.

To solve this, remember if unlock() was called and reset this in
unlock_stop().

Also make sure to not keep the abort handle locked while waiting,
otherwise cancellation would never actually work.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1602>
2024-06-10 07:38:29 +00:00
Sebastian Dröge
9945b702b8 reqwesthttpsrc: Fix race condition when unlocking
It is possible that there is no cancellable yet when unlock() is
called; a new future executed after that would then have no
information that it is not supposed to run at all.

To solve this remember if unlock() was called and reset this in
unlock_stop().

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1602>
2024-06-10 07:38:29 +00:00
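The five unlock race-condition commits above all apply the same pattern; a condensed sketch of it (struct and field names are assumptions):

```rust
use gio::prelude::*;
use std::sync::Mutex;

#[derive(Default)]
struct CancelState {
    cancelled: bool,
    cancellable: Option<gio::Cancellable>,
}

struct Source {
    state: Mutex<CancelState>,
}

impl Source {
    // unlock(): remember that we were asked to cancel, even if no
    // cancellable exists yet, and cancel the current one if there is one.
    fn unlock(&self) {
        let mut state = self.state.lock().unwrap();
        state.cancelled = true;
        if let Some(c) = &state.cancellable {
            c.cancel();
        }
    }

    // unlock_stop(): reset the flag so later futures may run again.
    fn unlock_stop(&self) {
        self.state.lock().unwrap().cancelled = false;
    }

    // Before starting a new future, check the flag instead of assuming
    // that a cancellable was already around when unlock() happened.
    fn prepare_future(&self) -> Result<gio::Cancellable, gst::FlowError> {
        let mut state = self.state.lock().unwrap();
        if state.cancelled {
            return Err(gst::FlowError::Flushing);
        }
        let cancellable = gio::Cancellable::new();
        state.cancellable = Some(cancellable.clone());
        Ok(cancellable)
    }
}
```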
Sebastian Dröge
f68655b5e2 Update for gst::BufferList API changes
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1610>
2024-06-08 09:58:10 +03:00
Sebastian Dröge
aaccc6e7f1 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1610>
2024-06-07 20:23:13 +03:00
Jordan Petridis
f30cb2b56c video/gtk4: Add --features to the flatpak example
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1608>
2024-06-07 10:54:05 +00:00
Mathieu Duponchelle
7cec628c43 transcriberbin: make sure to always record pad property changes
When the pad isn't parented yet we should still record user choices,
either in our settings or in our state.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1593>
2024-06-06 15:42:21 +00:00
Mathieu Duponchelle
0e85973e94 transcriberbin: fix regression with > 1 translation languages
By making sure to expose uniquely named pads on the inner transcription
bins.

Fixes: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/552
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1593>
2024-06-06 15:42:21 +00:00
Sebastian Dröge
30252a1b2e ndi: Add support for loading NDI SDK v6
The library name and environment variable name have changed but the ABI
is completely compatible.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1607>
2024-06-06 14:51:09 +00:00
Matthew Waters
1e964233c6 ci: run tests with RUST_BACKTRACE=1
Produces backtraces which would allow some initial debugging on hard to
find issues.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1606>
2024-06-06 14:02:55 +00:00
Angelo Verlain
c9ac553cfe gtk4: update flatpak integration code
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1600>
2024-06-06 13:08:19 +00:00
Matthew Waters
260b04a1cf rtpbin2: protect against adding with overflow
If jitter is really bad, then this calculation may overflow.  Protect
against that.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1605>
2024-06-06 11:43:26 +00:00
Sebastian Dröge
ba70bb1154 deny: Add override for older tungstenite
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1603>
2024-06-06 10:34:12 +00:00
Sebastian Dröge
85c38107cf webrtc: Update to async-tungstenite 0.26
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1603>
2024-06-06 10:34:12 +00:00
Sanchayan Maity
8171a00943 net/quinn: Fix pad template naming typo
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1601>
2024-06-05 13:44:40 +05:30
Tim-Philipp Müller
ab2f5e3d8d rtp: ac3: add some unit tests
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1586>
2024-06-01 12:43:27 +00:00
Tim-Philipp Müller
2b68920f82 rtp: tests: add possibility to make input live
.. for payloaders that behave differently with live
and non-live inputs (e.g. audio payloaders which by
default will pick different aggregation modes based
on that).

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1586>
2024-06-01 12:43:27 +00:00
Tim-Philipp Müller
6597ec84eb rtp: tests: add possibility to check duration of depayloaded buffers
.. and clarify an expect panic message.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1586>
2024-06-01 12:43:27 +00:00
Tim-Philipp Müller
6b628485c5 rtp: Add AC-3 RTP payloader/depayloader
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1586>
2024-06-01 12:43:27 +00:00
Tamas Levai
802ff6a67c net/quinn: Make QUIC role configurable
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1575>
2024-05-31 23:20:38 +02:00
Francisco Javier Velázquez-García
8fc652f208 webrtcsink: Refactor value retrieval to avoid lock poisoning
When setting an incorrect property name in settings,
start_stream_discovery_if_needed would panic because it attempts to
unwrap a poisoned lock for settings.

This refactor avoids that situation.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1594>
2024-05-31 08:10:23 +00:00
Francisco Javier Velázquez-García
568e8533fa webrtcsink: Fix typo in property name for av1enc
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1594>
2024-05-31 08:10:23 +00:00
Sebastian Dröge
91bc39367b deny: Add another override for librespot for nix 2024-05-31 10:06:14 +03:00
Arun Raghavan
04e9e5284c webrtc: signaller: A couple of minor doc fixups
The expectation is `Returns:`, not `Return:`

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1525>
2024-05-30 22:16:46 +03:00
Arun Raghavan
1c54c77840 webrtcsink: Add a mechanism for SDP munging
Unfortunately, server implementations might have odd SDP-related quirks,
so let's allow clients a way to work around these oddities themselves.
For now, this means that a client can fix up the H.264 profile-level-id
as required by Twitch (whose media pipeline is more permissive than the
WHIP implementation).

Fixes: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/516
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1525>
2024-05-30 22:16:46 +03:00
Taruntej Kanakamalla
83f76280f5 net/webrtc: Example for whipserver
rudimentary sample to test multiple WHIP client connections

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1339>
2024-05-29 21:03:27 +00:00
Taruntej Kanakamalla
712d4757c3 net/webrtc/whip_signaller: multiple client support in the server
- generate a new session id for every new client and use the session id
in the resource url

- remove the producer-peer-id property in the WhipServer signaler as it
is redundant to have producer id in a session having only one producer

- read the 'producer-peer-id' property on the signaller conditionally:
use it if it exists, else use the session id as the producer id

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1339>
2024-05-29 21:03:27 +00:00
Taruntej Kanakamalla
de726ca8d2 net/webrtc: multi producer support in webrtcsrc
- Add a new structure Session
  - manage each producer using a session
  - avoid sending EOS when a session terminates; instead keep running,
    waiting for any new producer to connect

- Maintain a bin element per session
  - each session bin encapsulates webrtcbin and the decoder if needed
    as well as the parser and filter if requested by the application
    (through request-encoded-filter)
  - this will be helpful to clean up the session's respective elements
    when the corresponding producer terminates the session

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1339>
2024-05-29 21:03:27 +00:00
Seungha Yang
ebdcc403cf transcriberbin: Fix mux-method=cea708
* Update "translation-languages" property to include G_PARAM_CONSTRUCT
so that it can be applied to initial state.

* Change the default "translation-languages" value to be None instead of a
cea608-specific one. Transcriberbin will be able to configure the initial
state depending on the selected mux method if "translation-languages" is
unspecified.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1589>
2024-05-30 04:40:09 +09:00
Matthew Waters
45800d7636 tttocea708: ensure periodic sync points in roll up mode
Otherwise, without the relevant DefineWindow, a receiver cannot
begin to display the captions from the middle of a stream.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1591>
2024-05-29 11:15:10 +00:00
Sebastian Dröge
a7418fb483 rtp: Use released version of rtcp-types 2024-05-29 10:30:40 +03:00
Matthew Waters
df32e1ebfa rtpsend: ensure only a single rtcp pad push
Otherwise, multiple rtcp packets may be produced out
of order.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 19:58:09 +10:00
Matthew Waters
525179f666 rtpbin2: handle ssrc collisions
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 19:58:09 +10:00
Nirbheek Chauhan
9485265769 rtspsrc2: Update rtpbin2 support to use rtprecv and rtpsend
USE_RTPBIN2 is now USE_RTP2 because there is no "rtpbin2" now.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 19:58:09 +10:00
Matthew Waters
1600d3b055 rtpbin2: split send and receive halves into separate elements
There are now two elements, rtpsend and rtprecv, that represent the two
halves of an rtpsession.  This avoids the potential pipeline loop if two
peers are sending/receiving data towards each other.  The two halves can
be connected by setting the rtp-id property on each element to the same
value and they will behave like a combined rtpbin-like element.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 19:58:09 +10:00
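A small sketch of pairing the two halves as described above; the `rtp-id` value being a plain string is an assumption:

```rust
fn make_rtp_halves() -> Result<(gst::Element, gst::Element), Box<dyn std::error::Error>> {
    gst::init()?;
    // Both elements share the same rtp-id and then behave like one combined
    // rtpbin-like element.
    let send = gst::ElementFactory::make("rtpsend")
        .property("rtp-id", "session-0")
        .build()?;
    let recv = gst::ElementFactory::make("rtprecv")
        .property("rtp-id", "session-0")
        .build()?;
    Ok((send, recv))
}
```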
Matthew Waters
0121d78482 rtpbin2: expose session signals for new/bye ssrc
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 19:58:09 +10:00
Matthew Waters
d480c6c2d3 rtpbin2/config: add stats to session GObject
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 19:58:09 +10:00
Matthew Waters
7d5789032a rtpbin2/config: add a new-ssrc signal
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 19:58:09 +10:00
Matthew Waters
06f40e72cb rtpbin2: implement a session configuration object
Currently only contains pt-map

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 19:58:09 +10:00
Matthew Waters
48e7a2ed06 jitterbuffer: handle flush-start/stop
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 19:58:09 +10:00
Matthew Waters
66306e32f2 jitterbuffer: remove mpsc channel for every packet
It is very slow.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 19:58:09 +10:00
Mathieu Duponchelle
327f563e80 jitterbuffer: implement support for serialized events / queries
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 19:58:09 +10:00
Mathieu Duponchelle
74ec83a0ff rtpbin2: implement and use synchronization context
Co-authored-by: Sebastian Dröge <sebastian@centricular.com>
Co-Authored-By: Matthew Waters <matthew@centricular.com>
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 19:58:09 +10:00
Mathieu Duponchelle
1865899621 rtpbin2: implement jitterbuffer
The jitterbuffer implements both reordering and duplicate packet
handling.

Co-Authored-By: Sebastian Dröge <sebastian@centricular.com>
Co-Authored-By: Matthew Waters <matthew@centricular.com>
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 17:35:41 +10:00
Sebastian Dröge
2b4ec75bc5 rtpbin2: Add support for receiving rtcp-mux packets
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 17:35:41 +10:00
Sebastian Dröge
e09ad990fa rtpbin2: Implement support for reduced size RTCP (RFC 5506)
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 17:35:41 +10:00
Sebastian Dröge
1e4a966c92 rtpbin2: Add support for sending NACK/PLI and FIR
Co-Authored-By: Matthew Waters <matthew@centricular.com>
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 17:35:41 +10:00
Sebastian Dröge
66c9840ad8 rtpbin2: Add handling for receiving NACK/PLI and FIR
Co-Authored-By: Matthew Waters <matthew@centricular.com>
Co-Authored-By: Mathieu Duponchelle <mathieu@centricular.com>
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 17:35:41 +10:00
Matthew Waters
2c86f18a99 rtpbin2: add support for RFC 4585 (RTP/AVPF)
Implements the timing rules for RTP/AVPF

Co-Authored-By: Sebastian Dröge <sebastian@centricular.com>
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 17:35:41 +10:00
Matthew Waters
27ad26c258 rtp: Initial rtpbin2 element
Can send and receive one or more RTP sessions containing multiple
pt/ssrc combinations.

Demultiplexing happens internally instead of relying on separate
elements.

Co-Authored-By: François Laignel <francois@centricular.com>
Co-Authored-By: Mathieu Duponchelle <mathieu@centricular.com>
Co-Authored-By: Sebastian Dröge <sebastian@centricular.com>
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1426>
2024-05-28 17:35:41 +10:00
Sebastian Dröge
984a9fe5ff rtp: Don't restrict payload types for payloaders
WebRTC uses payload types 35-63 as dynamic payload types too to be able
to place more codec variants into the SDP offer.

Instead of allowing just certain payload types, completely remove any
restrictions and let the user decide. There's technically nothing wrong
with using any payload type, especially when using the encoding-name.

Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/551

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1587>
2024-05-27 09:34:16 +00:00
Liam
b4fd6cf362 aws: Add system-defined metadata options to both sinks
Add to awss3sink and awss3putobjectsink elements the following
parameters which are set on the uploaded S3 objects:

* cache-control;
* content-encoding; and
* content-language

Bugfix: Set the content-type and content-disposition values in the S3
putobject call. Previously the params were defined on the element but
unused.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1585>
2024-05-27 10:25:22 +03:00
Tim-Philipp Müller
4f74cb7958 rtp: klv: add test for fragmented payloads with packet loss
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1580>
2024-05-26 12:34:44 +03:00
Tim-Philipp Müller
b6e24668a7 rtp: klv: add unit test with some packet loss
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1580>
2024-05-26 12:34:44 +03:00
Tim-Philipp Müller
92a1e222f4 rtp: tests: add functionality to drop RTP packets after payloading
Add ExpectedPacket::drop() to flag RTP packets that should not
be forwarded to the depayloader.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1580>
2024-05-26 12:34:44 +03:00
Tim-Philipp Müller
de71e9dadd rtp: tests: print rtp timestamp mismatch minus the initial offset
Unit tests specify a 0-based offset, so printing that plus the
random initial offset on failure is just needlessly confusing.
Subtract the initial offset when printing expected/actual
values. The real values are still printed as part of the assert.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1580>
2024-05-26 12:34:44 +03:00
Tim-Philipp Müller
be7da027f8 rtp: klv: add some basic tests
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1580>
2024-05-26 12:34:44 +03:00
Tim-Philipp Müller
1e33926dc5 fixup: klv payloader indentation
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1580>
2024-05-26 12:34:44 +03:00
Tim-Philipp Müller
c2f67bd3c9 fixup: klv depay: debug log indentation
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1580>
2024-05-26 12:34:44 +03:00
Tim-Philipp Müller
e7d0e0702a fixup: payloader
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1580>
2024-05-26 12:34:44 +03:00
Tim-Philipp Müller
566e6443f4 rtp: Add KLV RTP payloader/depayloader
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1580>
2024-05-25 20:21:50 +03:00
François Laignel
4259d284bd webrtc: add android webrtcsrc example
This commit adds an Android `webrtcsrc` based example with the following
features:

* A first view allows retrieving the producer list from the signaller (peer ids
  are uuids which are too long to tap, especially using an onscreen keyboard).
* Selecting a producer opens a second view. The first available video stream is
  rendered on a native Surface. All the audio streams are rendered using
  `autoaudiosink`.

Available Settings:

* Signaller URI.
* A toggle to prefer hardware decoding for OPUS, otherwise the app defaults to
  raising `opusdec`'s rank. Hardware decoding was moved aside since it was found
  to crash the app on all tested devices (2 smartphones, 1 tv).

**Warning**: in order to ease testing, this demonstration application enables
unencrypted network communication. See `AndroidManifest.xml`.

The application uses the technologies currently proposed by Android Studio when
creating a new project:

* Kotlin as the default language, which is fully interoperable with Java and
  uses the same SDK.
* gradle 8.6.
* kotlin dialect for gradle. The structure is mostly the same as the previously
  preferred dialect, for which examples can be found online readily.
* However, JNI code generation still uses Makefiles (instead of CMake) due to
  the need to call [`gstreamer-1.0.mk`] for `gstreamer_android` generation.
  Note: on-going work on that front:
  - https://gitlab.freedesktop.org/gstreamer/cerbero/-/merge_requests/1466
  - https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/6794

Current limitations:

* x86 support is currently discarded as `gstreamer_android` libs generation
  fails (observed with `gstreamer-1.0-android-universal-1.24.3`).
* A selector could be added to let the user choose the video streams and
  possibly decide whether to render all audio streams or just select one.

Nice to have:

* Support for the synchronization features of the `webrtc-precise-sync-recv`
  example (NTP clock, RFC 7273).
* It could be nice to use Rust for the specific native code.

[`gstreamer-1.0.mk`]: https://gitlab.freedesktop.org/gstreamer/cerbero/-/blob/main/data/ndk-build/gstreamer-1.0.mk

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1578>
2024-05-24 16:14:13 +00:00
Sebastian Dröge
58e91c154c rtp: basedepay: Reset last used ext seqnum on discontinuities
The ext seqnum counting is reset too so keeping the old one around will
cause problems with timestamping of the next outgoing buffer.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1584>
2024-05-24 10:23:06 +03:00
Sebastian Dröge
28bd6f07a2 Update CHANGELOG.md for 0.12.6 2024-05-23 17:27:21 +03:00
Sebastian Dröge
b1ad123595 gtk4: Fix Python example in the non-GL code path 2024-05-23 16:15:52 +03:00
cdelguercio
c99cabfbc5 webrtcsink: Add VP9 parser after the encoder for VP9 too
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1572>
2024-05-23 10:16:59 +03:00
cdelguercio
f5a7de9dc3 webrtcsink: Support av1 via nvav1enc, av1enc, and rav1enc
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1572>
2024-05-23 10:16:59 +03:00
Sebastian Dröge
b12da2c543 deny: Update with itertool 0.12 override
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1579>
2024-05-20 14:36:39 +03:00
Sebastian Dröge
02cd2c42fd Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1579>
2024-05-20 14:33:23 +03:00
Sebastian Dröge
dcc0b47349 rtp: basepay: Fix header extension negotiation
Only configure header extensions from the source pad caps if they already
exist there, otherwise the configuration will fail.
Extensions that are added via the signals might not exist in the source
pad caps yet and would be added later.

Also, if configuring an existing extension from the new caps fails,
remove it and try to request a new extension for it.

Additionally don't remove extensions from the caps that can't be
provided. No header extensions for them would be added to the packets,
but that's not a problem. Removing them on the other hand would cause
negotiation to fail. This only affects extensions that are already
included in the caps.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1577>
2024-05-20 07:53:50 +00:00
Sebastian Dröge
0d33077df6 rtp: basedepay: Clean up header extension negotiation
If configuring an existing extension from the new caps fails, remove it
and try to request a new extension for it.

Also remove all extensions from the list that are not provided in the
caps, instead of passing RTP packets to all of them anyway.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1577>
2024-05-20 07:53:50 +00:00
Tim-Philipp Müller
16608d2541 rtp: opus: add multichannel depay/pay test
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1571>
2024-05-18 09:29:29 +00:00
Tim-Philipp Müller
bab3498c6a rtp: opus: add multichannel pay/depay test
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1571>
2024-05-18 09:29:29 +00:00
Tim-Philipp Müller
72006215cb rtp: tests: add run_test_pipeline_full() that checks output caps too
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1571>
2024-05-18 09:29:29 +00:00
Tim-Philipp Müller
10e0294d5a rtp: opus: fix payloader caps query handling and add tests
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1571>
2024-05-18 09:29:29 +00:00
Tim-Philipp Müller
61523baa7b rtp: opus: add minimal depayload / re-payload test
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1571>
2024-05-18 09:29:29 +00:00
Tim-Philipp Müller
6f871e6ce2 rtp: opus: add simple payload / depayload test
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1571>
2024-05-18 09:29:29 +00:00
Tim-Philipp Müller
92c0cf1285 rtp: opus: add test for payloader dtx packet handling
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1571>
2024-05-18 09:29:29 +00:00
Tim-Philipp Müller
2585639054 rtp: Add Opus RTP payloader/depayloader
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1571>
2024-05-18 09:29:29 +00:00
Sebastian Dröge
0215339c5a Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1576>
2024-05-17 07:50:51 +00:00
Sebastian Dröge
539000574b aws: Update to base32 0.5
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1576>
2024-05-17 07:50:51 +00:00
Robert Ayrapetyan
bac5845be1 webrtc: add support for insecure tls connections
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1553>
2024-05-16 19:34:57 +00:00
Mathieu Duponchelle
c282bc1bca examples/dash_vod: compare durations to the millisecond
Otherwise when the segment durations aren't as clean cut as in the
example, multiple segments with the exact same duration in milliseconds
will get output, even though they could have been repeated.

Fix this so that people copying this code don't encounter the bug.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1574>
2024-05-15 06:28:14 +00:00
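The comparison boils down to rounding both durations to milliseconds before checking equality; a minimal sketch of that idea:

```rust
// Two segment durations count as "the same" if they match at millisecond
// granularity, so near-identical durations can be collapsed into repeats.
fn same_duration_ms(a: gst::ClockTime, b: gst::ClockTime) -> bool {
    a.mseconds() == b.mseconds()
}
```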
Martin Nordholts
9a7f37e2b7 rtpgccbwe: Support linear regression based delay estimation
In our tests, the slope (found with linear regression) on a
history of the (smoothed) accumulated inter-group delays
gives a more stable congestion control. In particular,
low-end devices become less sensitive to spikes in
inter-group delay measurements.

This flavour of delay based bandwidth estimation with Google
Congestion Control is also what Chromium is using.

To make it easy to experiment with the new estimator, as
well as add support for new ones in the future, also add
infrastructure for making delay estimator flavour selectable
at runtime.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1566>
2024-05-14 16:25:48 +00:00
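To make the idea concrete, here is a standalone sketch of estimating the delay trend as a least-squares slope over (arrival time, smoothed accumulated inter-group delay) samples; this is the underlying technique, not the element's code:

```rust
// A positive slope means queuing delay keeps growing, i.e. likely congestion.
fn delay_slope(samples: &[(f64, f64)]) -> Option<f64> {
    if samples.len() < 2 {
        return None;
    }
    let n = samples.len() as f64;
    let mean_x = samples.iter().map(|(x, _)| x).sum::<f64>() / n;
    let mean_y = samples.iter().map(|(_, y)| y).sum::<f64>() / n;
    let num: f64 = samples.iter().map(|(x, y)| (x - mean_x) * (y - mean_y)).sum();
    let den: f64 = samples.iter().map(|(x, _)| (x - mean_x).powi(2)).sum();
    (den != 0.0).then(|| num / den)
}
```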
Martin Nordholts
71e9c2bb04 rtpgccbwe: Also log self.measure in overuse_filter()
Also log `self.measure` in overuse_filter() since tracking
`self.measure` over time helps a lot in making sense of
`self.estimate` (and `amplified_estimate`).

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1566>
2024-05-14 16:25:48 +00:00
Martin Nordholts
d9aa0731f4 rtpgccbwe: Rename variable t to amplified_estimate
We normally multiply `self.estimate` with `MAX_DELTAS` (60).
Rename the variable that holds the result of this
calculation to `amplified_estimate` to make the distinction
clearer.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1566>
2024-05-14 16:25:48 +00:00
Sebastian Dröge
49d3dd17a2 gtk4: Clean up Python example
It's now more or less equivalent to the Rust example.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1573>
2024-05-13 10:06:32 +03:00
Tamas Levai
71cd80f204 net/quinn: Enable client to keep QUIC conn alive
Co-authored-by: Felician Nemeth <nemethf@tmit.bme.hu>
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1568>
2024-05-11 08:51:00 +02:00
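A sketch of the underlying quinn setting; how the element exposes this as a property is not shown here and is an assumption:

```rust
use std::{sync::Arc, time::Duration};

// quinn clients only send keep-alive packets if an interval is configured;
// otherwise an idle connection may time out.
fn transport_with_keep_alive(secs: u64) -> Arc<quinn::TransportConfig> {
    let mut transport = quinn::TransportConfig::default();
    transport.keep_alive_interval(Some(Duration::from_secs(secs)));
    Arc::new(transport)
}
```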
Rafael Caricio
5549dc7a15 fmp4mux: Support AV1 packaging in the fragmented mp4 plugin
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1544>
2024-05-10 20:59:49 +00:00
Sebastian Dröge
613ed56675 webrtcsink: Add a custom signaller example in Python
This re-implements the default webrtcsink/src signalling protocol in
Python for demonstration purposes.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1569>
2024-05-10 15:59:12 +00:00
Martin Nordholts
a719cbfcc6 rtp: Change RtpBasePay2::ssrc_collision from AtomicU64 to Option<u32>
Rust targets without support for `AtomicU64` are still
somewhat common. Running

    git grep -i 'max_atomic_width: Some(32)' | wc -l

in the Rust compiler repo currently counts to 34 targets.

Change the `RtpBasePay2::ssrc_collision` from `AtomicU64` to
`Mutex<Option<u32>>`. This way we keep support for these
targets.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1562>
2024-05-10 14:23:41 +00:00
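A condensed sketch of the replacement (field and method names are assumed):

```rust
use std::sync::Mutex;

// Works on targets with only 32-bit atomics, unlike an AtomicU64 encoding of
// "no value / some u32 value".
struct PayState {
    ssrc_collision: Mutex<Option<u32>>,
}

impl PayState {
    fn record_collision(&self, new_ssrc: u32) {
        *self.ssrc_collision.lock().unwrap() = Some(new_ssrc);
    }

    fn take_collision(&self) -> Option<u32> {
        self.ssrc_collision.lock().unwrap().take()
    }
}
```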
7d75e263f8 fmp4mux: Add language from tags
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1557>
2024-05-10 13:01:58 +00:00
Martin Nordholts
aabb011f5a rtpgccbwe: Log effective bitrate in more places
While monitoring and debugging rtpgccbwe, it is very helpful
to get continuous values of what it considers the effective
bitrate. Right now such prints will stop coming once the
algorithm stabilizes. Print it in more places so it keeps
coming. Use the same format to make it simpler to extract
the values by parsing the logs.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1567>
2024-05-10 11:56:51 +00:00
Martin Nordholts
e845e3575c rtpgccbwe: Add missing 'ps' suffix to 'kbps' of 'effective bitrate'
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1567>
2024-05-10 11:56:51 +00:00
Sebastian Dröge
f265c3197b Update plugins cache JSON for new CI GStreamer version
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1570>
2024-05-10 14:14:51 +03:00
Sebastian Dröge
e8e173d0d0 webrtc: Update Signallable interface to new interface definition API
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1570>
2024-05-10 14:13:55 +03:00
Sebastian Dröge
f842aff6df Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1570>
2024-05-10 14:09:27 +03:00
Sebastian Dröge
7e09481adc rtp: Add JPEG RTP payloader/depayloader
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1543>
2024-05-10 11:12:49 +03:00
Sebastian Dröge
1b48fb7ae7 deny: Re-add rustls override because the AWS SDK still uses an old version 2024-05-10 09:41:19 +03:00
Sanchayan Maity
fe55acb4c9 net/hlssink3: Refactor out HlsBaseSink & hlscmafsink from hlssink3
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1564>
2024-05-09 21:50:32 +05:30
Tamas Levai
fe3607bd14 net/quinn: Remove dependency locks
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1565>
2024-05-09 16:45:38 +02:00
Tamas Levai
5884c00bd0 net/quinn: Improve stream shutdown process
Co-authored-by: Sanchayan Maity <sanchayan@asymptotic.io>
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1565>
2024-05-09 16:43:26 +02:00
Tamas Levai
13c3db7857 net/quinn: Port to quinn 0.11 and rustls 0.23
Co-authored-by: Felician Nemeth <nemethf@tmit.bme.hu>
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1565>
2024-05-09 13:49:33 +02:00
Martin Nordholts
2b7488a4c8 rtpgccbwe: Log delay and loss target bitrates separately
When debugging rtpgccbwe it is helpful to know if it is
delay-based or loss-based bandwidth estimation that puts a
bound on the current target bitrate, so add logs for that.

To minimize the time we need to hold the state lock, perform
the logging after we have released the state lock.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1561>
2024-05-08 19:12:44 +00:00
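The "log after releasing the lock" idea looks roughly like this; field names and the plain println! are stand-ins for the element's actual state and logging:

```rust
use std::sync::Mutex;

struct State {
    delay_target_bitrate: u32,
    loss_target_bitrate: u32,
}

fn log_targets(state: &Mutex<State>) {
    // Copy the values out while holding the lock as briefly as possible...
    let (delay, loss) = {
        let s = state.lock().unwrap();
        (s.delay_target_bitrate, s.loss_target_bitrate)
    };
    // ...and only log after the lock has been released.
    println!("delay target: {delay} bps, loss target: {loss} bps");
}
```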
Sebastian Dröge
b4576a0074 gtk4: Fix description of the plugin
A paintable is not a widget and that aspect does not belong in the short
description anyway.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1563>
2024-05-07 20:21:03 +03:00
Mathieu Duponchelle
8861fc493b webrtcsink: improve error when no discovery pipeline runs
If for instance no encoder was found or the RTP plugin was missing,
it is possible that no discovery pipeline will run for a given stream.

Provide a more helpful error message for that case.

Fixes: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/534
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1560>
2024-05-06 11:39:48 +00:00
Sanchayan Maity
2bfb6ee016 Add quinn to default-members
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1558>
2024-05-02 16:39:29 +00:00
Sanchayan Maity
edd7c258c8 Add quinn plugin to README
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1558>
2024-05-02 16:39:29 +00:00
Sanchayan Maity
3a3cec96ff net/quinn: Add pipeline example
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1558>
2024-05-02 16:39:29 +00:00
Sanchayan Maity
80f8664564 net/quinn: Use camel case acronym
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1558>
2024-05-02 16:39:29 +00:00
Sebastian Dröge
be3ae583bc Fix new Rust 1.78 clippy warnings
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1559>
2024-05-02 18:36:23 +03:00
Sebastian Dröge
58106a42a9 quinn: Fix up dependencies 2024-05-02 09:59:55 +03:00
Sanchayan Maity
096538989b docs: Add documentation for gst-plugin-quinn
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1036>
2024-05-01 22:30:23 +05:30
Sanchayan Maity
150ad7a545 net/quinn: Use separate property for certificate & private key file
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1036>
2024-05-01 22:30:23 +05:30
Sanchayan Maity
0d2f054c15 Move net/quic to net/quinn
While at it, add this to meson.build.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1036>
2024-05-01 22:30:23 +05:30
Sanchayan Maity
18cf5292b7 net/quic: Fix inconsistencies around secure connection handling
This set of changes implements the below fixes:

- Allow certificates to be specified for client/quicsink
- Secure connection being true on server/quicsrc and false on
  client/quicsink still resulted in a successful connection
  instead of server rejecting the connection
- Using secure connection with ALPN was not working

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1036>
2024-05-01 18:09:16 +05:30
Sanchayan Maity
97d8a79d36 net/quic: Drop private key type property
Use read_all helper from rustls_pemfile and drop the requirement for the
user having to specify the private key type.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1036>
2024-05-01 18:09:16 +05:30
Sanchayan Maity
a306b1ce94 net/quic: Use a custom ALPN string
`h3` does not make sense as the default ALPN, as there likely isn't
going to be an HTTP/3 application layer, especially as our transport
is unidirectional for now. Use a custom string `gst-quinn` instead.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1036>
2024-05-01 18:09:16 +05:30
Sanchayan Maity
22c6a98914 net/quic: Rename to quinnquicsink/src
There might be other QUIC elements in the future based on other
libraries. To prevent namespace collision, namespace the elements
with `quinn` prefix.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1036>
2024-05-01 18:09:16 +05:30
Sanchayan Maity
8b64c734e7 net/quic: Use separate property for address and port
While at it, do not duplicate call to settings lock in property
getter and setter for every property.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1036>
2024-05-01 18:01:49 +05:30
Tamas Levai
befd8d4bd2 net/quic: Allow SSL keylog file for debugging
rustls has a KeyLog implementation that opens a file whose name is
given by the `SSLKEYLOGFILE` environment variable, and writes keys
into it. If SSLKEYLOGFILE is not set, this does nothing.

See
https://docs.rs/rustls/latest/rustls/struct.KeyLogFile.html
https://docs.rs/rustls/latest/rustls/trait.KeyLog.html

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1036>
2024-05-01 18:01:49 +05:30
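A sketch of wiring that up on the rustls side, assuming access to the ClientConfig the element builds:

```rust
use std::sync::Arc;

// KeyLogFile is a no-op unless the SSLKEYLOGFILE environment variable is set,
// in which case TLS secrets are appended to that file for e.g. Wireshark.
fn enable_key_logging(mut config: rustls::ClientConfig) -> rustls::ClientConfig {
    config.key_log = Arc::new(rustls::KeyLogFile::new());
    config
}
```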
Sanchayan Maity
ce930eab5f net/quic: Allow setting multiple ALPN transport parameters
For reference, see
https://datatracker.ietf.org/doc/html/rfc9000#section-7.4
https://datatracker.ietf.org/doc/html/rfc7301

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1036>
2024-05-01 18:01:49 +05:30
Tamas Levai
75b25d011f net/quic: Allow specifying an ALPN transport parameter
See https://datatracker.ietf.org/doc/html/rfc9000#section-7.4.

This controls the Transport Layer Security (TLS) extension for
application-layer protocol negotiation within the TLS handshake.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1036>
2024-05-01 18:01:49 +05:30
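On the rustls side, ALPN protocol ids are plain byte strings and several can be offered at once; a minimal sketch covering the two ALPN commits above (the property-to-config plumbing is assumed):

```rust
// Offer one or more ALPN protocol ids, e.g. `set_alpn(&mut cfg, &["gst-quinn"])`.
fn set_alpn(config: &mut rustls::ClientConfig, protocols: &[&str]) {
    config.alpn_protocols = protocols.iter().map(|p| p.as_bytes().to_vec()).collect();
}
```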
Sanchayan Maity
953f6a3fd7 net: Add QUIC source and sink
To test, run receiver as

```bash
gst-launch-1.0 -v -e quicsrc caps=audio/x-opus use-datagram=true ! opusparse ! opusdec ! audio/x-raw,format=S16LE,rate=48000,channels=2,layout=interleaved ! audioconvert ! autoaudiosink
```

run sender as

```bash
gst-launch-1.0 -v -e audiotestsrc num-buffers=512 ! audio/x-raw,format=S16LE,rate=48000,channels=2,layout=interleaved ! opusenc ! quicsink use-datagram=true
```

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1036>
2024-05-01 18:01:49 +05:30
Robert Mader
8e675de690 gtk4paintablesink: Add some documentation
And sync with `README.md` in order to make the environment variables
`GST_GTK4_WINDOW` and `GST_GTK4_WINDOW_FULLSCREEN` discoverable - and
because it's generally useful.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1555>
2024-04-30 09:59:49 +03:00
Robert Mader
4326c3bfce gtk4paintablesink: Also create window for gst-play
So it can be easily tested with
```
gst-play-1.0 --videosink=gtk4paintablesink ...
```

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1555>
2024-04-29 22:44:56 +02:00
Robert Mader
47b788d44b gtk4paintablesink: Add env var to fullscreen window
For testing purposes with e.g. gst-launch.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1555>
2024-04-29 20:44:05 +00:00
François Laignel
16b0a4d762 rtp: add mp4gpay
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1551>
2024-04-29 13:33:42 +00:00
François Laignel
b588ee59bc rtp: add mp4gdepay
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1551>
2024-04-29 13:33:42 +00:00
François Laignel
5466cafc24 rtp: add mp4apay
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1551>
2024-04-29 13:33:42 +00:00
François Laignel
812fe0a9bd rtp: add mp4adepay
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1551>
2024-04-29 13:33:42 +00:00
Sebastian Dröge
c55c4ca42a Update CHANGELOG.md for 0.12.5 2024-04-29 13:49:17 +03:00
Sebastian Dröge
83bd7be92a deny: Remove syn override 2024-04-29 11:54:38 +03:00
Sebastian Dröge
70397a9f05 Update CHANGELOG.md for 0.12.4 2024-04-29 11:46:19 +03:00
Maksym Khomenko
a87eaa4b79 hrtfrender: use bitmask, not int, to prevent a capsnego failure
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1549>
2024-04-26 20:24:19 +00:00
Philippe Normand
88cbc93338 dav1ddec: Negotiate bt709 colorimetry when values from seq header are unspecified
With unknown range, colorimetry validation would fail in video-info. As our
decoder outputs only YUV formats, BT709 should be a reasonable default.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1548>
2024-04-26 19:35:41 +00:00
Sebastian Dröge
927c3fcdb6 gtk4paintablesink: Update README.md with all the new features
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1547>
2024-04-26 12:29:10 +03:00
Sebastian Dröge
5803904deb gtk4paintablesink: meson: Add auto-detection of GTK4 versions and dmabuf feature
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1547>
2024-04-26 12:29:10 +03:00
Sebastian Dröge
c95e07a897 gtk4paintablesink: Improve scaling logic
If force-aspect-ratio=false then make sure to fully fill the given
width/height with the video frame and avoid rounding errors. This makes
sure that the video is rendered in the exact position selected by the
caller and that graphics offloading is more likely to work.

In other cases and for all overlays, make sure that the calculated
positions are staying inside (0, 0, width, height) as rendering outside
is not allowed by GTK.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1547>
2024-04-26 12:29:10 +03:00
Sebastian Dröge
b42bd3d026 gtk4paintablesink: Add force-aspect-ratio property like in other video sinks
Unlike in other sinks this defaults to false as generally every user of
GDK paintables already ensures that the aspect ratio is kept and the
paintable is laid out in the most optimal way based on the context.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1547>
2024-04-26 12:29:10 +03:00
Sebastian Dröge
3dd800ac77 gtk4paintablesink: Implement child proxy interface
This allows setting properties on the paintable from gst-launch-1.0.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1547>
2024-04-26 12:29:10 +03:00
Sebastian Dröge
c92462b240 gtk4: Implement support for directly importing dmabufs
Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/441

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1547>
2024-04-26 12:29:10 +03:00
Sebastian Dröge
7573caa8e9 rtpgccbwe: Move away from deprecated time::Instant to std::time::Instant
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1554>
2024-04-25 15:37:28 +03:00
Sebastian Dröge
c12585377c Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1554>
2024-04-25 14:46:45 +03:00
Mathieu Duponchelle
17d7997137 transcriberbin: add support for consuming secondary audio streams
In some situations, a translated alternate audio stream for some content
might be available.

Instead of going through transcription and translation of the original
audio stream, it may be preferable for accuracy purposes to simply
transcribe the secondary audio stream.

This MR adds support for doing just that:

* Secondary audio sink pads can be requested as "sink_audio_%u"

* Sometimes audio source pads are added at that point to pass through
  the audio, as "src_audio_%u"

* The main transcription bin now contains per-input stream transcription
  bins. Those can be individually controlled through properties on the
  sink pads, for instance translation-languages can be dynamically set
  per audio stream

* Some properties that originally existed on the main element still
  remain, but are now simply mapped to the always audio sink pad

* Releasing of secondary sink pads is nominally implemented, but not
  tested in states other than NULL

An example launch line for this would be:

```
$ gst-launch-1.0 transcriberbin name=transcriberbin latency=8000 accumulate-time=0 \
      cc-caps="closedcaption/x-cea-708, format=cc_data" sink_audio_0::language-code="es-US" \
      sink_audio_0::translation-languages="languages, transcript=cc3" \
    uridecodebin uri=file:///home/meh/Music/chaplin.mkv name=d \
      d. ! videoconvert ! transcriberbin.sink_video \
      d. ! clocksync ! audioconvert ! transcriberbin.sink_audio \
      transcriberbin.src_video ! cea608overlay field=1 ! videoconvert ! autovideosink \
      transcriberbin.src_audio ! audioconvert ! fakesink \
    uridecodebin uri=file:///home/meh/Music/chaplin-spanish.webm name=d2 \
      d2. ! audioconvert ! transcriberbin.sink_audio_0 \
      transcriberbin.src_audio_0 ! fakesink
```

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1546>
2024-04-25 11:56:01 +02:00
Sebastian Dröge
66030f36ad tracers: Add a pad push durations tracer
This tracer measures the time it takes for a buffer/buffer list push to return.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1506>
2024-04-17 16:20:43 +03:00
Seungha Yang
b3d3895ae7 cea608overlay: Fix black-background setting
Apply the property to newly created renderer

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1542>
2024-04-15 15:38:31 +00:00
Sebastian Dröge
d6a855ff1b rtp: Add VP8/9 RTP payloader/depayloader
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1487>
2024-04-15 14:03:56 +00:00
François Laignel
542030fd82 webrtcsink: don't panic if input CAPS are not supported
If a user constrained the supported CAPS, for instance using `video-caps`:

```shell
gst-launch-1.0 videotestsrc ! video/x-raw,format=I420 ! x264enc \
    ! webrtcsink video-caps=video/x-vp8
```

... a panic would occur which was internally caught without the user being
informed except for the following message which was written to stderr:

> thread 'tokio-runtime-worker' panicked at net/webrtc/src/webrtcsink/imp.rs:3533:22:
>   expected audio or video raw caps: video/x-h264, [...] <br>
> note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace

The pipeline kept running.

This commit converts the panic into an `Error` which bubbles up as an element
`StreamError::CodecNotFound` which can be handled by the application.
With the above `gst-launch`, this terminates the pipeline with:

> [...] ERROR  webrtcsink net/webrtc/src/webrtcsink/imp.rs:3771:gstrswebrtc::
>   webrtcsink::imp::BaseWebRTCSink::start_stream_discovery_if_needed::{{closure}}:<webrtcsink0>
> Error running discovery: Unsupported caps: video/x-h264, [...] <br>
> ERROR: from element /GstPipeline:pipeline0/GstWebRTCSink:webrtcsink0:
>   There is no codec present that can handle the stream's type. <br>
> Additional debug info: <br>
> net/webrtc/src/webrtcsink/imp.rs(3772): gstrswebrtc::webrtcsink::imp::BaseWebRTCSink::
> start_stream_discovery_if_needed::{{closure}} (): /GstPipeline:pipeline0/GstWebRTCSink:webrtcsink0:
> Failed to look up output caps: Unsupported caps: video/x-h264, [...] <br>
> Execution ended after 0:00:00.055716661 <br>
> Setting pipeline to NULL ...

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1540>
2024-04-14 23:09:09 +02:00
François Laignel
3fc38be5c4 webrtc: add missing tokio feature for precise sync examples
Clippy caught the missing feature `signal` which is used by the WebRTC precise
synchronization examples. When running `cargo` `check`, `build` or `clippy`
without `--no-default-features`, this feature was already present due to
dependent crates.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1541>
2024-04-14 16:50:33 +02:00
François Laignel
168af88eda webrtc: add features for specific signallers
When swapping between several development branches, compilation times can be
frustrating. This commit proposes adding features to control which signaller
to include when building the webrtc plugin. By default, all signallers are
included, just like before.

Compiling the `webrtc-precise-sync` examples with `--no-default-features`
reduces compilation to 267 crates instead of 429 when all signallers are
compiled in.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1539>
2024-04-12 19:10:42 +02:00
François Laignel
83d70d3471 webrtc: add RFC 7273 support
This commit implements [RFC 7273] (NTP & PTP clock signalling & synchronization)
for `webrtcsink` by adding the "ts-refclk" & "mediaclk" SDP media attributes to
identify the clock. These attributes are handled by `rtpjitterbuffer` on the
consumer side. They MUST be part of the SDP offer.

When used with an NTP or PTP clock, "mediaclk" indicates the RTP offset at the
clock's origin. Because the payloaders are not instantiated when the offer is
sent to the consumer, the RTP offset is set to 0 and the payloader
`timestamp-offset`s are set accordingly when they are created.

The `webrtc-precise-sync` examples were updated to be able to start with an NTP
(default), a PTP or the system clock (on the receiver only). The rtp jitter
buffer will synchronize with the clock signalled in the SDP offer provided the
sender is started with `--do-clock-signalling` & the receiver with
`--expect-clock-signalling`.

[RFC 7273]: https://datatracker.ietf.org/doc/html/rfc7273

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1500>
2024-04-12 14:18:09 +02:00
Guillaume Desmottes
596a9177ce uriplaylistbin: disable racy test
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1537>
2024-04-12 10:17:40 +00:00
Philippe Normand
2341ee6935 dav1d: Set colorimetry parameters on src pad caps
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1514>
2024-04-12 09:14:34 +00:00
Guillaume Desmottes
61c9cbdc8f uriplaylistbin: allow to change 'iterations' property while playing
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1492>
2024-04-11 11:13:20 +02:00
Guillaume Desmottes
00b56ca845 uriplaylistbin: stop using an iterator to manage the playlist
Will make it easier to update the playlist while playing.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1492>
2024-04-11 10:48:50 +02:00
François Laignel
42158cbcb0 gccbwe: don't log an error when handling a buffer list while stopping
When `webrtcsink` was stopped, `gccbwe` could log an error if it was handling a
buffer list. This commit logs an error only if `push_list()` returned an error
other than `Flushing`.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1535>
2024-04-11 01:29:53 +00:00
Matthew Waters
4dcc44687a cea608overlay: move Send impl lower in the stack
Try to avoid hiding another non-Send object in the State struct.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1519>
2024-04-10 06:55:34 +00:00
Matthew Waters
fbce73f6fc closedcaption: implement cea708overlay element
Can overlay any single CEA-708 service or any single CEA-608 channel.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1519>
2024-04-10 06:55:34 +00:00
Matthew Waters
f0c38621c1 cea608overlay: also print bytes that failed to decode
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1519>
2024-04-10 06:55:34 +00:00
Sanchayan Maity
a3e30b499f aws: Introduce a property to use path-style addressing
The AWS SDK switched its default from path-style to virtual-hosted-style
addressing a while ago. While MinIO supports virtual-hosted-style requests,
path-style requests are its default.

Introduce a property to allow the use of path style addressing if
required.

For more information, see
https://github.com/minio/minio/blob/master/docs/config/README.md#domain
https://docs.aws.amazon.com/AmazonS3/latest/userguide/VirtualHosting.html

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1527>
2024-04-10 00:23:22 +00:00
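A minimal usage sketch in Rust, assuming the element is `awss3sink` and the new property is called `force-path-style` (both names are assumptions here, not taken from the commit):

  // Hedged sketch: element and property names are assumptions.
  // Assumes `use gst::prelude::*;` and that gst::init() already ran.
  let sink = gst::ElementFactory::make("awss3sink")
      .property("force-path-style", true)
      .build()
      .unwrap();
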
François Laignel
2ad452ee89 webrtcsink: don't panic with bitrate handling unsupported encoders
When an encoder was not supported by the `VideoEncoder` `bitrate` accessors, an
`unimplemented` panic would occur which would poison the `state` & `settings`
`Mutex`es, resulting in other threads panicking, notably when entering `end_session()`,
which led to many failures in `BinImplExt::parent_remove_element()` until a
segmentation fault ended the process. This was observed using `vaapivp9enc`.

This commit logs a warning if an encoder isn't supported by the `bitrate`
accessors and silently bypasses `bitrate`-related operations when unsupported.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1534>
2024-04-09 15:48:59 +00:00
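A minimal sketch of the described behaviour, with illustrative encoder matching (this is not the element's actual code; `CAT` is assumed to be the element's debug category):

  // Assumes `use gst::prelude::*;` and a `CAT: gst::DebugCategory` in scope.
  fn set_bitrate(enc: &gst::Element, bitrate_bps: u32) {
      match enc.factory().map(|f| f.name()).as_deref() {
          // Known encoder: apply the bitrate (x264enc takes kbit/s).
          Some("x264enc") => enc.set_property("bitrate", bitrate_bps / 1000),
          // Unknown encoder: warn and skip instead of `unimplemented!()`,
          // which would poison the locks held by the caller.
          _ => gst::warning!(CAT, "Bitrate handling not supported for this encoder, skipping"),
      }
  }
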
Simonas Kazlauskas
5d939498f1 mp4/fmp4: support flac inside the iso (f)mp4 container
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1401>
2024-04-09 14:37:05 +03:00
Taruntej Kanakamalla
f4b086738b webrtcsrc: change the producer-id type for request-encoded-filter
With https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1477
the producer id used while emitting the request-encoded-filter signal
can be None if the msid of the webrtcbin's pad is None.
This might not affect signal handlers written in C, but it
can panic in an existing Rust application whose signal
handler only accepts a valid String as the producer-id
parameter.

So change the parameter type to Option<String> in the signal builder
for the request-encoded-filter signal

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1528>
2024-04-09 06:01:15 +00:00
Tim-Philipp Müller
6b30266145 ci: tag linter and sanity check jobs as "placeholder" jobs
They hardly use any resources and finish almost immediately.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1533>
2024-04-08 23:42:23 +00:00
Tim-Philipp Müller
c8180e714e ci: make sure version Cargo.toml matches version in meson.build
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1532>
2024-04-08 14:46:00 +01:00
Sebastian Dröge
0b356ee203 deny: Update
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1530>
2024-04-06 11:12:16 +03:00
Sebastian Dröge
c2ebb3083a Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1530>
2024-04-06 11:12:16 +03:00
Sebastian Dröge
921938fd20 fmp4mux: Require gstreamer-pbutils 1.20 for the examples
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1530>
2024-04-06 11:10:58 +03:00
Sebastian Dröge
fab246f82e webrtchttp: Update to reqwest 0.12
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1530>
2024-04-06 11:07:16 +03:00
Sebastian Dröge
7757e06e36 onvifmetadataparse: Reset state in PAUSED->READY after pad deactivation
Otherwise the clock id would simply be overridden instead of unscheduled,
and if the streaming thread of the source pad is currently waiting on it,
it could keep waiting for a very long time; deactivating the pad would
then block until that wait finishes.

Also unschedule the clock id on `Drop` of the state, to be on the safe
side and not simply forget about it.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1526>
2024-04-05 15:19:37 +00:00
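A minimal sketch of the `Drop` part (the struct and field name are illustrative, not the element's actual state):

  struct State {
      // Pending clock wait, if any.
      clock_wait: Option<gst::SingleShotClockId>,
  }

  impl Drop for State {
      fn drop(&mut self) {
          if let Some(clock_id) = self.clock_wait.take() {
              // Wake up any streaming thread currently waiting on this id.
              clock_id.unschedule();
          }
      }
  }
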
Taruntej Kanakamalla
70adedb142 net/webrtc: fix inconsistencies in documentation of object names
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1529>
2024-04-05 14:10:35 +00:00
Matthew Waters
7f6929b98d closedcaption: remove libcaption code entirely
It is now unused.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1517>
2024-04-05 19:29:24 +11:00
Matthew Waters
2575013faa cea608tott: use our own CEA-608 frame handling instead of libcaption
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1517>
2024-04-05 19:29:24 +11:00
Matthew Waters
d8fe1c64f1 cea608overlay: use our own CEA-608 caption frame handling instead of libcaption
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1517>
2024-04-05 19:29:24 +11:00
Matthew Waters
fea85ff9c8 closedcaption: use cea608-types for parsing 608 captions instead of libcaption
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1517>
2024-04-05 19:29:24 +11:00
François Laignel
cc43935036 webrtc: add precise synchronization example
This example demonstrates a sender / receiver setup which ensures precise
synchronization of multiple streams in a single session.

[RFC 6051]-style rapid synchronization of RTP streams is available as an option.
See the [Instantaneous RTP synchronization...] blog post for details about this
mode and an example based on RTSP instead of WebRTC.

[RFC 6051]: https://datatracker.ietf.org/doc/html/rfc6051
[Instantaneous RTP synchronization...]: https://coaxion.net/blog/2022/05/instantaneous-rtp-synchronization-retrieval-of-absolute-sender-clock-times-with-gstreamer/

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1463>
2024-04-03 19:10:40 +02:00
Guillaume Desmottes
b5cbc47cf7 webrtc: webrtcsink: improve panic message on unexpected caps during discovery
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1524>
2024-04-02 14:25:58 +02:00
Guillaume Desmottes
35b84d219f webrtc: webrtcsink: set perfect-timestamp=true on audio encoders
Chrome's audio decoder doesn't cope well with imperfect timestamps,
generating noise in the audio.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1524>
2024-04-02 14:25:51 +02:00
Sebastian Dröge
0aabbb3186 fmp4: Update to dash-mpd 0.16
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1523>
2024-03-31 09:36:53 +03:00
Sebastian Dröge
4dd6b102c4 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1523>
2024-03-31 09:35:46 +03:00
Sebastian Dröge
0dd03da91f ci: Ignore env_logger for cargo-outdated
It requires Rust >= 1.71.
2024-03-29 11:03:04 +02:00
Matthew Waters
e1cd52178e transcriberbin: also support 608 inside 708
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1406>
2024-03-28 13:46:28 +11:00
Matthew Waters
55b4de779c tttocea708: add support for writing 608 compatibility bytes
608 compatibility bytes are generated using the same functionality as
tttocea608.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1406>
2024-03-28 13:46:28 +11:00
Matthew Waters
9db4290d2d tttocea608: move functionality to a separate object
Will be used by tttocea708 later.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1406>
2024-03-28 13:46:28 +11:00
Matthew Waters
df30d2fbd3 transcriberbin: add support for generating cea708 captions
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1406>
2024-03-28 13:46:28 +11:00
Matthew Waters
b0cf7e5c81 cea708mux: add element muxing multiple 708 caption services together
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1406>
2024-03-28 13:46:28 +11:00
Matthew Waters
756abbf807 tttocea708: add element converting from text to cea708 captions
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1406>
2024-03-28 13:46:28 +11:00
Martin Nordholts
5d7e068a8b rtpgccbwe: Add increasing_duration and counter to existing gst::log!()
Add `self.increasing_duration` and `self.increasing_counter`
to logs to provide more details of why `overuse_filter()`
determines overuse of network.

To get access to the latest values of those fields we need
to move down the log call. But that is fine, since no other
logged data is modified between the old and new location of
`gst::log!()`.

We do not bother logging `self.last_overuse_estimate` since
that is simply the previously logged value of `estimate`. We
must put the log call before we write the latest value to it
though, in case we want to log it in the future.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1522>
2024-03-27 15:08:23 +00:00
François Laignel
a870d60621 aws: improve error message logs
The `Display` and `Debug` trait for the AWS error messages are not very useful.

- `Display` only prints the high level error, e.g.: "service error".
- `Debug` prints all the fields in the error stack, resulting in hard-to-read
  messages with redundant or unnecessary information. E.g.:

> ServiceError(ServiceError { source: BadRequestException(BadRequestException {
> message: Some("1 validation error detected: Value 'test' at 'languageCode'
> failed to satisfy constraint: Member must satisfy enum value set: [ar-AE,
> zh-HK, en-US, ar-SA, zh-CN, fi-FI, pl-PL, no-NO, nl-NL, pt-PT, es-ES, th-TH,
> de-DE, it-IT, fr-FR, ko-KR, hi-IN, en-AU, pt-BR, sv-SE, ja-JP, ca-ES, es-US,
> fr-CA, en-GB]"), meta: ErrorMetadata { code: Some("BadRequestException"),
> message: Some("1 validation error detected: Value 'test' at 'languageCode'
> failed to satisfy constraint: Member must satisfy enum value set: [ar-AE,
> zh-HK, en-US, ar-SA, zh-CN, fi-FI, pl-PL, no-NO, nl-NL, pt-PT, es-ES, th-TH,
> de-DE, it-IT, fr-FR, ko-KR, hi-IN, en-AU, pt-BR, sv-SE, ja-JP, ca-ES, es-US,
> fr-CA, en-GB]"), extras: Some({"aws_request_id": "1b8bbafd-5b71-4ba5-8676-28432381e6a9"}) } }),
> raw: Response { status: StatusCode(400), headers: Headers { headers:
> {"x-amzn-requestid": HeaderValue { _private: H0("1b8bbafd-5b71-4ba5-8676-28432381e6a9") },
> "x-amzn-errortype": HeaderValue { _private:
> H0("BadRequestException:http://internal.amazon.com/coral/com.amazonaws.transcribe.streaming/") },
> "date": HeaderValue { _private: H0("Tue, 26 Mar 2024 17:41:31 GMT") },
> "content-type": HeaderValue { _private: H0("application/x-amz-json-1.1") },
> "content-length": HeaderValue { _private: H0("315") }} }, body: SdkBody {
> inner: Once(Some(b"{\"Message\":\"1 validation error detected: Value 'test'
> at 'languageCode' failed to satisfy constraint: Member must satisfy enum value
> set: [ar-AE, zh-HK, en-US, ar-SA, zh-CN, fi-FI, pl-PL, no-NO, nl-NL, pt-PT,
> es-ES, th-TH, de-DE, it-IT, fr-FR, ko-KR, hi-IN, en-AU, pt-BR, sv-SE, ja-JP,
> ca-ES, es-US, fr-CA, en-GB]\"}")), retryable: true }, extensions: Extensions {
> extensions_02x: Extensions, extensions_1x: Extensions } } })

This commit adopts the most informative and concise solution I could come up
with to log AWS errors. With the above error case, this results in:

> service error: Error { code: "BadRequestException", message: "1 validation
> error detected: Value 'test' at 'languageCode' failed to satisfy constraint:
> Member must satisfy enum value set: [ar-AE, zh-HK, en-US, ar-SA, zh-CN, fi-FI,
> pl-PL, no-NO, nl-NL, pt-PT, es-ES, th-TH, de-DE, it-IT, fr-FR, ko-KR, hi-IN,
> en-AU, pt-BR, sv-SE, ja-JP, ca-ES, es-US, fr-CA, en-GB]",
> aws_request_id: "a40a32a8-7b0b-4228-a348-f8502087a9f0" }

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1521>
2024-03-26 20:05:32 +01:00
François Laignel
9f27bde36a aws: use fixed BehaviorVersion
Quoting [`BehaviorVersion` documentation]:

> Over time, new best-practice behaviors are introduced. However, these
> behaviors might not be backwards compatible. For example, a change which
> introduces new default timeouts or a new retry-mode for all operations might
> be the ideal behavior but could break existing applications.

This commit uses `BehaviorVersion::v2023_11_09()`, which is the latest
major version at the moment. When a new major version is released, the method
will be deprecated, which will warn us of the new version and let us decide
when to upgrade, after making any required changes. This is safer than using
`latest()`, which would silently use a different major version, possibly
breaking existing code.

[`BehaviorVersion` documentation]: https://docs.rs/aws-config/1.1.8/aws_config/struct.BehaviorVersion.html

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1520>
2024-03-26 17:44:16 +01:00
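A minimal sketch of pinning the behaviour version when building the SDK configuration (assuming the `aws-config` crate in an async context):

  // Pin the SDK behaviour to a fixed major version instead of `latest()`.
  let sdk_config = aws_config::defaults(aws_config::BehaviorVersion::v2023_11_09())
      .load()
      .await;
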
Matthew Waters
e868f81189 gopbuffer: implement element buffering of an entire GOP
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1349>
2024-03-26 15:29:48 +11:00
Sebastian Dröge
bac2e02160 deny: Add overrides for duplicates hyper / reqwest dependencies 2024-03-24 11:30:30 +02:00
Nirbheek Chauhan
ae7c68dbf8 ci: Add a job to trigger a cerbero build, similar to the monorepo
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1513>
2024-03-23 23:02:27 +00:00
Sebastian Dröge
0b11209674 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1510>
2024-03-23 14:33:07 +02:00
Sebastian Dröge
f97150aa58 reqwest: Update to reqwest 0.12
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1510>
2024-03-23 14:30:31 +02:00
Philippe Normand
7e1ab086de dav1d: Require dav1d-rs 0.10
This version depends on libdav1d >= 1.3.0. Older versions are no longer
supported, due to an ABI/API break introduced in 1.3.0.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1512>
2024-03-21 17:33:32 +00:00
Philippe Normand
be12c0a5f7 Fix clippy warnings after upgrade to Rust 1.77
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1512>
2024-03-21 17:33:32 +00:00
Sebastian Dröge
317f46ad97 Update CHANGELOG.md for 0.12.3 2024-03-21 18:55:29 +02:00
François Laignel
c5e7e76e4d webrtcsrc: add do-retransmission property
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1509>
2024-03-21 07:25:30 +00:00
Sebastian Dröge
6556d31ab8 livesync: Ignore another racy test
Same problem as https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/328
2024-03-21 09:27:09 +02:00
François Laignel
5476e3d759 webrtcsink: prevent video-info error log for audio streams
The following error is logged when `webrtcsink` is fed an audio stream:

> ERROR video-info video-info.c:540:gst_video_info_from_caps:
>       wrong name 'audio/x-raw', expected video/ or image/

This commit bypasses `VideoInfo::from_caps` for audio streams.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1511>
2024-03-20 19:57:45 +01:00
François Laignel
cc7b7d508d rtp: gccbwe: don't break downstream assumptions pushing buffer lists
Some elements in the RTP stack assume all buffers in a `gst::BufferList`
correspond to the same timestamp. See in [`rtpsession`] for instance.
This also had the effect that `rtpsession` did not create correct RTCP as it
only saw some of the SSRCs in the stream.

`rtpgccbwe` formed a packet group by gathering buffers in a `gst::BufferList`,
regardless of whether they corresponded to the same timestamp, which broke
synchronization under certain circumstances.

This commit makes `rtpgccbwe` push the buffers as they were received: one by one.

[`rtpsession`]: bc858976db/subprojects/gst-plugins-good/gst/rtpmanager/gstrtpsession.c (L2462)

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1502>
2024-03-20 18:19:14 +00:00
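A minimal sketch of the change, assuming a source pad handle (the names are illustrative, not the element's actual code):

  fn push_buffers_individually(
      srcpad: &gst::Pad,
      list: gst::BufferList,
  ) -> Result<gst::FlowSuccess, gst::FlowError> {
      let mut ret = gst::FlowSuccess::Ok;
      // Push the gathered buffers one by one instead of as a single
      // gst::BufferList, so downstream never sees mixed timestamps in one list.
      for buffer in list.iter_owned() {
          ret = srcpad.push(buffer)?;
      }
      Ok(ret)
  }
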
Sebastian Dröge
2b9272c7eb fmp4mux: Move away from deprecated chrono function
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1503>
2024-03-20 15:37:18 +02:00
Sebastian Dröge
cca3ebf520 rtp: Switch from chrono to time
This allows simplifying quite a bit of code and avoids having to
handle some API deprecations.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1503>
2024-03-20 15:05:39 +02:00
Sebastian Dröge
428f670753 version-helper: Use non-deprecated type alias from toml_edit
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1503>
2024-03-19 18:16:42 +02:00
Sebastian Dröge
fadb7d0a26 deny: Add override for heck 0.4
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1503>
2024-03-19 17:52:32 +02:00
Sebastian Dröge
2a88e29454 originalbufferstore: Update for VideoMetaTransform -> VideoMetaTransformScale rename
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1503>
2024-03-19 17:51:41 +02:00
Sebastian Dröge
bfff0f7d87 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1503>
2024-03-19 17:50:32 +02:00
Guillaume Desmottes
96337d5234 webrtc: allow resolution and framerate input changes
Some changes do not require a WebRTC renegotiation so we can allow
those.

Fix #515

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1498>
2024-03-18 14:52:01 +01:00
Tim-Philipp Müller
eb49459937 rtp: m2pt: add some unit tests
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1493>
2024-03-16 10:07:37 +00:00
Tim-Philipp Müller
ce3960f37f rtp: Add MPEG-TS RTP payloader
Pushes out pending TS packets on EOS.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1493>
2024-03-16 10:07:37 +00:00
Tim-Philipp Müller
9f07ec35e6 rtp: Add MPEG-TS RTP depayloader
Can handle different packet sizes, also see:
https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/1310

Has clock-rate=90000 as spec prescribes, see:
https://gitlab.freedesktop.org/gstreamer/gst-plugins-good/-/issues/691

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1493>
2024-03-16 10:07:37 +00:00
Mathieu Duponchelle
f4366f8b2e gstregex: add support for switches exposed by RegexBuilder
The builder allows, for instance, switching off case sensitivity for
the entire pattern, instead of having to do so inline with `(?i)`.

All the options exposed by the builder at
<https://docs.rs/regex/latest/regex/struct.RegexBuilder.html> can now be
passed as fields of individual commands, snake-cased.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1497>
2024-03-15 17:41:39 +00:00
Guillaume Desmottes
523a46b4f5 gtk4: scale texture position
Fix regression in 0.12 introduced by 3423d05f77

Code from Ivan Molodetskikh suggested on Matrix.

Fix #519

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1499>
2024-03-15 13:43:32 +01:00
Nirbheek Chauhan
6f8fc5f178 meson: Disable docs completely when the option is disabled
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1496>
2024-03-14 15:30:17 +05:30
Guillaume Desmottes
8f997ea4e3 webrtc: janus: handle 'hangup' messages from Janus
Fix error about this message not being handled:

{
   "janus": "hangup",
   "session_id": 4758817463851315,
   "sender": 4126342934227009,
   "reason": "Close PC"
}

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1481>
2024-03-13 10:14:38 +00:00
Guillaume Desmottes
992f8d9a5d webrtc: janus: handle 'destroyed' messages from Janus
Fix this error when the room is destroyed:

ERROR   webrtc-janusvr-signaller imp.rs:413:gstrswebrtc::janusvr_signaller::imp::Signaller::handle_msg:<GstJanusVRWebRTCSignallerU64@0x55b166a3fe40> Unknown message from server: {
   "janus": "event",
   "session_id": 6667171862739941,
   "sender": 1964690595468240,
   "plugindata": {
      "plugin": "janus.plugin.videoroom",
      "data": {
         "videoroom": "destroyed",
         "room": 8320333573294267
      }
   }
}

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1481>
2024-03-13 10:14:38 +00:00
Guillaume Desmottes
9c6a39d692 webrtc: janus: handle (stopped-)talking events
Expose those events using a signal.

Fix those errors when joining a Janus room configured with
'audiolevel_event: true'.

ERROR   webrtc-janusvr-signaller imp.rs:408:gstrswebrtc::janusvr_signaller::imp::Signaller::handle_msg:<GstJanusVRWebRTCSignaller@0x560cf2a55100> Unknown message from server: {
   "janus": "event",
   "session_id": 2384862538500481,
   "sender": 1867822625190966,
   "plugindata": {
      "plugin": "janus.plugin.videoroom",
      "data": {
         "videoroom": "talking",
         "room": 7564250471742314,
         "id": 6815475717947398,
         "mindex": 0,
         "mid": "0",
         "audio-level-dBov-avg": 37.939998626708984
      }
   }
}
ERROR   webrtc-janusvr-signaller imp.rs:408:gstrswebrtc::janusvr_signaller::imp::Signaller::handle_msg:<GstJanusVRWebRTCSignaller@0x560cf2a55100> Unknown message from server: {
   "janus": "event",
   "session_id": 2384862538500481,
   "sender": 1867822625190966,
   "plugindata": {
      "plugin": "janus.plugin.videoroom",
      "data": {
         "videoroom": "stopped-talking",
         "room": 7564250471742314,
         "id": 6815475717947398,
         "mindex": 0,
         "mid": "0",
         "audio-level-dBov-avg": 40.400001525878906
      }
   }
}

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1481>
2024-03-13 10:14:38 +00:00
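A rough sketch of deserializing the payload of such an event with serde (the struct and field selection are illustrative, not the signaller's actual types):

  #[derive(serde::Deserialize)]
  struct TalkingEvent {
      room: u64,
      id: u64,
      mindex: u32,
      mid: String,
      #[serde(rename = "audio-level-dBov-avg")]
      audio_level_dbov_avg: f32,
  }
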
Guillaume Desmottes
b29a739fb2 uriplaylistbin: disable racy test
https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/514

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1494>
2024-03-12 16:57:22 +01:00
Guillaume Desmottes
1dea8f60a8 threadshare: disable racy tests
https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/250

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1494>
2024-03-12 16:54:21 +01:00
Guillaume Desmottes
2629719b4e livesync: disable racy tests
https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/328
https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/357

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1494>
2024-03-12 16:32:47 +01:00
Guillaume Desmottes
9e6e8c618e togglerecord: disable racy test_two_stream_close_open_nonlivein_liveout test
See https://gitlab.freedesktop.org/gdesmott/gst-plugins-rs/-/jobs/56183085

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1494>
2024-03-12 16:21:52 +01:00
François Laignel
995f64513d Update Cargo.lock to use latest gstreamer-rs
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1491>
2024-03-11 14:42:36 +01:00
François Laignel
5b01e43a12 webrtc: update further to WebRTCSessionDescription sdp accessor changes
See: https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/-/merge_requests/1406
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1491>
2024-03-11 13:39:19 +01:00
Guillaume Desmottes
03abb5c681 spotify: document how to use with non Facebook accounts
See discussion on #203.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1490>
2024-03-11 09:46:40 +01:00
Zhao, Gang
7a46377627 rtp: tests: Simplify loop
All buffers can be added in 100 outer loops. Fewer than 200 buffers are added in the last (i = 99) loop.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1489>
2024-03-10 16:47:30 +08:00
Olivier Crête
15e7a63e7b originalbuffer: Pair of elements to keep and restore original buffer
The goal is to be able to get back the original buffer
after performing analysis on a transformed version. Then put the
various GstMeta back on the original buffer.

An example pipeline would be
.. ! originalbuffersave ! videoscale ! analysis ! originalbufferrestore ! draw_overlay ! sink

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1428>
2024-03-08 15:15:13 -05:00
Guillaume Desmottes
612f863ee9 webrtc: janusvrwebrtcsink: add 'use-string-ids' property
Instead of exposing all id properties as strings, we now have two
signaller implementations exposing those properties using their actual
type. This API is more natural and saves the element and application
from conversions when using numerical ids (Janus's default).

I also removed the 'joined-id' property as it's actually the same id as
'feed-id'. I think it would be better to have a 'janus-state' property or
something like that for applications wanting to know when the room has
been joined.
This id is also no longer generated by the element by default, as Janus
will take care of generating one if not provided.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1486>
2024-03-07 09:34:58 +01:00
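A minimal usage sketch (assumes `use gst::prelude::*;` and that gst::init() already ran):

  // Select the string-id signaller variant for a Janus server configured
  // with string room ids.
  let sink = gst::ElementFactory::make("janusvrwebrtcsink")
      .property("use-string-ids", true)
      .build()
      .unwrap();
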
Seungha Yang
237f22d131 sccparse: Ignore invalid timecode during seek as well
sccparse holds the last timecode in order to ignore invalid timecodes
and fall back to the previous timecode. That should happen
when sccparse is handling a seek event too. Otherwise a single invalid
timecode before the target seek position will cause a flow error.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1485>
2024-03-06 11:12:04 +00:00
Sebastian Dröge
2839e0078b rtp: Port RTP AV1 payloader/depayloader to new base classes
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1472>
2024-03-06 09:40:35 +00:00
Jordan Yelloz
0414f468c6 livekit_signaller: Added missing getter for excluded-producer-peer-ids
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1484>
2024-03-04 10:08:11 -07:00
Jordan Yelloz
8b0731b5a2 webrtcsrc: Removed incorrect URIHandler from LiveKit source
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1484>
2024-03-04 09:44:01 -07:00
Guillaume Desmottes
7d0397e1ad uriplaylistbin: re-enable all tests
They now seem to work reliably. \o/

Fix #194

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1471>
2024-03-04 12:00:13 +01:00
Guillaume Desmottes
f6476f1e8f uriplaylistbin: use vp9 in test media
The Windows CI runner does not have a Theora decoder so those tests were
failing there.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1471>
2024-03-04 12:00:13 +01:00
Guillaume Desmottes
cfebc32b82 uriplaylistbin: tests: use fakesink sync=true
Tests are more reliable when using a sync sink.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1471>
2024-03-04 11:17:11 +01:00
Guillaume Desmottes
721b7e9c8c uriplaylistbin: rely on new uridecodebin3 gapless logic
uridecodebin3 can now properly handle gapless switches so use that
instead of our own very complicated logic.

Fix #268
Fix #193

Depends on gst 1.23.90 as the plugin requires recent fixes to work properly.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1471>
2024-03-04 11:17:11 +01:00
Guillaume Desmottes
1e88971ec8 uriplaylistbin: pass valid URI in tests
Fix critical raised by libsoup,
see https://gitlab.gnome.org/GNOME/libsoup/-/merge_requests/346

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1471>
2024-03-04 11:06:19 +01:00
Sebastian Dröge
8a6bcb712f Remove empty line from the CHANGELOG.md that confuses the GitLab renderer 2024-03-01 16:46:21 +02:00
Jordan Yelloz
002dc36ab9 livekit_signaller: Improved shutdown behavior
Without sending a Leave request to the server before disconnecting, the
disconnected client will appear present and stuck in the room for a little
while until the server removes it due to inactivity.

After this change, the disconnecting client will immediately leave the room.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1482>
2024-02-29 08:21:13 -07:00
Sebastian Dröge
9c590f4223 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1483>
2024-02-29 10:09:09 +00:00
Jordan Yelloz
f0b408d823 webrtcsrc: Removed flag setup from WhipServerSrc
It's already done in the base class

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1461>
2024-02-28 11:25:58 -07:00
Jordan Yelloz
17b2640237 webrtcsrc: Updated readme for LiveKit source
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1461>
2024-02-28 11:25:58 -07:00
Jordan Yelloz
fa006b9fc9 webrtcsrc: Added LiveKit source element
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1461>
2024-02-28 11:25:58 -07:00
Jordan Yelloz
96037fbcc5 webrtcsink: Updated livekitwebrtcsink for new signaller constructor
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1461>
2024-02-28 11:25:58 -07:00
Jordan Yelloz
730b3459f1 livekit_signaller: Added dual-role support
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1461>
2024-02-28 11:25:49 -07:00
Guillaume Desmottes
60bb72ddc3 webrtc: janus: add joined-id property to the signaller
Fix #504

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1480>
2024-02-28 15:05:11 +01:00
Guillaume Desmottes
eabf31e6d0 webrtc: janus: rename RoomId to JanusId
Those weird ids are used in multiple places, not only for the room id,
so best to have a more generic name.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1480>
2024-02-28 15:05:11 +01:00
Guillaume Desmottes
550018c917 webrtc: janus: room id not optional in 'joined' message
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1480>
2024-02-28 14:16:46 +01:00
Guillaume Desmottes
0829898d73 webrtc: janus: remove 'audio' and 'video' from publish messages
Those are deprecated and no longer used.

See https://janus.conf.meetecho.com/docs/videoroom and
https://github.com/meetecho/janus-gateway/blob/master/src/plugins/janus_videoroom.c#L9894

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1480>
2024-02-28 13:39:04 +01:00
Guillaume Desmottes
ec17c58dee webrtc: janus: numerical room ids are u64
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1478>
2024-02-28 11:56:44 +01:00
Yorick Smilda
563eff1193 Implement GstWebRTCAPI as class instead of global instance
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1373>
2024-02-27 12:30:13 +00:00
Jordan Yelloz
594400a7f5 webrtcsrc: Made producer-peer-id optional
It may be necessary for some signalling clients but the source element
doesn't need to depend on it.

Also, the value will fall back to the pad's MSID for the first argument
to the request-encoded-filter gobject signal when it isn't available
from the signalling client.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1477>
2024-02-26 13:41:40 -07:00
Sebastian Dröge
47ba068966 Update CHANGELOG.md for 0.12.2 2024-02-26 14:58:58 +02:00
Sebastian Dröge
5df7c01cb5 closedcaption: Port from nom to winnow
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1475>
2024-02-26 14:00:08 +02:00
Xavier Claessens
f7ffa13543 janusvr: Add string-ids property
It forces usage of strings even if the id can be parsed into an integer.
This allows joining room `"133"` on a server configured with string
room ids.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1466>
2024-02-26 11:10:00 +00:00
Xavier Claessens
23955d2dbb janusvr: Room IDs can be strings
Sponsored-by: Netflix Inc.
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1466>
2024-02-26 11:10:00 +00:00
Sebastian Dröge
340d65d7a4 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1474>
2024-02-26 11:14:01 +02:00
Sebastian Dröge
b9195ed309 fmp4mux: Update to dash-mpd 0.15
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1474>
2024-02-26 11:14:01 +02:00
Sebastian Dröge
fc1c017fc6 Update CHANGELOG.md for 0.12.1 2024-02-23 19:19:17 +02:00
Sebastian Dröge
f563f8334b rtp: Add PCMU/PCMA RTP payloader / depayloader elements
These come with new generic RTP payloader, RTP raw-ish audio payloader
and RTP depayloader base classes.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1424>
2024-02-23 14:43:45 +02:00
Xavier Claessens
e09f9e9540 meson: Fix error when default_library=both
Skip duplicated plugin_name when we have both the static and shared
plugin in the plugins list.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1470>
2024-02-22 12:31:23 -05:00
Maksym Khomenko
da21dc853d webrtcsink: extensions: separate API and signal checks
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1469>
2024-02-20 19:29:46 +02:00
Maksym Khomenko
2228f882d8 webrtcsink: apply rustfmt
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1469>
2024-02-20 19:29:28 +02:00
Mathieu Duponchelle
8f3a6171ac textwrap: don't split on all whitespaces ..
but only on ASCII whitespaces, as we want to honor non-breaking
whitespaces (\u{a0})

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1468>
2024-02-16 19:38:38 +01:00
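A minimal sketch of the splitting change in plain Rust (illustrative, not the element's actual code):

  // Split only on ASCII whitespace so that a non-breaking space (U+00A0)
  // keeps the words it joins together.
  let line = "price:\u{a0}100 EUR";
  let words: Vec<&str> = line
      .split(|c: char| c.is_ascii_whitespace())
      .filter(|w| !w.is_empty())
      .collect();
  assert_eq!(words, ["price:\u{a0}100", "EUR"]);
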
Xavier Claessens
2572afbf15 janusvr: Add secret-key property
Every API call has an optional "apisecret" argument.

Sponsored-by: Netflix Inc.
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1465>
2024-02-16 14:04:59 +00:00
Sebastian Dröge
0faac3b875 deny: Add winnow 0.5 override
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1467>
2024-02-16 14:27:29 +02:00
Sebastian Dröge
cb0cc764ba Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1467>
2024-02-16 14:26:44 +02:00
Sebastian Dröge
45f55423fb Remove Cargo.lock from .gitignore
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1467>
2024-02-16 14:25:54 +02:00
Sebastian Dröge
8ef12a72e8 rtpgccbwe: Don't reset PTS/DTS to None
The element is usually placed before `rtpsession`, and `rtpsession`
needs the PTS/DTS for correctly determining the running time. The
running time is then used to produce correct RTCP SR, and to potentially
update an NTP-64 RTP header extension if present on the packets.

Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/496

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1462>
2024-02-14 08:05:54 +00:00
Sebastian Dröge
05884de50c textwrap: Remove unnecessary to_string() in debug output of a string
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1458>
2024-02-12 19:09:06 +02:00
Jordan Yelloz
67b7cf9764 webrtcsink: Added sinkpad with "msid" property
This forwards to the webrtcbin sinkpad's msid when specified.

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1442>
2024-02-12 15:04:44 +00:00
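A rough usage sketch, assuming a sink pad template named "video_%u" (the template name is an assumption) and `use gst::prelude::*;`:

  let webrtcsink = gst::ElementFactory::make("webrtcsink").build().unwrap();
  // Request a sink pad and set the msid that gets forwarded to webrtcbin.
  let pad = webrtcsink
      .request_pad_simple("video_%u")
      .expect("failed to request sink pad");
  pad.set_property("msid", "my-camera-stream");
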
Sebastian Dröge
9827106961 Update Cargo.lock
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1455>
2024-02-11 11:55:37 +02:00
Sebastian Dröge
b2d5ee48cd Update to async-tungstenite 0.25
Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1455>
2024-02-11 11:31:24 +02:00
Sebastian Dröge
7274c725a6 gtk4: Create a window if running from gst-launch-1.0 or GST_GTK4_WINDOW=1 is set
Fixes https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/482

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/merge_requests/1454>
2024-02-09 15:05:45 +02:00
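Usage sketch (the pipeline and file name are illustrative): running under gst-launch-1.0 now opens a window automatically, and GST_GTK4_WINDOW=1 forces the same behaviour elsewhere.

  gst-launch-1.0 videotestsrc ! videoconvert ! gtk4paintablesink
  GST_GTK4_WINDOW=1 gst-play-1.0 --videosink=gtk4paintablesink some-video.mp4
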
Sebastian Dröge
66ad059a47 deny: Add zerocopy 0.6 duplicate override for librespot 2024-02-09 10:05:45 +02:00
Sebastian Dröge
c0d111e2c1 utils: Update for renamed clippy lint in 1.76 2024-02-08 21:37:17 +02:00
Sebastian Dröge
9116853e6d Update Cargo.lock
Downgrade clap_derive to 4.4.7 to not require Rust 1.74 or newer.
2024-02-08 20:50:44 +02:00
Sebastian Dröge
21aa61b69c Update Cargo.lock 2024-02-08 19:41:00 +02:00
Sebastian Dröge
119d905805 Update version to 0.13.0-alpha.1 2024-02-08 19:41:00 +02:00
510 changed files with 83385 additions and 18774 deletions

1
.gitignore vendored
View file

@ -1,4 +1,3 @@
Cargo.lock
target
*~
*.bk

View file

@ -1,4 +1,4 @@
.templates_sha: &templates_sha fddab8aa63e89a8e65214f59860d9c0f030360c9
.templates_sha: &templates_sha 6a40df92957c8ce9ee741aaccc5daaaf70545b1e
include:
- project: 'freedesktop/ci-templates'
@ -20,9 +20,11 @@ variables:
# to ensure that we are testing against the same thing as GStreamer itself.
# The tag name is included above from the main repo.
GSTREAMER_DOC_IMAGE: "registry.freedesktop.org/gstreamer/gstreamer/amd64/fedora:$FEDORA_TAG-main"
# Use the gstreamer image to trigger the cerbero job, same as the monorepo
CERBERO_TRIGGER_IMAGE: "registry.freedesktop.org/gstreamer/gstreamer/amd64/fedora:$FEDORA_TAG-main"
WINDOWS_BASE: "registry.freedesktop.org/gstreamer/gstreamer-rs/windows"
WINDOWS_RUST_MINIMUM_IMAGE: "$WINDOWS_BASE:$GST_RS_IMG_TAG-main-$GST_RS_MSRV"
WINDOWS_RUST_STABLE_IMAGE: "$WINDOWS_BASE:$GST_RS_IMG_TAG-main-$GST_RS_STABLE"
WINDOWS_RUST_MINIMUM_IMAGE: "$WINDOWS_BASE:$GST_RS_IMG_WINDOWS_TAG-main-$GST_RS_MSRV"
WINDOWS_RUST_STABLE_IMAGE: "$WINDOWS_BASE:$GST_RS_IMG_WINDOWS_TAG-main-$GST_RS_STABLE"
workflow:
rules:
@ -36,6 +38,14 @@ workflow:
default:
interruptible: true
# Auto-retry jobs in case of infra failures
retry:
max: 1
when:
- 'runner_system_failure'
- 'stuck_or_timeout_failure'
- 'scheduler_failure'
- 'api_failure'
stages:
- "trigger"
@ -50,6 +60,7 @@ trigger:
stage: 'trigger'
variables:
GIT_STRATEGY: none
tags: [ 'placeholder-job' ]
script:
- echo "Trigger job done, now running the pipeline."
rules:
@ -72,7 +83,7 @@ trigger:
- rm -rf target
before_script:
- source ./ci/env.sh
- mkdir .cargo && echo -e "[net]\ngit-fetch-with-cli = true" > .cargo/config
- mkdir .cargo && echo -e "[net]\ngit-fetch-with-cli = true" > .cargo/config.toml
.debian:12-stable:
extends: .debian:12
@ -94,13 +105,14 @@ trigger:
RUST_BACKTRACE: 'full'
script:
- rustc --version
- CARGO_FLAGS="-j${FDO_CI_CONCURRENT:-$(nproc)} --locked --color=always --all --all-targets"
- cargo build --locked --color=always --workspace --all-targets
- G_DEBUG=fatal_warnings cargo test --locked --color=always --workspace --all-targets
- cargo build --locked --color=always --workspace --all-targets --all-features --exclude gst-plugin-gtk4
- G_DEBUG=fatal_warnings cargo test --locked --color=always --workspace --all-targets --all-features --exclude gst-plugin-gtk4
- cargo build --locked --color=always --workspace --all-targets --no-default-features
- G_DEBUG=fatal_warnings cargo test --locked --color=always --workspace --all-targets --no-default-features
- cargo build $CARGO_FLAGS
- RUST_BACKTRACE=1 G_DEBUG=fatal_warnings cargo test $CARGO_FLAGS
- cargo build $CARGO_FLAGS --all-features --exclude gst-plugin-gtk4
- RUST_BACKTRACE=1 G_DEBUG=fatal_warnings cargo test $CARGO_FLAGS --all-features --exclude gst-plugin-gtk4
- cargo build $CARGO_FLAGS --no-default-features
- RUST_BACKTRACE=1 G_DEBUG=fatal_warnings cargo test $CARGO_FLAGS --no-default-features
test msrv:
extends:
@ -261,6 +273,7 @@ documentation:
- 'docker'
- 'windows'
- '2022'
- "gstreamer-windows"
script:
# Set the code page to UTF-8
- chcp 65001
@ -287,6 +300,7 @@ test windows stable:
rustfmt:
extends: '.debian:12-stable'
stage: "lint"
tags: [ 'placeholder-job' ]
needs: []
script:
- cargo fmt --version
@ -295,6 +309,7 @@ rustfmt:
typos:
extends: '.debian:12-stable'
stage: "lint"
tags: [ 'placeholder-job' ]
needs: []
script:
- typos
@ -302,6 +317,7 @@ typos:
gstwebrtc-api lint:
image: node:lts
stage: "lint"
tags: [ 'placeholder-job' ]
needs: []
script:
- cd net/webrtc/gstwebrtc-api
@ -311,10 +327,12 @@ gstwebrtc-api lint:
check commits:
extends: '.debian:12-stable'
stage: "lint"
tags: [ 'placeholder-job' ]
needs: []
script:
- ci-fairy check-commits --textwidth 0 --no-signed-off-by
- ci/check-for-symlinks.sh
- ci/check-meson-version.sh
clippy:
extends: '.debian:12-stable'
@ -326,9 +344,10 @@ clippy:
# csound-sys only looks at /usr/lib and /usr/local top levels
CSOUND_LIB_DIR: '/usr/lib/x86_64-linux-gnu/'
script:
- cargo clippy --locked --color=always --all --all-targets -- -D warnings -A unknown-lints
- cargo clippy --locked --color=always --all --all-features --all-targets --exclude gst-plugin-gtk4 -- -D warnings -A unknown-lints
- cargo clippy --locked --color=always --all --all-targets --no-default-features -- -D warnings -A unknown-lints
- CARGO_FLAGS="-j${FDO_CI_CONCURRENT:-$(nproc)} --locked --color=always --all --all-targets"
- cargo clippy $CARGO_FLAGS -- -D warnings -A unknown-lints
- cargo clippy $CARGO_FLAGS --all-features --exclude gst-plugin-gtk4 -- -D warnings -A unknown-lints
- cargo clippy $CARGO_FLAGS --no-default-features -- -D warnings -A unknown-lints
deny:
extends: .debian:12-stable
@ -353,7 +372,9 @@ outdated:
- if: '$CI_PIPELINE_SOURCE == "schedule"'
script:
- cargo update --color=always
- cargo outdated --color=always --root-deps-only --exit-code 1 -v
# Ignore bitstream-io until we can update MSRV to 1.80
# Ignore test-with until we can update MSRV to 1.77
- cargo outdated --color=always --root-deps-only --exit-code 1 -v -i bitstream-io -i test-with
coverage:
allow_failure: true
@ -369,18 +390,53 @@ coverage:
# csound-sys only looks at /usr/lib and /usr/local top levels
CSOUND_LIB_DIR: '/usr/lib/x86_64-linux-gnu/'
script:
- cargo test --locked --color=always --all --all-features --exclude gst-plugin-gtk4
- CARGO_FLAGS="-j${FDO_CI_CONCURRENT:-$(nproc)} --locked --color=always --all"
- cargo test $CARGO_FLAGS --all-features --exclude gst-plugin-gtk4
# generate html report
- grcov . --binary-path ./target/debug/ -s . -t html --branch --ignore-not-existing --ignore "*target*" --ignore "*/build.rs" -o ./coverage/
# generate cobertura report for gitlab integration
- grcov . --binary-path ./target/debug/ -s . -t cobertura --branch --ignore-not-existing --ignore "*target*" --ignore "*/build.rs" -o coverage.xml
- mkdir -p coverage
- grcov . --binary-path ./target/debug/ -s . -t html,cobertura --branch --ignore-not-existing --ignore "*target*" --ignore "*/build.rs" -o ./coverage/
# output coverage summary for gitlab parsing.
# TODO: use grcov once https://github.com/mozilla/grcov/issues/556 is fixed
- grep "%" coverage/index.html | head -1 || true
- grep "%" coverage/html/index.html | head -1 || true
artifacts:
paths:
- 'coverage'
reports:
coverage_report:
coverage_format: cobertura
path: coverage.xml
path: "coverage/cobertura.xml"
cerbero trigger:
image: $CERBERO_TRIGGER_IMAGE
needs: [ "trigger" ]
timeout: '4h'
tags:
- placeholder-job
variables:
# We will build this cerbero branch in the cerbero trigger CI
CERBERO_UPSTREAM_BRANCH: 'main'
script:
- ci/cerbero/trigger_cerbero_pipeline.py
rules:
# Never run post merge
- if: '$CI_PROJECT_NAMESPACE == "gstreamer"'
when: never
# Don't run if the only changes are files that cargo-c does not read
- if:
changes:
- "CHANGELOG.md"
- "README.md"
- "deny.toml"
- "rustfmt.toml"
- "typos.toml"
- "*.py"
- "*.sh"
- "Makefile"
- "meson.build"
- "meson_options.txt"
- "**/meson.build"
- "ci/*.sh"
- "ci/*.py"
when: never
- when: always

View file

@ -5,6 +5,198 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html),
specifically the [variant used by Rust](http://doc.crates.io/manifest.html#the-version-field).
## [0.13.1] - 2024-08-27
### Fixed
- transcriberbin: Fix gst-inspect with missing elements.
- gtk4paintablesink: Move dmabuf cfg to the correct bracket level.
- webrtcsrc: Don't hold the state lock while removing sessions.
- rtpbasepay: Various fixes to payloader base class.
- webrtcsink: Fix various assertions when finalizing.
- webrtcsrc: Make sure to always call end_session() without state lock.
- mpegtslivesrc: Handle PCR discontinuities as errors.
- ndisrc: Calculate timestamps for metadata buffers too.
- Various new clippy warnings.
- webrtcsink: Fix segment format mismatch when using a remote offer.
- awstranscriber: Fix sanity check in transcribe loop.
- whepsrc: Fix incorrect default caps.
### Changed
- gtk4paintablesink: Enable `gtk::GraphicsOffload::black-background` when
building with GTK 4.16 or newer.
- gstwebrtc-api: Always include index file in dist for convenience.
- rtpbasepay: Negotiate SSRC/PT with downstream via caps for backwards
compatibility.
- hlssink3: Use more accurate fragment duration from splitmuxsink if
available.
### Added
- gtk4paintablesink: Add `window-width` and `window-height` properties.
- gtk4paintablesink: Add custom widget for automatically updating window size.
- fmp4mux / mp4mux: Add image orientation tag support.
- webrtcsink: Add nvv4l2av1enc support.
- cmafmux: Add Opus support.
## [0.13.0] - 2024-07-16
### Added
- rtp: New RTP payloader and depayloader base classes, in addition to new
payloader and depayloaders for: PCMA, PCMU, AC-3, AV1 (ported to the new
base classes), MPEG-TS, VP8, VP9, MP4A, MP4G, JPEG, Opus, KLV.
- originalbuffer: New pair of elements that allows saving a buffer, performing
transformations on it and then restoring the original buffer while keeping any
new analytics and other metadata on it.
- gopbuffer: New element for buffering an entire group-of-pictures.
- tttocea708: New element for converting timed text to CEA-708 closed captions.
- cea708mux: New element for muxing multiple CEA-708 services together.
- transcriberbin: Add support for generating CEA-708 closed captions and
CEA-608-in-708.
- cea708overlay: New overlay element for CEA-708 and CEA-608 closed captions.
- dav1ddec: Signal colorimetry in the caps.
- webrtc: Add support for RFC7273 clock signalling and synchronization to
webrtcsrc and webrtcsink.
- tracers: Add a new pad push durations tracer.
- transcriberbin: Add support for a secondary audio stream.
- quinn: New plugin with a QUIC source and sink element.
- rtpgccbwe: New mode based on linear regression instead of a kalman filter.
- rtp: New rtpsend and rtprecv elements that provide a new implementation of
the rtpbin element with a separate send and receive side.
- rtpsrc2: Add support for new rtpsend / rtprecv elements instead of rtpbin.
- webrtcsrc: Add multi-producer support.
- livesync: Add sync property for enabling/disabling syncing of the output
buffers to the clock.
- mpegtslivesrc: New element for receiving an MPEG-TS stream, e.g. over SRT or
UDP, and exposing the remote PCR clock as a local GStreamer clock.
- gtk4paintablesink: Add support for rotations / flipping.
- gtk4paintablesink: Add support for RGBx formats in non-GL mode.
### Fixed
- livesync: Queue up to the configured latency worth of buffers instead of
requiring a queue of the same size in front of livesync.
- livesync: Synchronize the first buffer to the clock too.
- livesync: Use correct duration for deciding whether a filler has to be
inserted or not.
- audioloudnorm: Fix possible off-by-one in the limiter when handling the very
last buffer.
- webrtcsink: Fix property types for rav1enc.
### Changed
- sccparse, mccparse: Port from nom to winnow.
- uriplaylistbin: Rely on uridecodebin3 gapless logic instead of
re-implementing it.
- webrtc: Refactor of JavaScript API.
- janusvrwebrtcsink: New use-string-ids property to distinguish between
integer and string room IDs, instead of always using strings and guessing
what the server expects.
- janusvrwebrtcsink: Handle more events and expose some via signals.
- dav1ddec: Require dav1d 1.3.0.
- closedcaption: Drop libcaption C code and switch to a pure Rust
implementation.
## [0.12.7] - 2024-06-19
### Fixed
- aws, spotifyaudiosrc, reqwesthttpsrc, webrtchttp: Fix race condition when unlocking
- rtp: Allow any payload type for the AV1 RTP payloader/depayloader
- rtp: Various fixes to the AV1 RTP payloader/depayloader to work correctly
with Chrome and Pion
- meson: Various fixes to the meson-based build system around cargo
- webrtcsink: Use correct property names for configuring `av1enc`
- webrtcsink: Avoid lock poisoning when setting encoder properties
### Added
- ndi: Support for NDI SDK v6
- webrtcsink: Support for AV1 via `nvav1enc`, `av1enc` or `rav1enc`
### Changed
- Update to async-tungstenite 0.26
## [0.12.6] - 2024-05-23
### Fixed
- Various Rust 1.78 clippy warnings.
- gtk4paintablesink: Fix plugin description.
### Added
- fmp4mux / mp4mux: Add support for adding AV1 header OBUs into the MP4
headers.
- fmp4mux / mp4mux: Take track language from the tags if provided.
- gtk4paintablesink: Add GST_GTK4_WINDOW_FULLSCREEN environment variable to
create a fullscreen window for debugging purposes.
- gtk4paintablesink: Also create a window automatically when called from
gst-play-1.0.
- webrtc: Add support for insecure TLS connections.
- webrtcsink: Add VP9 parser after the encoder.
### Changed
- webrtcsink: Improve error when no discovery pipeline runs.
- rtpgccbwe: Improve debug output in various places.
## [0.12.5] - 2024-04-29
### Fixed
- hrtfrender: Use a bitmask instead of an int in the caps for the channel-mask.
- rtpgccbwe: Don't log an error when pushing a buffer list fails while stopping.
- webrtcsink: Don't panic in bitrate handling with unsupported encoders.
- webrtcsink: Don't panic if unsupported input caps are used.
- webrtcsrc: Allow a `None` producer-id in `request-encoded-filter` signal.
### Added
- aws: New property to support path-style addressing.
- fmp4mux / mp4mux: Support FLAC inside (f)MP4.
- gtk4: Support directly importing dmabufs with GTK 4.14.
- gtk4: Add force-aspect-ratio property similar to other video sinks.
## [0.12.4] - 2024-04-08
### Fixed
- aws: Use fixed behaviour version to ensure that updates to the AWS SDK don't
change any default configurations in unexpected ways.
- onvifmetadataparse: Fix possible deadlock on shutdown.
- webrtcsink: Set `perfect-timestamp=true` on audio encoders to work around
bugs in Chrome's audio decoders.
- Various clippy warnings.
### Changed
- reqwest: Update to reqwest 0.12.
- webrtchttp: Update to reqwest 0.12.
## [0.12.3] - 2024-03-21
### Fixed
- gtk4paintablesink: Fix scaling of texture position.
- janusvrwebrtcsink: Handle 64 bit numerical room ids.
- janusvrwebrtcsink: Don't include deprecated audio/video fields in publish
messages.
- janusvrwebrtcsink: Handle various other messages to avoid printing errors.
- livekitwebrtc: Fix shutdown behaviour.
- rtpgccbwe: Don't forward buffer lists with buffers from different SSRCs to
avoid breaking assumptions in rtpsession.
- sccparse: Ignore invalid timecodes during seeking.
- webrtcsink: Don't try parsing audio caps as video caps.
### Changed
- webrtc: Allow resolution and framerate changes.
- webrtcsrc: Make producer-peer-id optional.
### Added
- livekitwebrtcsrc: Add new LiveKit source element.
- regex: Add support for configuring regex behaviour.
- spotifyaudiosrc: Document how to use with non-Facebook accounts.
- webrtcsrc: Add `do-retransmission` property.
## [0.12.2] - 2024-02-26
### Fixed
- rtpgccbwe: Don't reset PTS/DTS to `None` as otherwise `rtpsession` won't be
able to generate valid RTCP.
- webrtcsink: Fix usage with 1.22.
### Added
- janusvrwebrtcsink: Add `secret-key` property.
- janusvrwebrtcsink: Allow for string room ids and add `string-ids` property.
- textwrap: Don't split on all whitespaces, especially not on non-breaking
whitespace.
## [0.12.1] - 2024-02-13
### Added
- gtk4: Create a window for testing purposes when running in `gst-launch-1.0`
or if `GST_GTK4_WINDOW=1` is set.
- webrtcsink: Add `msid` property.
## [0.12.0] - 2024-02-08
### Changed
- ndi: `ndisrc` passes received data downstream without an additional copy, if
@ -36,7 +228,6 @@ specifically the [variant used by Rust](http://doc.crates.io/manifest.html#the-v
- New `janusvrwebrtcsink` element for the Janus VideoRoom API.
- New `rtspsrc2` element.
- New `whipserversrc` element.
- gtk4: New `background-color` property for setting the color of the
background of the frame and the borders, if any.
- gtk4: New `scale-filter` property for defining how to scale the frames.
@ -344,7 +535,16 @@ specifically the [variant used by Rust](http://doc.crates.io/manifest.html#the-v
- webrtcsink: Make the `turn-server` property a `turn-servers` list
- webrtcsink: Move from async-std to tokio
[Unreleased]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.0...HEAD
[Unreleased]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.13.1...HEAD
[0.13.1]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.13.0...0.13.1
[0.13.0]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.7...0.13.0
[0.12.7]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.6...0.12.7
[0.12.6]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.5...0.12.6
[0.12.5]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.4...0.12.5
[0.12.4]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.3...0.12.4
[0.12.3]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.2...0.12.3
[0.12.2]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.1...0.12.2
[0.12.1]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.12.0...0.12.1
[0.12.0]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.11.3...0.12.0
[0.11.3]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.11.2...0.11.3
[0.11.2]: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/compare/0.11.1...0.11.2

2953
Cargo.lock generated

File diff suppressed because it is too large.

View file

@ -9,12 +9,15 @@ members = [
"audio/claxon",
"audio/csound",
"audio/lewton",
"audio/speechmatics",
"audio/spotify",
"generic/file",
"generic/originalbuffer",
"generic/sodium",
"generic/threadshare",
"generic/inter",
"generic/gopbuffer",
"mux/flavors",
"mux/fmp4",
@ -22,6 +25,7 @@ members = [
"net/aws",
"net/hlssink3",
"net/mpegtslive",
"net/ndi",
"net/onvif",
"net/raptorq",
@ -32,6 +36,7 @@ members = [
"net/webrtc",
"net/webrtc/protocol",
"net/webrtc/signalling",
"net/quinn",
"text/ahead",
"text/json",
@ -65,13 +70,16 @@ default-members = [
"audio/claxon",
"audio/lewton",
"generic/originalbuffer",
"generic/threadshare",
"generic/inter",
"generic/gopbuffer",
"mux/fmp4",
"mux/mp4",
"net/aws",
"net/mpegtslive",
"net/hlssink3",
"net/onvif",
"net/raptorq",
@ -83,6 +91,7 @@ default-members = [
"net/webrtc/protocol",
"net/webrtc/signalling",
"net/ndi",
"net/quinn",
"text/ahead",
"text/json",
@ -113,10 +122,10 @@ panic = 'unwind'
opt-level = 1
[workspace.package]
version = "0.12.0-alpha.1"
version = "0.14.0-alpha.1"
repository = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs"
edition = "2021"
rust-version = "1.70"
rust-version = "1.71"
[workspace.dependencies]
once_cell = "1"
@ -125,11 +134,12 @@ gio = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
cairo-rs = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master", features=["use_glib"] }
pango = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
pangocairo = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
gtk = { package = "gtk4", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master"}
gdk-wayland = { package = "gdk4-wayland", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master"}
gdk-x11 = { package = "gdk4-x11", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master"}
gdk-win32 = { package = "gdk4-win32", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master"}
gtk = { package = "gtk4", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master", features = ["v4_6"]}
gdk-wayland = { package = "gdk4-wayland", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master", features = ["v4_4"]}
gdk-x11 = { package = "gdk4-x11", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master", features = ["v4_4"]}
gdk-win32 = { package = "gdk4-win32", git = "https://github.com/gtk-rs/gtk4-rs", branch = "master", features = ["v4_4"]}
gst = { package = "gstreamer", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-allocators = { package = "gstreamer-allocators", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-app = { package = "gstreamer-app", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-audio = { package = "gstreamer-audio", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }
gst-base = { package = "gstreamer-base", git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", branch = "main" }


@ -23,6 +23,7 @@ You will find the following plugins in this repository:
- `aws`: Various elements for Amazon AWS services using the [AWS SDK](https://awslabs.github.io/aws-sdk-rust/) library
- `s3src`/`s3sink`: A source and sink element to talk to the Amazon S3 object storage system.
- `s3putobjectsink`: A sink element to talk to Amazon S3. Uses `PutObject` instead of multi-part upload like `s3sink`.
- `s3hlssink`: A sink element to store HLS streams on Amazon S3.
- `awstranscriber`: an element wrapping the AWS Transcriber service.
- `awstranscribeparse`: an element parsing the packets of the AWS Transcriber service.
@ -33,6 +34,9 @@ You will find the following plugins in this repository:
- `onvif`: Various elements for parsing, RTP (de)payloading, overlaying of ONVIF timed metadata.
- `quinn`: Transfer data over the network using QUIC
- `quinnquicsink`/`quinnquicsrc`: Send and receive data using QUIC
- `raptorq`: Encoder/decoder element for RaptorQ RTP FEC mechanism.
- `reqwest`: An HTTP source element based on the [reqwest](https://github.com/seanmonstar/reqwest) library.
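
The README hunk above adds `s3hlssink` and the `quinn` elements to the plugin overview. As a hedged sketch (not part of the repository), the element names listed there can be checked against the local GStreamer registry; each one is only found if its plugin is actually installed:

    use gst::prelude::*;

    fn main() {
        gst::init().unwrap();

        // Element names taken from the README list above; availability depends on
        // which plugins are installed on the machine running this.
        for name in ["s3src", "s3hlssink", "awstranscriber", "quinnquicsink", "quinnquicsrc"] {
            match gst::ElementFactory::find(name) {
                Some(factory) => {
                    let desc = factory.metadata(gst::ELEMENT_METADATA_DESCRIPTION).unwrap_or("");
                    println!("{name}: {desc}");
                }
                None => println!("{name}: not found (plugin not installed?)"),
            }
        }
    }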


@ -11,8 +11,8 @@ use gst::prelude::*;
use gst::subclass::prelude::*;
use gst_audio::subclass::prelude::*;
use std::cmp;
use std::sync::Mutex;
use std::{cmp, u64};
use byte_slice_cast::*;


@ -18,7 +18,6 @@ use gst::subclass::prelude::*;
use std::mem;
use std::sync::Mutex;
use std::u64;
use byte_slice_cast::*;
@ -264,7 +263,7 @@ impl State {
// Drains everything
fn drain(&mut self, imp: &AudioLoudNorm) -> Result<gst::Buffer, gst::FlowError> {
gst::debug!(CAT, imp: imp, "Draining");
gst::debug!(CAT, imp = imp, "Draining");
let (pts, distance) = self.adapter.prev_pts();
let distance_samples = distance / self.info.bpf() as u64;
@ -299,7 +298,7 @@ impl State {
self.frame_type = FrameType::Final;
} else if src.is_empty() {
// Nothing to drain at all
gst::debug!(CAT, imp: imp, "No data to drain");
gst::debug!(CAT, imp = imp, "No data to drain");
return Err(gst::FlowError::Eos);
}
@ -342,7 +341,7 @@ impl State {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Calculated global loudness for first frame {} with peak {}",
global,
true_peak
@ -396,7 +395,7 @@ impl State {
self.prev_delta = self.delta[self.index];
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Initializing for first frame with gain adjustment of {}",
self.prev_delta
);
@ -458,7 +457,7 @@ impl State {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Applying gain adjustment {}-{}",
gain,
gain_next
@ -532,7 +531,7 @@ impl State {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Calculated global loudness {}, short term loudness {} and relative threshold {}",
global,
shortterm,
@ -555,7 +554,7 @@ impl State {
self.above_threshold = true;
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Above threshold now ({} >= {}, {} > -70)",
shortterm_out,
self.target_i,
@ -583,7 +582,7 @@ impl State {
self.prev_delta = self.delta[self.index];
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Calculated new gain adjustment {}",
self.prev_delta
);
@ -754,7 +753,7 @@ impl State {
// amount of samples the last frame is short to reach the correct read position.
if next_frame_size < FRAME_SIZE {
self.limiter_buf_index += FRAME_SIZE - next_frame_size;
if self.limiter_buf_index > self.limiter_buf.len() {
if self.limiter_buf_index >= self.limiter_buf.len() {
self.limiter_buf_index -= self.limiter_buf.len();
}
}
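
The `>` to `>=` change in the hunk above tightens the wrap-around check for the limiter's ring-buffer index: an index equal to the buffer length is already out of range and must wrap back to the start. A minimal standalone sketch of that invariant (not the element's actual code):

    // Sketch only: valid indices into a ring buffer of length `len` are 0..len-1,
    // so an index that reaches exactly `len` has to wrap as well.
    fn wrap_index(mut index: usize, len: usize) -> usize {
        if index >= len {
            index -= len;
        }
        index
    }

    fn main() {
        let len = 8;
        assert_eq!(wrap_index(7, len), 7); // in range, untouched
        assert_eq!(wrap_index(8, len), 0); // exactly `len`: the case the old `>` check missed
        assert_eq!(wrap_index(9, len), 1);
    }
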
@ -777,7 +776,7 @@ impl State {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Applying linear gain adjustment of {}",
self.offset
);
@ -856,7 +855,7 @@ impl State {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Found peak {} at sample {}, going to attack state at sample {} (gain reduction {}-{})",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -993,7 +992,7 @@ impl State {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Found new peak {} at sample {}, restarting attack state at sample {} (gain reduction {}-{})",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -1042,7 +1041,7 @@ impl State {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Found new peak {} at sample {}, adjusting attack state at sample {} (gain reduction {}-{})",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -1057,7 +1056,7 @@ impl State {
// to ensure that we at least sustain it for that long afterwards.
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Found new low peak {} at sample {} in attack state at sample {}",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -1073,7 +1072,7 @@ impl State {
// If we reached the target gain reduction, go into sustain state.
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Going to sustain state at sample {} (gain reduction {})",
smp_cnt,
self.gain_reduction[1]
@ -1152,7 +1151,7 @@ impl State {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Found new peak {} at sample {}, going back to attack state at sample {} (gain reduction {}-{})",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -1163,7 +1162,7 @@ impl State {
} else {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Found new peak {} at sample {}, going sustain further at sample {} (gain reduction {})",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -1190,7 +1189,7 @@ impl State {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Going to release state for sample {} at sample {} (gain reduction {}-1.0)",
smp_cnt + LIMITER_RELEASE_WINDOW,
smp_cnt,
@ -1260,7 +1259,7 @@ impl State {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Found new peak {} at sample {}, going back to attack state at sample {} (gain reduction {}-{})",
peak_value,
smp_cnt + LIMITER_ATTACK_WINDOW,
@ -1272,7 +1271,7 @@ impl State {
self.gain_reduction[1] = current_gain_reduction;
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Going from release to sustain state at sample {} because of low peak {} at sample {} (gain reduction {})",
smp_cnt,
peak_value,
@ -1313,7 +1312,7 @@ impl State {
self.limiter_state = LimiterState::Out;
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Leaving release state and going to out state at sample {}",
smp_cnt,
);
@ -1351,7 +1350,7 @@ impl State {
self.gain_reduction[1] = self.target_tp / max;
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Reducing gain for start of first frame by {} ({} > {}) and going to sustain state",
self.gain_reduction[1],
max,
@ -1367,7 +1366,7 @@ impl State {
let channels = self.info.channels() as usize;
let nb_samples = dst.len() / channels;
gst::debug!(CAT, imp: imp, "Running limiter for {} samples", nb_samples);
gst::debug!(CAT, imp = imp, "Running limiter for {} samples", nb_samples);
// For the first frame we can't adjust the gain before it smoothly anymore so instead
// apply the gain reduction immediately if we get above the threshold and move to sustain
@ -1536,12 +1535,12 @@ impl AudioLoudNorm {
_pad: &gst::Pad,
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::log!(CAT, imp: self, "Handling buffer {:?}", buffer);
gst::log!(CAT, imp = self, "Handling buffer {:?}", buffer);
let mut state_guard = self.state.borrow_mut();
let state = match *state_guard {
None => {
gst::error!(CAT, imp: self, "Not negotiated yet");
gst::error!(CAT, imp = self, "Not negotiated yet");
return Err(gst::FlowError::NotNegotiated);
}
Some(ref mut state) => state,
@ -1549,7 +1548,7 @@ impl AudioLoudNorm {
let mut outbufs = vec![];
if buffer.flags().contains(gst::BufferFlags::DISCONT) {
gst::debug!(CAT, imp: self, "Draining on discontinuity");
gst::debug!(CAT, imp = self, "Draining on discontinuity");
match state.drain(self) {
Ok(outbuf) => {
outbufs.push(outbuf);
@ -1567,7 +1566,7 @@ impl AudioLoudNorm {
drop(state_guard);
for buffer in outbufs {
gst::log!(CAT, imp: self, "Outputting buffer {:?}", buffer);
gst::log!(CAT, imp = self, "Outputting buffer {:?}", buffer);
self.srcpad.push(buffer)?;
}
@ -1577,17 +1576,17 @@ impl AudioLoudNorm {
fn sink_event(&self, pad: &gst::Pad, event: gst::Event) -> bool {
use gst::EventView;
gst::log!(CAT, obj: pad, "Handling event {:?}", event);
gst::log!(CAT, obj = pad, "Handling event {:?}", event);
match event.view() {
EventView::Caps(c) => {
let caps = c.caps();
gst::info!(CAT, obj: pad, "Got caps {:?}", caps);
gst::info!(CAT, obj = pad, "Got caps {:?}", caps);
let info = match gst_audio::AudioInfo::from_caps(caps) {
Ok(info) => info,
Err(_) => {
gst::error!(CAT, obj: pad, "Failed to parse caps");
gst::error!(CAT, obj = pad, "Failed to parse caps");
return false;
}
};
@ -1605,9 +1604,9 @@ impl AudioLoudNorm {
drop(state);
if let Some(outbuf) = outbuf {
gst::log!(CAT, imp: self, "Outputting buffer {:?}", outbuf);
gst::log!(CAT, imp = self, "Outputting buffer {:?}", outbuf);
if let Err(err) = self.srcpad.push(outbuf) {
gst::error!(CAT, imp: self, "Failed to push drained data: {}", err);
gst::error!(CAT, imp = self, "Failed to push drained data: {}", err);
return false;
}
@ -1627,11 +1626,11 @@ impl AudioLoudNorm {
drop(state);
if let Some(outbuf) = outbuf {
gst::log!(CAT, imp: self, "Outputting buffer {:?}", outbuf);
gst::log!(CAT, imp = self, "Outputting buffer {:?}", outbuf);
if let Err(err) = self.srcpad.push(outbuf) {
gst::error!(
CAT,
imp: self,
imp = self,
"Failed to push drained data on EOS: {}",
err
);
@ -1661,7 +1660,7 @@ impl AudioLoudNorm {
fn src_query(&self, pad: &gst::Pad, query: &mut gst::QueryRef) -> bool {
use gst::QueryViewMut;
gst::log!(CAT, obj: pad, "Handling query {:?}", query);
gst::log!(CAT, obj = pad, "Handling query {:?}", query);
match query.view_mut() {
QueryViewMut::Latency(q) => {
let mut peer_query = gst::query::Latency::new();
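
Most of the churn in this and the following files is a mechanical switch in the gst logging macros from the old `imp: self` / `obj: pad` target syntax to `imp = self` / `obj = pad`. A minimal sketch of the new form, assuming a debug category like the `CAT` statics used in these files; the category name and element here are illustrative only:

    use gst::prelude::*;
    use once_cell::sync::Lazy;

    static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
        gst::DebugCategory::new("example", gst::DebugColorFlags::empty(), Some("Example category"))
    });

    fn log_buffer(element: &gst::Element, buffer: &gst::Buffer) {
        // Old syntax (being replaced throughout these diffs):
        //     gst::log!(CAT, obj: element, "Handling buffer {:?}", buffer);
        // New syntax:
        gst::log!(CAT, obj = element, "Handling buffer {:?}", buffer);
    }

    fn main() {
        gst::init().unwrap();
        let identity = gst::ElementFactory::make("identity").build().unwrap();
        log_buffer(&identity, &gst::Buffer::new());
    }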


@ -113,7 +113,7 @@ impl AudioRNNoise {
let settings = *self.settings.lock().unwrap();
let mut buffer = gst::Buffer::with_size(available).map_err(|e| {
gst::error!(CAT, imp: self, "Failed to allocate buffer at EOS {:?}", e);
gst::error!(CAT, imp = self, "Failed to allocate buffer at EOS {:?}", e);
gst::FlowError::Flushing
})?;
@ -214,7 +214,7 @@ impl AudioRNNoise {
);
}
gst::trace!(CAT, imp: self, "Voice activity: {}", vad);
gst::trace!(CAT, imp = self, "Voice activity: {}", vad);
if vad < settings.vad_threshold {
out_frame.fill(0.0);
} else {
@ -237,8 +237,9 @@ impl AudioRNNoise {
gst::trace!(
CAT,
imp: self,
"rms: {}, level: {}, has_voice : {} ", rms,
imp = self,
"rms: {}, level: {}, has_voice : {} ",
rms,
level,
has_voice
);
@ -345,7 +346,7 @@ impl BaseTransformImpl for AudioRNNoise {
use gst::EventView;
if let EventView::Eos(_) = event.view() {
gst::debug!(CAT, imp: self, "Handling EOS");
gst::debug!(CAT, imp = self, "Handling EOS");
if self.drain().is_err() {
return false;
}
@ -361,7 +362,7 @@ impl BaseTransformImpl for AudioRNNoise {
let (live, mut min, mut max) = upstream_query.result();
gst::debug!(
CAT,
imp: self,
imp = self,
"Peer latency: live {} min {} max {}",
live,
min,
@ -406,7 +407,7 @@ impl AudioFilterImpl for AudioRNNoise {
})?;
}
gst::debug!(CAT, imp: self, "Set caps to {:?}", info);
gst::debug!(CAT, imp = self, "Set caps to {:?}", info);
let mut denoisers = vec![];
for _i in 0..info.channels() {


@ -12,7 +12,6 @@ use gst::subclass::prelude::*;
use gst_audio::subclass::prelude::*;
use gst_base::prelude::*;
use std::i32;
use std::sync::atomic;
use std::sync::Mutex;
@ -130,7 +129,7 @@ impl ObjectImpl for EbuR128Level {
let this = args[0].get::<super::EbuR128Level>().unwrap();
let imp = this.imp();
gst::info!(CAT, obj: this, "Resetting measurements",);
gst::info!(CAT, obj = this, "Resetting measurements",);
imp.reset.store(true, atomic::Ordering::SeqCst);
None
@ -176,7 +175,7 @@ impl ObjectImpl for EbuR128Level {
let mode = value.get().expect("type checked upstream");
gst::info!(
CAT,
imp: self,
imp = self,
"Changing mode from {:?} to {:?}",
settings.mode,
mode
@ -187,7 +186,7 @@ impl ObjectImpl for EbuR128Level {
let post_messages = value.get().expect("type checked upstream");
gst::info!(
CAT,
imp: self,
imp = self,
"Changing post-messages from {} to {}",
settings.post_messages,
post_messages
@ -198,7 +197,7 @@ impl ObjectImpl for EbuR128Level {
let interval = value.get::<u64>().unwrap().nseconds();
gst::info!(
CAT,
imp: self,
imp = self,
"Changing interval from {} to {}",
settings.interval,
interval,
@ -287,7 +286,7 @@ impl BaseTransformImpl for EbuR128Level {
// Drop state
let _ = self.state.borrow_mut().take();
gst::info!(CAT, imp: self, "Stopped");
gst::info!(CAT, imp = self, "Stopped");
Ok(())
}
@ -378,7 +377,7 @@ impl BaseTransformImpl for EbuR128Level {
Ok(loudness) => s.set("momentary-loudness", loudness),
Err(err) => gst::error!(
CAT,
imp: self,
imp = self,
"Failed to get momentary loudness: {}",
err
),
@ -390,7 +389,7 @@ impl BaseTransformImpl for EbuR128Level {
Ok(loudness) => s.set("shortterm-loudness", loudness),
Err(err) => gst::error!(
CAT,
imp: self,
imp = self,
"Failed to get shortterm loudness: {}",
err
),
@ -402,7 +401,7 @@ impl BaseTransformImpl for EbuR128Level {
Ok(loudness) => s.set("global-loudness", loudness),
Err(err) => gst::error!(
CAT,
imp: self,
imp = self,
"Failed to get global loudness: {}",
err
),
@ -412,7 +411,7 @@ impl BaseTransformImpl for EbuR128Level {
Ok(threshold) => s.set("relative-threshold", threshold),
Err(err) => gst::error!(
CAT,
imp: self,
imp = self,
"Failed to get relative threshold: {}",
err
),
@ -423,7 +422,12 @@ impl BaseTransformImpl for EbuR128Level {
match state.ebur128.loudness_range() {
Ok(range) => s.set("loudness-range", range),
Err(err) => {
gst::error!(CAT, imp: self, "Failed to get loudness range: {}", err)
gst::error!(
CAT,
imp = self,
"Failed to get loudness range: {}",
err
)
}
}
}
@ -436,7 +440,7 @@ impl BaseTransformImpl for EbuR128Level {
match peaks {
Ok(peaks) => s.set("sample-peak", peaks),
Err(err) => {
gst::error!(CAT, imp: self, "Failed to get sample peaks: {}", err)
gst::error!(CAT, imp = self, "Failed to get sample peaks: {}", err)
}
}
}
@ -449,12 +453,12 @@ impl BaseTransformImpl for EbuR128Level {
match peaks {
Ok(peaks) => s.set("true-peak", peaks),
Err(err) => {
gst::error!(CAT, imp: self, "Failed to get true peaks: {}", err)
gst::error!(CAT, imp = self, "Failed to get true peaks: {}", err)
}
}
}
gst::debug!(CAT, imp: self, "Posting message {}", s);
gst::debug!(CAT, imp = self, "Posting message {}", s);
let msg = gst::message::Element::builder(s).src(&*self.obj()).build();
@ -505,7 +509,7 @@ impl AudioFilterImpl for EbuR128Level {
}
fn setup(&self, info: &gst_audio::AudioInfo) -> Result<(), gst::LoggableError> {
gst::debug!(CAT, imp: self, "Configured for caps {:?}", info);
gst::debug!(CAT, imp = self, "Configured for caps {:?}", info);
let settings = *self.settings.lock().unwrap();
@ -568,7 +572,7 @@ impl AudioFilterImpl for EbuR128Level {
val => {
gst::debug!(
CAT,
imp: self,
imp = self,
"Unknown channel position {:?}, ignoring channel",
val
);
@ -746,12 +750,12 @@ fn interleaved_channel_data_into_slice<'a, T: FromByteSlice>(
) -> Result<&'a [T], gst::FlowError> {
buf.plane_data(0)
.map_err(|err| {
gst::error!(CAT, imp: imp, "Failed to get audio data: {}", err);
gst::error!(CAT, imp = imp, "Failed to get audio data: {}", err);
gst::FlowError::Error
})?
.as_slice_of::<T>()
.map_err(|err| {
gst::error!(CAT, imp: imp, "Failed to handle audio data: {}", err);
gst::error!(CAT, imp = imp, "Failed to handle audio data: {}", err);
gst::FlowError::Error
})
}
@ -765,12 +769,12 @@ fn non_interleaved_channel_data_into_slices<'a, T: FromByteSlice>(
.map(|c| {
buf.plane_data(c)
.map_err(|err| {
gst::error!(CAT, imp: imp, "Failed to get audio data: {}", err);
gst::error!(CAT, imp = imp, "Failed to get audio data: {}", err);
gst::FlowError::Error
})?
.as_slice_of::<T>()
.map_err(|err| {
gst::error!(CAT, imp: imp, "Failed to handle audio data: {}", err);
gst::error!(CAT, imp = imp, "Failed to handle audio data: {}", err);
gst::FlowError::Error
})
})


@ -224,7 +224,7 @@ impl HrtfRender {
let mut outbuf =
gst_audio::AudioBufferRef::from_buffer_ref_writable(outbuf, &state.outinfo).map_err(
|err| {
gst::error!(CAT, imp: self, "Failed to map buffer : {}", err);
gst::error!(CAT, imp = self, "Failed to map buffer : {}", err);
gst::FlowError::Error
},
)?;
@ -248,13 +248,13 @@ impl HrtfRender {
while state.adapter.available() >= inblksz {
let inbuf = state.adapter.take_buffer(inblksz).map_err(|_| {
gst::error!(CAT, imp: self, "Failed to map buffer");
gst::error!(CAT, imp = self, "Failed to map buffer");
gst::FlowError::Error
})?;
let inbuf = gst_audio::AudioBuffer::from_buffer_readable(inbuf, &state.ininfo)
.map_err(|_| {
gst::error!(CAT, imp: self, "Failed to map buffer");
gst::error!(CAT, imp = self, "Failed to map buffer");
gst::FlowError::Error
})?;
@ -624,7 +624,7 @@ impl BaseTransformImpl for HrtfRender {
gst::log!(
CAT,
imp: self,
imp = self,
"Adapter size: {}, input size {}, transformed size {}",
state.adapter.available(),
size,
@ -649,7 +649,7 @@ impl BaseTransformImpl for HrtfRender {
if direction == gst::PadDirection::Sink {
s.set("channels", 2);
s.set("channel-mask", 0x3);
s.set("channel-mask", gst::Bitmask(0x3));
} else {
let settings = self.settings.lock().unwrap();
if let Some(objs) = &settings.spatial_objects {
@ -670,7 +670,7 @@ impl BaseTransformImpl for HrtfRender {
gst::debug!(
CAT,
imp: self,
imp = self,
"Transformed caps from {} to {} in direction {:?}",
caps,
other_caps,
@ -741,7 +741,7 @@ impl BaseTransformImpl for HrtfRender {
adapter: gst_base::UniqueAdapter::new(),
});
gst::debug!(CAT, imp: self, "Configured for caps {}", incaps);
gst::debug!(CAT, imp = self, "Configured for caps {}", incaps);
Ok(())
}
@ -749,7 +749,7 @@ impl BaseTransformImpl for HrtfRender {
fn sink_event(&self, event: gst::Event) -> bool {
use gst::EventView;
gst::debug!(CAT, imp: self, "Handling event {:?}", event);
gst::debug!(CAT, imp = self, "Handling event {:?}", event);
match event.view() {
EventView::FlushStop(_) => {
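
In the hrtfrender caps-transform hunk above, the stereo `channel-mask` is now written as `gst::Bitmask(0x3)` rather than the bare integer `0x3`; my reading is that the caps field is a bitmask-typed value, so the wrapper stores it with the right type instead of as an integer. A small standalone sketch (not the element's code):

    fn main() {
        gst::init().unwrap();

        // The `channel-mask` field on audio/x-raw caps is a bitmask; wrapping the
        // raw value in gst::Bitmask stores it as that type rather than as an int.
        let caps = gst::Caps::builder("audio/x-raw")
            .field("channels", 2i32)
            .field("channel-mask", gst::Bitmask(0x3))
            .build();

        // Prints something like:
        // audio/x-raw, channels=(int)2, channel-mask=(bitmask)0x0000000000000003
        println!("{caps}");
    }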


@ -198,7 +198,7 @@ fn basic_two_channels() {
#[test]
fn silence() {
run_test("wave=silence", None, 1000, 1024, 1, std::f64::NEG_INFINITY);
run_test("wave=silence", None, 1000, 1024, 1, f64::NEG_INFINITY);
}
#[test]
@ -228,7 +228,7 @@ fn below_threshold() {
1000,
1024,
1,
std::f64::NEG_INFINITY,
f64::NEG_INFINITY,
);
}
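
The test changes above swap `std::f64::NEG_INFINITY` for the associated constant on the primitive type. Both are the same value; the module-level constant is simply the legacy spelling. A trivial check:

    fn main() {
        let new_style = f64::NEG_INFINITY;      // associated constant, used by the updated tests
        let old_style = std::f64::NEG_INFINITY; // legacy module-level constant
        assert_eq!(new_style, old_style);
        assert!(new_style.is_infinite() && new_style.is_sign_negative());
    }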


@ -115,7 +115,7 @@ impl AudioDecoderImpl for ClaxonDec {
}
fn set_format(&self, caps: &gst::Caps) -> Result<(), gst::LoggableError> {
gst::debug!(CAT, imp: self, "Setting format {:?}", caps);
gst::debug!(CAT, imp = self, "Setting format {:?}", caps);
let mut audio_info: Option<gst_audio::AudioInfo> = None;
@ -124,15 +124,15 @@ impl AudioDecoderImpl for ClaxonDec {
let streamheaders = streamheaders.as_slice();
if streamheaders.len() < 2 {
gst::debug!(CAT, imp: self, "Not enough streamheaders, trying in-band");
gst::debug!(CAT, imp = self, "Not enough streamheaders, trying in-band");
} else {
let ident_buf = streamheaders[0].get::<Option<gst::Buffer>>();
if let Ok(Some(ident_buf)) = ident_buf {
gst::debug!(CAT, imp: self, "Got streamheader buffers");
gst::debug!(CAT, imp = self, "Got streamheader buffers");
let inmap = ident_buf.map_readable().unwrap();
if inmap[0..7] != [0x7f, b'F', b'L', b'A', b'C', 0x01, 0x00] {
gst::debug!(CAT, imp: self, "Unknown streamheader format");
gst::debug!(CAT, imp = self, "Unknown streamheader format");
} else if let Ok(tstreaminfo) = claxon_streaminfo(&inmap[13..]) {
if let Ok(taudio_info) = gstaudioinfo(&tstreaminfo) {
// To speed up negotiation
@ -142,7 +142,7 @@ impl AudioDecoderImpl for ClaxonDec {
{
gst::debug!(
CAT,
imp: self,
imp = self,
"Error to negotiate output from based on in-caps streaminfo"
);
}
@ -165,7 +165,7 @@ impl AudioDecoderImpl for ClaxonDec {
&self,
inbuf: Option<&gst::Buffer>,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::debug!(CAT, imp: self, "Handling buffer {:?}", inbuf);
gst::debug!(CAT, imp = self, "Handling buffer {:?}", inbuf);
let inbuf = match inbuf {
None => return Ok(gst::FlowSuccess::Ok),
@ -173,7 +173,7 @@ impl AudioDecoderImpl for ClaxonDec {
};
let inmap = inbuf.map_readable().map_err(|_| {
gst::error!(CAT, imp: self, "Failed to buffer readable");
gst::error!(CAT, imp = self, "Failed to buffer readable");
gst::FlowError::Error
})?;
@ -181,18 +181,18 @@ impl AudioDecoderImpl for ClaxonDec {
let state = state_guard.as_mut().ok_or(gst::FlowError::NotNegotiated)?;
if inmap.as_slice() == b"fLaC" {
gst::debug!(CAT, imp: self, "fLaC buffer received");
gst::debug!(CAT, imp = self, "fLaC buffer received");
} else if inmap[0] & 0x7F == 0x00 {
gst::debug!(CAT, imp: self, "Streaminfo header buffer received");
gst::debug!(CAT, imp = self, "Streaminfo header buffer received");
return self.handle_streaminfo_header(state, inmap.as_ref());
} else if inmap[0] == 0b1111_1111 && inmap[1] & 0b1111_1100 == 0b1111_1000 {
gst::debug!(CAT, imp: self, "Data buffer received");
gst::debug!(CAT, imp = self, "Data buffer received");
return self.handle_data(state, inmap.as_ref());
} else {
// info about other headers in flacparse and https://xiph.org/flac/format.html
gst::debug!(
CAT,
imp: self,
imp = self,
"Other header buffer received {:?}",
inmap[0] & 0x7F
);
@ -220,7 +220,7 @@ impl ClaxonDec {
gst::debug!(
CAT,
imp: self,
imp = self,
"Successfully parsed headers: {:?}",
audio_info
);


@ -17,7 +17,6 @@ use gst_base::subclass::prelude::*;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Mutex;
use std::{f64, i32};
use byte_slice_cast::*;
@ -192,7 +191,7 @@ impl CsoundFilter {
(avail / state.in_info.channels() as usize) * state.out_info.channels() as usize;
let mut buffer = gst::Buffer::with_size(out_bytes).map_err(|e| {
gst::error!(CAT, imp: self, "Failed to allocate buffer at EOS {:?}", e);
gst::error!(CAT, imp = self, "Failed to allocate buffer at EOS {:?}", e);
gst::FlowError::Flushing
})?;
@ -247,7 +246,7 @@ impl CsoundFilter {
gst::log!(
CAT,
imp: self,
imp = self,
"Generating output at: {} - duration: {}",
pts.display(),
duration.display(),
@ -482,7 +481,7 @@ impl BaseTransformImpl for CsoundFilter {
csound.reset();
let _ = self.state.lock().unwrap().take();
gst::info!(CAT, imp: self, "Stopped");
gst::info!(CAT, imp = self, "Stopped");
Ok(())
}
@ -491,7 +490,7 @@ impl BaseTransformImpl for CsoundFilter {
use gst::EventView;
if let EventView::Eos(_) = event.view() {
gst::log!(CAT, imp: self, "Handling Eos");
gst::log!(CAT, imp = self, "Handling Eos");
if self.drain().is_err() {
return false;
}
@ -536,7 +535,7 @@ impl BaseTransformImpl for CsoundFilter {
gst::debug!(
CAT,
imp: self,
imp = self,
"Transformed caps from {} to {} in direction {:?}",
caps,
other_caps,


@ -120,7 +120,7 @@ impl AudioDecoderImpl for LewtonDec {
}
fn set_format(&self, caps: &gst::Caps) -> Result<(), gst::LoggableError> {
gst::debug!(CAT, imp: self, "Setting format {:?}", caps);
gst::debug!(CAT, imp = self, "Setting format {:?}", caps);
// When the caps are changing we require new headers
let mut state_guard = self.state.borrow_mut();
@ -138,7 +138,7 @@ impl AudioDecoderImpl for LewtonDec {
if let Ok(Some(streamheaders)) = s.get_optional::<gst::ArrayRef>("streamheader") {
let streamheaders = streamheaders.as_slice();
if streamheaders.len() < 3 {
gst::debug!(CAT, imp: self, "Not enough streamheaders, trying in-band");
gst::debug!(CAT, imp = self, "Not enough streamheaders, trying in-band");
return Ok(());
}
@ -148,7 +148,7 @@ impl AudioDecoderImpl for LewtonDec {
if let (Ok(Some(ident_buf)), Ok(Some(comment_buf)), Ok(Some(setup_buf))) =
(ident_buf, comment_buf, setup_buf)
{
gst::debug!(CAT, imp: self, "Got streamheader buffers");
gst::debug!(CAT, imp = self, "Got streamheader buffers");
state.header_bufs = (Some(ident_buf), Some(comment_buf), Some(setup_buf));
}
}
@ -157,7 +157,7 @@ impl AudioDecoderImpl for LewtonDec {
}
fn flush(&self, _hard: bool) {
gst::debug!(CAT, imp: self, "Flushing");
gst::debug!(CAT, imp = self, "Flushing");
let mut state_guard = self.state.borrow_mut();
if let Some(ref mut state) = *state_guard {
@ -169,7 +169,7 @@ impl AudioDecoderImpl for LewtonDec {
&self,
inbuf: Option<&gst::Buffer>,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::debug!(CAT, imp: self, "Handling buffer {:?}", inbuf);
gst::debug!(CAT, imp = self, "Handling buffer {:?}", inbuf);
let inbuf = match inbuf {
None => return Ok(gst::FlowSuccess::Ok),
@ -177,7 +177,7 @@ impl AudioDecoderImpl for LewtonDec {
};
let inmap = inbuf.map_readable().map_err(|_| {
gst::error!(CAT, imp: self, "Failed to buffer readable");
gst::error!(CAT, imp = self, "Failed to buffer readable");
gst::FlowError::Error
})?;
@ -191,7 +191,7 @@ impl AudioDecoderImpl for LewtonDec {
if state.headerset.is_some() {
return Ok(gst::FlowSuccess::Ok);
} else {
gst::error!(CAT, imp: self, "Got empty packet before all headers");
gst::error!(CAT, imp = self, "Got empty packet before all headers");
return Err(gst::FlowError::Error);
}
}
@ -219,14 +219,14 @@ impl LewtonDec {
) -> Result<gst::FlowSuccess, gst::FlowError> {
// ident header
if indata[0] == 0x01 {
gst::debug!(CAT, imp: self, "Got ident header buffer");
gst::debug!(CAT, imp = self, "Got ident header buffer");
state.header_bufs = (Some(inbuf.clone()), None, None);
} else if indata[0] == 0x03 {
// comment header
if state.header_bufs.0.is_none() {
gst::warning!(CAT, imp: self, "Got comment header before ident header");
gst::warning!(CAT, imp = self, "Got comment header before ident header");
} else {
gst::debug!(CAT, imp: self, "Got comment header buffer");
gst::debug!(CAT, imp = self, "Got comment header buffer");
state.header_bufs.1 = Some(inbuf.clone());
}
} else if indata[0] == 0x05 {
@ -234,11 +234,11 @@ impl LewtonDec {
if state.header_bufs.0.is_none() || state.header_bufs.1.is_none() {
gst::warning!(
CAT,
imp: self,
imp = self,
"Got setup header before ident/comment header"
);
} else {
gst::debug!(CAT, imp: self, "Got setup header buffer");
gst::debug!(CAT, imp = self, "Got setup header buffer");
state.header_bufs.2 = Some(inbuf.clone());
}
}
@ -263,7 +263,7 @@ impl LewtonDec {
// First try to parse the headers
let ident_map = ident_buf.map_readable().map_err(|_| {
gst::error!(CAT, imp: self, "Failed to map ident buffer readable");
gst::error!(CAT, imp = self, "Failed to map ident buffer readable");
gst::FlowError::Error
})?;
let ident = lewton::header::read_header_ident(ident_map.as_ref()).map_err(|err| {
@ -276,7 +276,7 @@ impl LewtonDec {
})?;
let comment_map = comment_buf.map_readable().map_err(|_| {
gst::error!(CAT, imp: self, "Failed to map comment buffer readable");
gst::error!(CAT, imp = self, "Failed to map comment buffer readable");
gst::FlowError::Error
})?;
let comment = lewton::header::read_header_comment(comment_map.as_ref()).map_err(|err| {
@ -289,7 +289,7 @@ impl LewtonDec {
})?;
let setup_map = setup_buf.map_readable().map_err(|_| {
gst::error!(CAT, imp: self, "Failed to map setup buffer readable");
gst::error!(CAT, imp = self, "Failed to map setup buffer readable");
gst::FlowError::Error
})?;
let setup = lewton::header::read_header_setup(
@ -327,7 +327,7 @@ impl LewtonDec {
if gst_audio::channel_reorder_map(from, to, &mut map[..channels]).is_err() {
gst::error!(
CAT,
imp: self,
imp = self,
"Failed to generate channel reorder map from {:?} to {:?}",
from,
to,
@ -343,7 +343,7 @@ impl LewtonDec {
gst::debug!(
CAT,
imp: self,
imp = self,
"Successfully parsed headers: {:?}",
audio_info
);
@ -396,7 +396,7 @@ impl LewtonDec {
}
let sample_count = decoded.samples.len() / audio_info.channels() as usize;
gst::debug!(CAT, imp: self, "Got {} decoded samples", sample_count);
gst::debug!(CAT, imp = self, "Got {} decoded samples", sample_count);
if sample_count == 0 {
return self.obj().finish_frame(None, 1);


@ -0,0 +1,50 @@
[package]
name = "gst-plugin-speechmatics"
version.workspace = true
authors = ["Mathieu Duponchelle <mathieu@centricular.com>"]
repository.workspace = true
license = "MPL-2.0"
description = "GStreamer Speechmatics plugin"
edition.workspace = true
rust-version.workspace = true
[dependencies]
futures = "0.3"
gst.workspace = true
gst-base.workspace = true
gst-audio = { workspace = true, features = ["v1_16"] }
tokio = { version = "1", features = [ "full" ] }
async-tungstenite = { version = "0.28", features = ["tokio", "tokio-runtime", "tokio-native-tls"] }
once_cell.workspace = true
serde = { version = "1", features = ["derive"] }
serde_json = "1"
atomic_refcell = "0.1"
http = { version = "1.0" }
url = "2"
[lib]
name = "gstspeechmatics"
crate-type = ["cdylib", "rlib"]
path = "src/lib.rs"
[build-dependencies]
gst-plugin-version-helper.workspace = true
[features]
static = []
capi = []
doc = ["gst/v1_18"]
[package.metadata.capi]
min_version = "0.9.21"
[package.metadata.capi.header]
enabled = false
[package.metadata.capi.library]
install_subdir = "gstreamer-1.0"
versioning = false
import_library = false
[package.metadata.capi.pkg_config]
requires_private = "gstreamer-1.0, gstreamer-base-1.0, gobject-2.0, glib-2.0, gmodule-2.0"


@ -0,0 +1,373 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.


@ -0,0 +1,3 @@
fn main() {
gst_plugin_version_helper::info()
}


@ -0,0 +1,36 @@
// Copyright (C) 2024 Mathieu Duponchelle <mathieu@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(clippy::non_send_fields_in_send_ty, unused_doc_comments)]
#![recursion_limit = "128"]
/**
* plugin-speechmatics:
*
* Since: plugins-rs-0.14.0
*/
use gst::glib;
mod transcriber;
fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
transcriber::register(plugin)?;
Ok(())
}
gst::plugin_define!(
speechmatics,
env!("CARGO_PKG_DESCRIPTION"),
plugin_init,
concat!(env!("CARGO_PKG_VERSION"), "-", env!("COMMIT_ID")),
"Proprietary",
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_REPOSITORY"),
env!("BUILD_REL_DATE")
);

File diff suppressed because it is too large.


@ -0,0 +1,33 @@
// Copyright (C) 2024 Mathieu Duponchelle <mathieu@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub struct Transcriber(ObjectSubclass<imp::Transcriber>) @extends gst::Element, gst::Object, @implements gst::ChildProxy;
}
glib::wrapper! {
pub struct TranscriberSrcPad(ObjectSubclass<imp::TranscriberSrcPad>) @extends gst::Pad, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
#[cfg(feature = "doc")]
{
TranscriberSrcPad::static_type().mark_as_plugin_api(gst::PluginAPIFlags::empty());
}
gst::Element::register(
Some(plugin),
"speechmaticstranscriber",
gst::Rank::NONE,
Transcriber::static_type(),
)
}


@ -8,10 +8,11 @@ to respect their legal/licensing restrictions.
## Spotify Credentials
This plugin requires a [Spotify Premium](https://www.spotify.com/premium/) account configured
with a [device password](https://www.spotify.com/us/account/set-device-password/).
This plugin requires a [Spotify Premium](https://www.spotify.com/premium/) account.
If your account is linked with Facebook, you'll need to setup
a [device username and password](https://www.spotify.com/us/account/set-device-password/).
You can then set the device username and password using the `username` and `password` properties.
Those username and password are then set using the `username` and `password` properties.
You may also want to cache credentials and downloaded files, see the `cache-` properties on the element.
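
Going by the README text above, the credentials end up on the element's `username` and `password` properties. A minimal configuration sketch, assuming the element is registered as `spotifyaudiosrc` and the plugin is installed; the credential values are placeholders:

    use gst::prelude::*;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        gst::init()?;

        let src = gst::ElementFactory::make("spotifyaudiosrc")
            .property("username", "device-username") // placeholder
            .property("password", "device-password") // placeholder
            .build()?;

        println!("created {}", src.name());
        Ok(())
    }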


@ -30,13 +30,13 @@ impl Settings {
pub fn properties() -> Vec<glib::ParamSpec> {
vec![glib::ParamSpecString::builder("username")
.nick("Username")
.blurb("Spotify device username from https://www.spotify.com/us/account/set-device-password/")
.blurb("Spotify username, Facebook accounts need a device username from https://www.spotify.com/us/account/set-device-password/")
.default_value(Some(""))
.mutable_ready()
.build(),
glib::ParamSpecString::builder("password")
.nick("Password")
.blurb("Spotify device password from https://www.spotify.com/us/account/set-device-password/")
.blurb("Spotify password, Facebook accounts need a device password from https://www.spotify.com/us/account/set-device-password/")
.default_value(Some(""))
.mutable_ready()
.build(),
@ -135,7 +135,7 @@ impl Settings {
if !self.username.is_empty() && self.username != cached_cred.username {
gst::debug!(
cat,
obj: &src,
obj = &src,
"ignore cached credentials for user {} which mismatch user {}",
cached_cred.username,
self.username
@ -143,7 +143,7 @@ impl Settings {
} else {
gst::debug!(
cat,
obj: &src,
obj = &src,
"reuse cached credentials for user {}",
cached_cred.username
);
@ -162,7 +162,7 @@ impl Settings {
gst::debug!(
cat,
obj: &src,
obj = &src,
"credentials not in cache or cached credentials invalid",
);


@ -6,7 +6,7 @@
//
// SPDX-License-Identifier: MPL-2.0
use std::sync::{mpsc, Arc, Mutex, MutexGuard};
use std::sync::{mpsc, Arc, Mutex};
use futures::future::{AbortHandle, Abortable, Aborted};
use once_cell::sync::Lazy;
@ -67,15 +67,39 @@ struct Settings {
}
#[derive(Default)]
pub struct SpotifyAudioSrc {
setup_thread: Mutex<Option<SetupThread>>,
state: Arc<Mutex<Option<State>>>,
settings: Mutex<Settings>,
enum SetupThread {
#[default]
None,
Pending {
thread_handle: Option<std::thread::JoinHandle<Result<anyhow::Result<()>, Aborted>>>,
abort_handle: AbortHandle,
},
Cancelled,
Done,
}
struct SetupThread {
thread_handle: std::thread::JoinHandle<Result<anyhow::Result<()>, Aborted>>,
abort_handle: AbortHandle,
impl SetupThread {
fn abort(&mut self) {
// Cancel setup thread if it is pending and not done yet
if matches!(self, SetupThread::None | SetupThread::Done) {
return;
}
if let SetupThread::Pending {
ref abort_handle, ..
} = *self
{
abort_handle.abort();
}
*self = SetupThread::Cancelled;
}
}
#[derive(Default)]
pub struct SpotifyAudioSrc {
setup_thread: Mutex<SetupThread>,
state: Arc<Mutex<Option<State>>>,
settings: Mutex<Settings>,
}
#[glib::object_subclass]
@ -172,23 +196,20 @@ impl BaseSrcImpl for SpotifyAudioSrc {
}
{
let setup_thread = self.setup_thread.lock().unwrap();
if setup_thread.is_some() {
// already starting
return Ok(());
// If not started yet and not cancelled, start the setup
let mut setup_thread = self.setup_thread.lock().unwrap();
assert!(!matches!(&*setup_thread, SetupThread::Cancelled));
if matches!(&*setup_thread, SetupThread::None) {
self.start_setup(&mut setup_thread);
}
self.start_setup(setup_thread);
}
Ok(())
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
// stop the setup if it's not completed yet
self.cancel_setup();
if let Some(state) = self.state.lock().unwrap().take() {
gst::debug!(CAT, imp: self, "stopping");
gst::debug!(CAT, imp = self, "stopping");
state.player.stop();
state.player_channel_handle.abort();
// FIXME: not sure why this is needed to unblock BufferSink::write(), dropping State should drop the receiver
@ -199,9 +220,17 @@ impl BaseSrcImpl for SpotifyAudioSrc {
}
fn unlock(&self) -> Result<(), gst::ErrorMessage> {
self.cancel_setup();
let mut setup_thread = self.setup_thread.lock().unwrap();
setup_thread.abort();
Ok(())
}
self.parent_unlock()
fn unlock_stop(&self) -> Result<(), gst::ErrorMessage> {
let mut setup_thread = self.setup_thread.lock().unwrap();
if matches!(&*setup_thread, SetupThread::Cancelled) {
*setup_thread = SetupThread::None;
}
Ok(())
}
}
@ -216,30 +245,47 @@ impl PushSrcImpl for SpotifyAudioSrc {
};
if !state_set {
let setup_thread = self.setup_thread.lock().unwrap();
if setup_thread.is_none() {
// unlock() could potentially cancel the setup, and create() can be called after unlock() without going through start() again.
self.start_setup(setup_thread);
// If not started yet and not cancelled, start the setup
let mut setup_thread = self.setup_thread.lock().unwrap();
if matches!(&*setup_thread, SetupThread::Cancelled) {
return Err(gst::FlowError::Flushing);
}
if matches!(&*setup_thread, SetupThread::None) {
self.start_setup(&mut setup_thread);
}
}
{
// wait for the setup to be completed
let mut setup_thread = self.setup_thread.lock().unwrap();
if let Some(setup) = setup_thread.take() {
let res = setup.thread_handle.join().unwrap();
if let SetupThread::Pending {
ref mut thread_handle,
..
} = *setup_thread
{
let thread_handle = thread_handle.take().expect("Waiting multiple times");
drop(setup_thread);
let res = thread_handle.join().unwrap();
match res {
Err(_aborted) => {
gst::debug!(CAT, imp: self, "setup has been cancelled");
gst::debug!(CAT, imp = self, "setup has been cancelled");
setup_thread = self.setup_thread.lock().unwrap();
*setup_thread = SetupThread::Cancelled;
return Err(gst::FlowError::Flushing);
}
Ok(Err(err)) => {
gst::error!(CAT, imp: self, "failed to start: {err:?}");
gst::error!(CAT, imp = self, "failed to start: {err:?}");
gst::element_imp_error!(self, gst::ResourceError::Settings, ["{err:?}"]);
setup_thread = self.setup_thread.lock().unwrap();
*setup_thread = SetupThread::None;
return Err(gst::FlowError::Error);
}
Ok(Ok(_)) => {}
Ok(Ok(_)) => {
setup_thread = self.setup_thread.lock().unwrap();
*setup_thread = SetupThread::Done;
}
}
}
}
@ -249,15 +295,15 @@ impl PushSrcImpl for SpotifyAudioSrc {
match state.receiver.recv().unwrap() {
Message::Buffer(buffer) => {
gst::log!(CAT, imp: self, "got buffer of size {}", buffer.size());
gst::log!(CAT, imp = self, "got buffer of size {}", buffer.size());
Ok(CreateSuccess::NewBuffer(buffer))
}
Message::Eos => {
gst::debug!(CAT, imp: self, "eos");
gst::debug!(CAT, imp = self, "eos");
Err(gst::FlowError::Eos)
}
Message::Unavailable => {
gst::error!(CAT, imp: self, "track is not available");
gst::error!(CAT, imp = self, "track is not available");
gst::element_imp_error!(
self,
gst::ResourceError::NotFound,
@ -306,7 +352,7 @@ impl URIHandlerImpl for SpotifyAudioSrc {
}
fn set_uri(&self, uri: &str) -> Result<(), glib::Error> {
gst::debug!(CAT, imp: self, "set URI: {}", uri);
gst::debug!(CAT, imp = self, "set URI: {}", uri);
let url = url::Url::parse(uri)
.map_err(|e| glib::Error::new(gst::URIError::BadUri, &format!("{e:?}")))?;
@ -318,7 +364,7 @@ impl URIHandlerImpl for SpotifyAudioSrc {
self.obj().set_property(&key, value.as_ref());
}
_ => {
gst::warning!(CAT, imp: self, "unsupported query: {}={}", key, value);
gst::warning!(CAT, imp = self, "unsupported query: {}={}", key, value);
}
}
}
@ -331,7 +377,9 @@ impl URIHandlerImpl for SpotifyAudioSrc {
}
impl SpotifyAudioSrc {
fn start_setup(&self, mut setup_thread: MutexGuard<Option<SetupThread>>) {
fn start_setup(&self, setup_thread: &mut SetupThread) {
assert!(matches!(setup_thread, SetupThread::None));
let self_ = self.to_owned();
// run the runtime from another thread to prevent the "start a runtime from within a runtime" panic
@ -344,10 +392,10 @@ impl SpotifyAudioSrc {
})
});
setup_thread.replace(SetupThread {
thread_handle,
*setup_thread = SetupThread::Pending {
thread_handle: Some(thread_handle),
abort_handle,
});
};
}
async fn setup(&self) -> anyhow::Result<()> {
@ -372,7 +420,7 @@ impl SpotifyAudioSrc {
let session = common.connect_session(src.clone(), &CAT).await?;
let track = common.track_id()?;
gst::debug!(CAT, imp: self, "Requesting bitrate {:?}", bitrate);
gst::debug!(CAT, imp = self, "Requesting bitrate {:?}", bitrate);
(session, track, bitrate)
};
@ -420,12 +468,4 @@ impl SpotifyAudioSrc {
Ok(())
}
fn cancel_setup(&self) {
let mut setup_thread = self.setup_thread.lock().unwrap();
if let Some(setup) = setup_thread.take() {
setup.abort_handle.abort();
}
}
}
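
The refactor above replaces the old `Mutex<Option<SetupThread>>` with an explicit four-state machine, so a setup cancelled by `unlock()` is remembered until `unlock_stop()` resets it. A condensed, standalone sketch of the states and the two transitions added in the diff; the variant names match the diff, but the payloads and real thread handles are omitted:

    // Condensed sketch (not the element's code) of the setup-thread state machine.
    #[derive(Debug, PartialEq)]
    enum SetupThread {
        None,      // no setup started yet
        Pending,   // setup thread running (the real variant also stores join/abort handles)
        Cancelled, // unlock() aborted the setup; create() must return Flushing
        Done,      // setup completed successfully
    }

    impl SetupThread {
        // unlock(): only a pending setup gets aborted and marked Cancelled.
        fn abort(&mut self) {
            if matches!(self, SetupThread::None | SetupThread::Done) {
                return;
            }
            *self = SetupThread::Cancelled;
        }

        // unlock_stop(): a cancelled setup is reset so a later start()/create() can retry.
        fn reset_if_cancelled(&mut self) {
            if *self == SetupThread::Cancelled {
                *self = SetupThread::None;
            }
        }
    }

    fn main() {
        let mut s = SetupThread::Pending;
        s.abort();
        assert_eq!(s, SetupThread::Cancelled);
        s.reset_if_cancelled();
        assert_eq!(s, SetupThread::None);
    }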


@ -0,0 +1,103 @@
#!/usr/bin/python3
#
# Copied from gstreamer.git/ci/gitlab/trigger_cerbero_pipeline.py
import time
import os
import sys
import gitlab
CERBERO_PROJECT = 'gstreamer/cerbero'
class Status:
FAILED = 'failed'
MANUAL = 'manual'
CANCELED = 'canceled'
SUCCESS = 'success'
SKIPPED = 'skipped'
CREATED = 'created'
@classmethod
def is_finished(cls, state):
return state in [
cls.FAILED,
cls.MANUAL,
cls.CANCELED,
cls.SUCCESS,
cls.SKIPPED,
]
def fprint(msg):
print(msg, end="")
sys.stdout.flush()
if __name__ == "__main__":
server = os.environ['CI_SERVER_URL']
gl = gitlab.Gitlab(server,
private_token=os.environ.get('GITLAB_API_TOKEN'),
job_token=os.environ.get('CI_JOB_TOKEN'))
def get_matching_user_project(project, branch):
cerbero = gl.projects.get(project)
# Search for matching branches, return only if the branch name matches
# exactly
for b in cerbero.branches.list(search=cerbero_branch, iterator=True):
if branch == b.name:
return cerbero
return None
cerbero = None
# We do not want to run on (often out of date) user upstream branch
if os.environ["CI_COMMIT_REF_NAME"] != os.environ['CERBERO_UPSTREAM_BRANCH']:
try:
cerbero_name = f'{os.environ["CI_PROJECT_NAMESPACE"]}/cerbero'
cerbero_branch = os.environ["CI_COMMIT_REF_NAME"]
cerbero = get_matching_user_project(cerbero_name, cerbero_branch)
except gitlab.exceptions.GitlabGetError:
pass
if cerbero is None:
cerbero_name = CERBERO_PROJECT
cerbero_branch = os.environ["CERBERO_UPSTREAM_BRANCH"]
cerbero = gl.projects.get(cerbero_name)
fprint(f"-> Triggering on branch {cerbero_branch} in {cerbero_name}\n")
# CI_PROJECT_URL is not necessarily the project where the branch we need to
# build resides, for instance merge request pipelines can be run on
# 'gstreamer' namespace. Fetch the branch name in the same way, just in
# case it breaks in the future.
if 'CI_MERGE_REQUEST_SOURCE_PROJECT_URL' in os.environ:
project_url = os.environ['CI_MERGE_REQUEST_SOURCE_PROJECT_URL']
project_branch = os.environ['CI_MERGE_REQUEST_SOURCE_BRANCH_NAME']
else:
project_url = os.environ['CI_PROJECT_URL']
project_branch = os.environ['CI_COMMIT_REF_NAME']
variables = {
"CI_GST_PLUGINS_RS_URL": project_url,
"CI_GST_PLUGINS_RS_REF_NAME": project_branch,
# This tells cerbero CI that this is a pipeline started via the
# trigger API, which means it can use a deps cache instead of
# building from scratch.
"CI_GSTREAMER_TRIGGERED": "true",
}
pipe = cerbero.trigger_pipeline(
token=os.environ['CI_JOB_TOKEN'],
ref=cerbero_branch,
variables=variables,
)
fprint(f'Cerbero pipeline running at {pipe.web_url} ')
while True:
time.sleep(15)
pipe.refresh()
if Status.is_finished(pipe.status):
fprint(f": {pipe.status}\n")
sys.exit(0 if pipe.status == Status.SUCCESS else 1)
else:
fprint(".")

ci/check-meson-version.sh (new executable file, 14 lines)
View file

@ -0,0 +1,14 @@
#!/bin/bash
MESON_VERSION=`head -n5 meson.build | grep ' version\s*:' | sed -e "s/.*version\s*:\s*'//" -e "s/',.*//"`
CARGO_VERSION=`cat Cargo.toml | grep -A1 workspace.package | grep ^version | sed -e 's/^version = "\(.*\)"/\1/'`
echo "gst-plugins-rs version (meson.build) : $MESON_VERSION"
echo "gst-plugins-rs version (Cargo.toml) : $CARGO_VERSION"
if test "x$MESON_VERSION" != "x$CARGO_VERSION"; then
echo
echo "===> Version mismatch between meson.build and Cargo.toml! <==="
echo
exit 1;
fi
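# A hypothetical invocation from the repository root (version numbers below are
# illustrative; the output format follows the echo lines above):
#
#   $ ./ci/check-meson-version.sh
#   gst-plugins-rs version (meson.build) : 0.13.0
#   gst-plugins-rs version (Cargo.toml)  : 0.13.0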

View file

@ -1,11 +0,0 @@
set -e
RELEASE=1.1.0
git clone https://code.videolan.org/videolan/dav1d.git --branch $RELEASE
cd dav1d
meson build -D prefix=/usr/local
ninja -C build
ninja -C build install
cd ..
rm -rf dav1d

View file

@ -1,6 +0,0 @@
source ./ci/env.sh
set -e
export CARGO_HOME='/usr/local/cargo'
cargo install cargo-c --version 0.9.15+cargo-0.67

View file

@ -36,6 +36,7 @@ function Run-Tests {
}
$env:G_DEBUG="fatal_warnings"
$env:RUST_BACKTRACE="1"
cargo test --no-fail-fast --color=always --workspace $local_exclude --all-targets $Features
if (!$?) {

deny.toml (152 changed lines)
View file

@ -1,9 +1,7 @@
[advisories]
version = 2
db-path = "~/.cargo/advisory-db"
db-urls = ["https://github.com/rustsec/advisory-db"]
vulnerability = "deny"
unmaintained = "warn"
notice = "warn"
ignore = [
# Waiting for https://github.com/librespot-org/librespot/issues/937
"RUSTSEC-2021-0059",
@ -11,17 +9,27 @@ ignore = [
"RUSTSEC-2021-0061",
"RUSTSEC-2021-0145",
# sodiumoxide is deprecated
# https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/530
"RUSTSEC-2021-0137",
# proc-macro-error is unmaintained
# https://github.com/yanganto/test-with/issues/91
"RUSTSEC-2024-0370",
]
[licenses]
unlicensed = "deny"
version = 2
allow = [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause",
"ISC",
"OpenSSL",
"Zlib",
"Unicode-DFS-2016",
"Apache-2.0",
"Apache-2.0 WITH LLVM-exception",
"MPL-2.0",
]
default = "deny"
copyleft = "deny"
allow-osi-fsf-free = "either"
confidence-threshold = 0.8
[[licenses.clarify]]
@ -70,6 +78,18 @@ version = "0.9"
[[bans.skip]]
name = "hmac"
version = "0.11"
[[bans.skip]]
name = "zerocopy"
version = "0.6"
[[bans.skip]]
name = "zerocopy-derive"
version = "0.6"
[[bans.skip]]
name = "multimap"
version = "0.8"
[[bans.skip]]
name = "nix"
version = "0.23"
# field-offset and nix depend on an older memoffset
# https://github.com/Diggsey/rust-field-offset/pull/23
@ -82,22 +102,23 @@ version = "0.6"
[[bans.skip]]
name = "hermit-abi"
version = "0.1"
[[bans.skip]]
name = "hermit-abi"
version = "0.3"
# Various crates depend on an older version of base64
[[bans.skip]]
name = "base64"
version = "0.13"
[[bans.skip]]
name = "base64"
version = "0.21"
# Various crates depend on an older version of socket2
[[bans.skip]]
name = "socket2"
version = "0.4"
# Various crates depend on an older version of syn
[[bans.skip]]
name = "syn"
version = "1.0"
# Various crates depend on an older version of bitflags
[[bans.skip]]
name = "bitflags"
@ -122,10 +143,18 @@ version = "1.0"
name = "hashbrown"
version = "0.12"
# various livekit dependencies depend on an old version of itertools
# various livekit dependencies depend on an old version of itertools and sync_wrapper
[[bans.skip]]
name = "itertools"
version = "0.11"
[[bans.skip]]
name = "sync_wrapper"
version = "0.1"
# various rav1e / dssim-core depend on an old version of itertools
[[bans.skip]]
name = "itertools"
version = "0.12"
# matchers depends on an old version of regex-automata
[[bans.skip]]
@ -179,11 +208,102 @@ version = "0.20"
name = "http"
version = "0.2"
# proc-macro-crate depends on an older version of toml_edit
# https://github.com/bkchr/proc-macro-crate/pull/50
# Various crates depend on an older version of heck
[[bans.skip]]
name = "toml_edit"
name = "heck"
version = "0.4"
# Various crates depend on an older version of hyper / reqwest / headers / etc
[[bans.skip]]
name = "hyper"
version = "0.14"
[[bans.skip]]
name = "hyper-tls"
version = "0.5"
[[bans.skip]]
name = "http-body"
version = "0.4"
[[bans.skip]]
name = "headers-core"
version = "0.2"
[[bans.skip]]
name = "headers"
version = "0.3"
[[bans.skip]]
name = "h2"
version = "0.3"
[[bans.skip]]
name = "reqwest"
version = "0.11"
[[bans.skip]]
name = "rustls-pemfile"
version = "1.0"
[[bans.skip]]
name = "winreg"
version = "0.50"
[[bans.skip]]
name = "system-configuration"
version = "0.5"
[[bans.skip]]
name = "system-configuration-sys"
version = "0.5"
# The AWS SDK uses old versions of rustls and related crates
[[bans.skip]]
name = "rustls"
version = "0.21"
[[bans.skip]]
name = "rustls-native-certs"
version = "0.6"
[[bans.skip]]
name = "rustls-webpki"
version = "0.101"
# warp depends on an older version of tokio-tungstenite
[[bans.skip]]
name = "tokio-tungstenite"
version = "0.21"
[[bans.skip]]
name = "tungstenite"
version = "0.21"
# various crates depend on an older version of system-deps
[[bans.skip]]
name = "system-deps"
version = "6"
# various crates depend on an older version of windows-sys
[[bans.skip]]
name = "windows-sys"
version = "0.52"
# derived-into-owned (via pcap-file) depends on old syn / quote
[[bans.skip]]
name = "syn"
version = "0.11"
[[bans.skip]]
name = "quote"
version = "0.3"
# dav1d depends on old system-deps which depends on old cfg-expr
[[bans.skip]]
name = "cfg-expr"
version = "0.15"
# backtrace and png depend on old miniz_oxide
[[bans.skip]]
name = "miniz_oxide"
version = "0.7"
# tokio-rustls via warp depends on old rustls
[[bans.skip]]
name = "rustls"
version = "0.22"
# aws-smithy-runtime depends on old tokio-rustls
[[bans.skip]]
name = "tokio-rustls"
version = "0.24"
[sources]
unknown-registry = "deny"

View file

@ -1,5 +1,9 @@
build_hotdoc = false
if get_option('doc').disabled()
subdir_done()
endif
if meson.is_cross_build()
if get_option('doc').enabled()
error('Documentation enabled but building the doc while cross building is not supported yet.')

File diff suppressed because it is too large

View file

@ -81,20 +81,20 @@ impl FileSink {
Some(ref location_cur) => {
gst::info!(
CAT,
imp: self,
imp = self,
"Changing `location` from {:?} to {}",
location_cur,
location,
);
}
None => {
gst::info!(CAT, imp: self, "Setting `location` to {}", location,);
gst::info!(CAT, imp = self, "Setting `location` to {}", location,);
}
}
Some(location)
}
None => {
gst::info!(CAT, imp: self, "Resetting `location` to None",);
gst::info!(CAT, imp = self, "Resetting `location` to None",);
None
}
};
@ -140,7 +140,12 @@ impl ObjectImpl for FileSink {
};
if let Err(err) = res {
gst::error!(CAT, imp: self, "Failed to set property `location`: {}", err);
gst::error!(
CAT,
imp = self,
"Failed to set property `location`: {}",
err
);
}
}
_ => unimplemented!(),
@ -222,10 +227,10 @@ impl BaseSinkImpl for FileSink {
]
)
})?;
gst::debug!(CAT, imp: self, "Opened file {:?}", file);
gst::debug!(CAT, imp = self, "Opened file {:?}", file);
*state = State::Started { file, position: 0 };
gst::info!(CAT, imp: self, "Started");
gst::info!(CAT, imp = self, "Started");
Ok(())
}
@ -240,7 +245,7 @@ impl BaseSinkImpl for FileSink {
}
*state = State::Stopped;
gst::info!(CAT, imp: self, "Stopped");
gst::info!(CAT, imp = self, "Stopped");
Ok(())
}
@ -260,7 +265,7 @@ impl BaseSinkImpl for FileSink {
}
};
gst::trace!(CAT, imp: self, "Rendering {:?}", buffer);
gst::trace!(CAT, imp = self, "Rendering {:?}", buffer);
let map = buffer.map_readable().map_err(|_| {
gst::element_imp_error!(self, gst::CoreError::Failed, ["Failed to map buffer"]);
gst::FlowError::Error

View file

@ -94,20 +94,20 @@ impl FileSrc {
Some(ref location_cur) => {
gst::info!(
CAT,
imp: self,
imp = self,
"Changing `location` from {:?} to {}",
location_cur,
location,
);
}
None => {
gst::info!(CAT, imp: self, "Setting `location to {}", location,);
gst::info!(CAT, imp = self, "Setting `location` to {}", location,);
}
}
Some(location)
}
None => {
gst::info!(CAT, imp: self, "Resetting `location` to None",);
gst::info!(CAT, imp = self, "Resetting `location` to None",);
None
}
};
@ -148,7 +148,12 @@ impl ObjectImpl for FileSrc {
};
if let Err(err) = res {
gst::error!(CAT, imp: self, "Failed to set property `location`: {}", err);
gst::error!(
CAT,
imp = self,
"Failed to set property `location`: {}",
err
);
}
}
_ => unimplemented!(),
@ -250,11 +255,11 @@ impl BaseSrcImpl for FileSrc {
)
})?;
gst::debug!(CAT, imp: self, "Opened file {:?}", file);
gst::debug!(CAT, imp = self, "Opened file {:?}", file);
*state = State::Started { file, position: 0 };
gst::info!(CAT, imp: self, "Started");
gst::info!(CAT, imp = self, "Started");
Ok(())
}
@ -270,7 +275,7 @@ impl BaseSrcImpl for FileSrc {
*state = State::Stopped;
gst::info!(CAT, imp: self, "Stopped");
gst::info!(CAT, imp = self, "Stopped");
Ok(())
}

View file

@ -0,0 +1,44 @@
[package]
name = "gst-plugin-gopbuffer"
version.workspace = true
authors = ["Matthew Waters <matthew@centricular.com>"]
license = "MPL-2.0"
description = "Store complete groups of pictures at a time"
repository.workspace = true
edition.workspace = true
rust-version.workspace = true
[dependencies]
anyhow = "1"
gst = { workspace = true, features = ["v1_18"] }
gst-video = { workspace = true, features = ["v1_18"] }
once_cell.workspace = true
[lib]
name = "gstgopbuffer"
crate-type = ["cdylib", "rlib"]
path = "src/lib.rs"
[dev-dependencies]
gst-app = { workspace = true, features = ["v1_18"] }
gst-check = { workspace = true, features = ["v1_18"] }
[build-dependencies]
gst-plugin-version-helper.workspace = true
[features]
static = []
capi = []
[package.metadata.capi]
min_version = "0.8.0"
[package.metadata.capi.header]
enabled = false
[package.metadata.capi.library]
install_subdir = "gstreamer-1.0"
versioning = false
[package.metadata.capi.pkg_config]
requires_private = "gstreamer-1.0, gstreamer-base-1.0, gstreamer-audio-1.0, gstreamer-video-1.0, gobject-2.0, glib-2.0, gmodule-2.0"

View file

@ -0,0 +1,373 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

View file

@ -0,0 +1,3 @@
fn main() {
gst_plugin_version_helper::info()
}

View file

@ -0,0 +1,897 @@
// Copyright (C) 2023 Matthew Waters <matthew@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
/**
* SECTION:element-gopbuffer
*
* #gopbuffer is an element that can be used to store a minimum duration of data delimited by
* discrete GOPs (Groups of Pictures). It does this by differentiating on the DELTA_UNIT
* flag on each input buffer.
*
* One example of the usefulness of #gopbuffer is its ability to store a backlog of data starting
* on a key frame boundary if, say, the previous 10 seconds of a stream should be recorded to
* disk.
*
* ## Example pipeline
*
* |[
* gst-launch-1.0 videotestsrc ! vp8enc ! gopbuffer minimum-duration=10000000000 ! fakesink
* ]|
*
* Since: plugins-rs-0.13.0
*/
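// A minimal programmatic sketch (assumption: the plugin is registered in the
// running process; not part of the element documentation above):
//
//   let gopbuffer = gst::ElementFactory::make("gopbuffer")
//       .property("minimum-duration", gst::ClockTime::from_seconds(10))
//       .build()
//       .unwrap();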
use gst::glib;
use gst::prelude::*;
use gst::subclass::prelude::*;
use std::collections::VecDeque;
use std::sync::Mutex;
use once_cell::sync::Lazy;
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"gopbuffer",
gst::DebugColorFlags::empty(),
Some("GopBuffer Element"),
)
});
const DEFAULT_MIN_TIME: gst::ClockTime = gst::ClockTime::from_seconds(1);
const DEFAULT_MAX_TIME: Option<gst::ClockTime> = None;
#[derive(Debug, Clone)]
struct Settings {
min_time: gst::ClockTime,
max_time: Option<gst::ClockTime>,
}
impl Default for Settings {
fn default() -> Self {
Settings {
min_time: DEFAULT_MIN_TIME,
max_time: DEFAULT_MAX_TIME,
}
}
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum DeltaFrames {
/// Only single completely decodable frames
IntraOnly,
/// Frames may depend on past frames
PredictiveOnly,
/// Frames may depend on past or future frames
Bidirectional,
}
impl DeltaFrames {
/// Whether dts is required to order buffers differently from presentation order
pub(crate) fn requires_dts(&self) -> bool {
matches!(self, Self::Bidirectional)
}
/// Whether this coding structure does not allow delta flags on buffers
pub(crate) fn intra_only(&self) -> bool {
matches!(self, Self::IntraOnly)
}
pub(crate) fn from_caps(caps: &gst::CapsRef) -> Option<Self> {
let s = caps.structure(0)?;
Some(match s.name().as_str() {
"video/x-h264" | "video/x-h265" => DeltaFrames::Bidirectional,
"video/x-vp8" | "video/x-vp9" | "video/x-av1" => DeltaFrames::PredictiveOnly,
"image/jpeg" | "image/png" | "video/x-raw" => DeltaFrames::IntraOnly,
_ => return None,
})
}
}
// TODO: add buffer list support
#[derive(Debug)]
enum GopItem {
Buffer(gst::Buffer),
Event(gst::Event),
}
struct Gop {
// all times are in running time
start_pts: gst::ClockTime,
start_dts: Option<gst::Signed<gst::ClockTime>>,
earliest_pts: gst::ClockTime,
final_earliest_pts: bool,
end_pts: gst::ClockTime,
end_dts: Option<gst::Signed<gst::ClockTime>>,
final_end_pts: bool,
// Buffer or event
data: VecDeque<GopItem>,
}
impl Gop {
fn push_on_pad(mut self, pad: &gst::Pad) -> Result<gst::FlowSuccess, gst::FlowError> {
let mut iter = self.data.iter().filter_map(|item| match item {
GopItem::Buffer(buffer) => buffer.pts(),
_ => None,
});
let first_pts = iter.next();
let last_pts = iter.last();
gst::debug!(
CAT,
"pushing gop with start pts {} end pts {}",
first_pts.display(),
last_pts.display(),
);
for item in self.data.drain(..) {
match item {
GopItem::Buffer(buffer) => {
pad.push(buffer)?;
}
GopItem::Event(event) => {
pad.push_event(event);
}
}
}
Ok(gst::FlowSuccess::Ok)
}
}
struct Stream {
sinkpad: gst::Pad,
srcpad: gst::Pad,
sink_segment: Option<gst::FormattedSegment<gst::ClockTime>>,
delta_frames: DeltaFrames,
queued_gops: VecDeque<Gop>,
}
impl Stream {
fn queue_buffer(
&mut self,
buffer: gst::Buffer,
segment: &gst::FormattedSegment<gst::ClockTime>,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let pts_position = buffer.pts().unwrap();
let end_pts_position = pts_position
.opt_add(buffer.duration())
.unwrap_or(pts_position);
let pts = segment
.to_running_time_full(pts_position)
.ok_or_else(|| {
gst::error!(
CAT,
obj = self.sinkpad,
"Couldn't convert PTS to running time"
);
gst::FlowError::Error
})?
.positive()
.unwrap_or_else(|| {
gst::warning!(CAT, obj = self.sinkpad, "Negative PTSs are not supported");
gst::ClockTime::ZERO
});
let end_pts = segment
.to_running_time_full(end_pts_position)
.ok_or_else(|| {
gst::error!(
CAT,
obj = self.sinkpad,
"Couldn't convert end PTS to running time"
);
gst::FlowError::Error
})?
.positive()
.unwrap_or_else(|| {
gst::warning!(CAT, obj = self.sinkpad, "Negative PTSs are not supported");
gst::ClockTime::ZERO
});
let (dts, end_dts) = if !self.delta_frames.requires_dts() {
(None, None)
} else {
let dts_position = buffer.dts().expect("No dts");
let end_dts_position = buffer
.duration()
.opt_add(dts_position)
.unwrap_or(dts_position);
let dts = segment.to_running_time_full(dts_position).ok_or_else(|| {
gst::error!(
CAT,
obj = self.sinkpad,
"Couldn't convert DTS to running time"
);
gst::FlowError::Error
})?;
let end_dts = segment
.to_running_time_full(end_dts_position)
.ok_or_else(|| {
gst::error!(
CAT,
obj = self.sinkpad,
"Couldn't convert end DTS to running time"
);
gst::FlowError::Error
})?;
let end_dts = std::cmp::max(end_dts, dts);
(Some(dts), Some(end_dts))
};
if !buffer.flags().contains(gst::BufferFlags::DELTA_UNIT) {
gst::debug!(
CAT,
"New GOP detected with buffer pts {} dts {}",
buffer.pts().display(),
buffer.dts().display()
);
let gop = Gop {
start_pts: pts,
start_dts: dts,
earliest_pts: pts,
final_earliest_pts: false,
end_pts: pts,
end_dts,
final_end_pts: false,
data: VecDeque::from([GopItem::Buffer(buffer)]),
};
self.queued_gops.push_front(gop);
if let Some(prev_gop) = self.queued_gops.get_mut(1) {
gst::debug!(
CAT,
obj = self.sinkpad,
"Updating previous GOP starting at PTS {} to end PTS {}",
prev_gop.earliest_pts,
pts,
);
prev_gop.end_pts = std::cmp::max(prev_gop.end_pts, pts);
prev_gop.end_dts = std::cmp::max(prev_gop.end_dts, dts);
if !self.delta_frames.requires_dts() {
prev_gop.final_end_pts = true;
}
if !prev_gop.final_earliest_pts {
// Don't bother logging this for intra-only streams as it would be for every
// single buffer.
if self.delta_frames.requires_dts() {
gst::debug!(
CAT,
obj = self.sinkpad,
"Previous GOP has final earliest PTS at {}",
prev_gop.earliest_pts
);
}
prev_gop.final_earliest_pts = true;
if let Some(prev_prev_gop) = self.queued_gops.get_mut(2) {
prev_prev_gop.final_end_pts = true;
}
}
}
} else if let Some(gop) = self.queued_gops.front_mut() {
gop.end_pts = std::cmp::max(gop.end_pts, end_pts);
gop.end_dts = gop.end_dts.opt_max(end_dts);
gop.data.push_back(GopItem::Buffer(buffer));
if self.delta_frames.requires_dts() {
let dts = dts.unwrap();
if gop.earliest_pts > pts && !gop.final_earliest_pts {
gst::debug!(
CAT,
obj = self.sinkpad,
"Updating current GOP earliest PTS from {} to {}",
gop.earliest_pts,
pts
);
gop.earliest_pts = pts;
if let Some(prev_gop) = self.queued_gops.get_mut(1) {
if prev_gop.end_pts < pts {
gst::debug!(
CAT,
obj = self.sinkpad,
"Updating previous GOP starting PTS {} end time from {} to {}",
pts,
prev_gop.end_pts,
pts
);
prev_gop.end_pts = pts;
}
}
}
let gop = self.queued_gops.front_mut().unwrap();
// The earliest PTS is known when the current DTS is bigger or equal to the first
// PTS that was observed in this GOP. If there was another frame later that had a
// lower PTS then it wouldn't be possible to display it in time anymore, i.e. the
// stream would be invalid.
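// Worked example (hypothetical numbers): if the first buffer of this GOP had
// PTS 100 and a buffer now arrives with DTS >= 100, no later buffer in the
// GOP can carry a PTS below 100 and still be presentable in time, so the
// earliest PTS recorded so far is final.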
if gop.start_pts <= dts && !gop.final_earliest_pts {
gst::debug!(
CAT,
obj = self.sinkpad,
"GOP has final earliest PTS at {}",
gop.earliest_pts
);
gop.final_earliest_pts = true;
if let Some(prev_gop) = self.queued_gops.get_mut(1) {
prev_gop.final_end_pts = true;
}
}
}
} else {
gst::debug!(
CAT,
"dropping buffer before first GOP with pts {} dts {}",
buffer.pts().display(),
buffer.dts().display()
);
}
if let Some((prev_gop, first_gop)) = Option::zip(
self.queued_gops.iter().find(|gop| gop.final_end_pts),
self.queued_gops.back(),
) {
gst::debug!(
CAT,
obj = self.sinkpad,
"Queued full GOPs duration updated to {}",
prev_gop.end_pts.saturating_sub(first_gop.earliest_pts),
);
}
gst::debug!(
CAT,
obj = self.sinkpad,
"Queued duration updated to {}",
Option::zip(self.queued_gops.front(), self.queued_gops.back())
.map(|(end, start)| end.end_pts.saturating_sub(start.start_pts))
.unwrap_or(gst::ClockTime::ZERO)
);
Ok(gst::FlowSuccess::Ok)
}
fn oldest_gop(&mut self) -> Option<Gop> {
self.queued_gops.pop_back()
}
fn peek_oldest_gop(&self) -> Option<&Gop> {
self.queued_gops.back()
}
fn peek_second_oldest_gop(&self) -> Option<&Gop> {
if self.queued_gops.len() <= 1 {
return None;
}
self.queued_gops.get(self.queued_gops.len() - 2)
}
fn drain_all(&mut self) -> impl Iterator<Item = Gop> + '_ {
self.queued_gops.drain(..)
}
fn flush(&mut self) {
self.queued_gops.clear();
}
}
#[derive(Default)]
struct State {
streams: Vec<Stream>,
}
impl State {
fn stream_from_sink_pad(&self, pad: &gst::Pad) -> Option<&Stream> {
self.streams.iter().find(|stream| &stream.sinkpad == pad)
}
fn stream_from_sink_pad_mut(&mut self, pad: &gst::Pad) -> Option<&mut Stream> {
self.streams
.iter_mut()
.find(|stream| &stream.sinkpad == pad)
}
fn stream_from_src_pad(&self, pad: &gst::Pad) -> Option<&Stream> {
self.streams.iter().find(|stream| &stream.srcpad == pad)
}
}
#[derive(Default)]
pub(crate) struct GopBuffer {
state: Mutex<State>,
settings: Mutex<Settings>,
}
impl GopBuffer {
fn sink_chain(
&self,
pad: &gst::Pad,
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let obj = self.obj();
if buffer.pts().is_none() {
gst::error!(CAT, obj = obj, "Require timestamped buffers!");
return Err(gst::FlowError::Error);
}
let settings = self.settings.lock().unwrap().clone();
let mut state = self.state.lock().unwrap();
let stream = state
.stream_from_sink_pad_mut(pad)
.expect("pad without an internal Stream");
let Some(segment) = stream.sink_segment.clone() else {
gst::element_imp_error!(self, gst::CoreError::Clock, ["Got buffer before segment"]);
return Err(gst::FlowError::Error);
};
if stream.delta_frames.intra_only() && buffer.flags().contains(gst::BufferFlags::DELTA_UNIT)
{
gst::error!(CAT, obj = pad, "Intra-only stream with delta units");
return Err(gst::FlowError::Error);
}
if stream.delta_frames.requires_dts() && buffer.dts().is_none() {
gst::error!(CAT, obj = pad, "Require DTS for video streams");
return Err(gst::FlowError::Error);
}
let srcpad = stream.srcpad.clone();
stream.queue_buffer(buffer, &segment)?;
let mut gops_to_push = vec![];
let Some(newest_gop) = stream.queued_gops.front() else {
return Ok(gst::FlowSuccess::Ok);
};
// we are looking for the latest pts value here (which should be the largest)
let newest_ts = if stream.delta_frames.requires_dts() {
newest_gop.end_dts.unwrap()
} else {
gst::Signed::Positive(newest_gop.end_pts)
};
loop {
// check stored times as though the oldest GOP doesn't exist.
let Some(second_oldest_gop) = stream.peek_second_oldest_gop() else {
break;
};
// we are looking for the oldest pts here (with the largest value). This is our potentially
// new end time.
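// Hypothetical numbers: with queued GOPs starting at 0s, 2s and 4s and the
// newest data ending at 5s, dropping the oldest GOP still leaves 5s - 2s = 3s
// queued, so with minimum-duration=1s the oldest GOP can be pushed out below.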
let oldest_ts = if stream.delta_frames.requires_dts() {
second_oldest_gop.start_dts.unwrap()
} else {
gst::Signed::Positive(second_oldest_gop.start_pts)
};
let stored_duration_without_oldest = newest_ts.saturating_sub(oldest_ts);
gst::trace!(
CAT,
obj = obj,
"newest_pts {}, second oldest_pts {}, stored_duration_without_oldest_gop {}, min-time {}",
newest_ts.display(),
oldest_ts.display(),
stored_duration_without_oldest.display(),
settings.min_time.display()
);
if stored_duration_without_oldest < settings.min_time {
break;
}
gops_to_push.push(stream.oldest_gop().unwrap());
}
if let Some(max_time) = settings.max_time {
while let Some(oldest_gop) = stream.peek_oldest_gop() {
let oldest_ts = oldest_gop.data.iter().rev().find_map(|item| match item {
GopItem::Buffer(buffer) => {
if stream.delta_frames.requires_dts() {
Some(gst::Signed::Positive(buffer.dts().unwrap()))
} else {
Some(gst::Signed::Positive(buffer.pts().unwrap()))
}
}
_ => None,
});
if newest_ts
.opt_saturating_sub(oldest_ts)
.is_some_and(|diff| diff > gst::Signed::Positive(max_time))
{
gst::warning!(CAT, obj = obj, "Stored data has overflowed the maximum allowed stored time {}, pushing oldest GOP", max_time.display());
gops_to_push.push(stream.oldest_gop().unwrap());
} else {
break;
}
}
}
drop(state);
for gop in gops_to_push.into_iter() {
gop.push_on_pad(&srcpad)?;
}
Ok(gst::FlowSuccess::Ok)
}
fn sink_event(&self, pad: &gst::Pad, event: gst::Event) -> bool {
let obj = self.obj();
let mut state = self.state.lock().unwrap();
let stream = state
.stream_from_sink_pad_mut(pad)
.expect("pad without an internal Stream!");
match event.view() {
gst::EventView::Caps(caps) => {
let Some(delta_frames) = DeltaFrames::from_caps(caps.caps()) else {
return false;
};
stream.delta_frames = delta_frames;
}
gst::EventView::FlushStop(_flush) => {
gst::debug!(CAT, obj = obj, "flushing stored data");
stream.flush();
}
gst::EventView::Eos(_eos) => {
gst::debug!(CAT, obj = obj, "draining data at EOS");
let gops = stream.drain_all().collect::<Vec<_>>();
let srcpad = stream.srcpad.clone();
drop(state);
for gop in gops.into_iter() {
let _ = gop.push_on_pad(&srcpad);
}
// once we've pushed all the data, we can push the corresponding eos
gst::Pad::event_default(pad, Some(&*obj), event);
return true;
}
gst::EventView::Segment(segment) => {
let Ok(segment) = segment.segment().clone().downcast::<gst::ClockTime>() else {
gst::error!(CAT, "Non TIME segments are not supported");
return false;
};
stream.sink_segment = Some(segment);
}
_ => (),
};
if event.is_serialized() {
if stream.peek_oldest_gop().is_none() {
// if there is nothing queued, the event can go straight through
gst::trace!(
CAT,
obj = obj,
"nothing queued, event {:?} passthrough",
event.structure().map(|s| s.name().as_str())
);
drop(state);
return gst::Pad::event_default(pad, Some(&*obj), event);
}
let gop = stream.queued_gops.front_mut().unwrap();
gop.data.push_back(GopItem::Event(event));
true
} else {
// non-serialized events can be pushed directly
drop(state);
gst::Pad::event_default(pad, Some(&*obj), event)
}
}
fn sink_query(&self, pad: &gst::Pad, query: &mut gst::QueryRef) -> bool {
let obj = self.obj();
if query.is_serialized() {
// TODO: serialized queries somehow?
gst::warning!(
CAT,
obj = pad,
"Serialized queries are currently not supported"
);
return false;
}
gst::Pad::query_default(pad, Some(&*obj), query)
}
fn src_query(&self, pad: &gst::Pad, query: &mut gst::QueryRef) -> bool {
let obj = self.obj();
match query.view_mut() {
gst::QueryViewMut::Latency(latency) => {
let mut upstream_query = gst::query::Latency::new();
let otherpad = {
let state = self.state.lock().unwrap();
let Some(stream) = state.stream_from_src_pad(pad) else {
return false;
};
stream.sinkpad.clone()
};
let ret = otherpad.peer_query(&mut upstream_query);
if ret {
let (live, mut min, mut max) = upstream_query.result();
let settings = self.settings.lock().unwrap();
min += settings.max_time.unwrap_or(settings.min_time);
max = max.opt_max(settings.max_time);
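// Hypothetical example: with an upstream minimum latency of 20ms and
// minimum-duration=1s (no max-size-time configured), the reported minimum
// latency becomes 1.02s, since a full GOP may be held back for that long.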
latency.set(live, min, max);
gst::debug!(
CAT,
obj = pad,
"Latency query response: live {} min {} max {}",
live,
min,
max.display()
);
}
ret
}
_ => gst::Pad::query_default(pad, Some(&*obj), query),
}
}
fn iterate_internal_links(&self, pad: &gst::Pad) -> gst::Iterator<gst::Pad> {
let state = self.state.lock().unwrap();
let otherpad = match pad.direction() {
gst::PadDirection::Src => state
.stream_from_src_pad(pad)
.map(|stream| stream.sinkpad.clone()),
gst::PadDirection::Sink => state
.stream_from_sink_pad(pad)
.map(|stream| stream.srcpad.clone()),
_ => unreachable!(),
};
if let Some(otherpad) = otherpad {
gst::Iterator::from_vec(vec![otherpad])
} else {
gst::Iterator::from_vec(vec![])
}
}
}
#[glib::object_subclass]
impl ObjectSubclass for GopBuffer {
const NAME: &'static str = "GstGopBuffer";
type Type = super::GopBuffer;
type ParentType = gst::Element;
}
impl ObjectImpl for GopBuffer {
fn properties() -> &'static [glib::ParamSpec] {
static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| {
vec![
glib::ParamSpecUInt64::builder("minimum-duration")
.nick("Minimum Duration")
.blurb("The minimum duration to store")
.default_value(DEFAULT_MIN_TIME.nseconds())
.mutable_ready()
.build(),
glib::ParamSpecUInt64::builder("max-size-time")
.nick("Maximum Duration")
.blurb("The maximum duration to store (0=disable)")
.default_value(0)
.mutable_ready()
.build(),
]
});
&PROPERTIES
}
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
match pspec.name() {
"minimum-duration" => {
let mut settings = self.settings.lock().unwrap();
let min_time = value.get().expect("type checked upstream");
if settings.min_time != min_time {
settings.min_time = min_time;
drop(settings);
self.post_message(gst::message::Latency::builder().src(&*self.obj()).build());
}
}
"max-size-time" => {
let mut settings = self.settings.lock().unwrap();
let max_time = value
.get::<Option<gst::ClockTime>>()
.expect("type checked upstream");
let max_time = if matches!(max_time, Some(gst::ClockTime::ZERO) | None) {
None
} else {
max_time
};
if settings.max_time != max_time {
settings.max_time = max_time;
drop(settings);
self.post_message(gst::message::Latency::builder().src(&*self.obj()).build());
}
}
_ => unimplemented!(),
}
}
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
match pspec.name() {
"minimum-duration" => {
let settings = self.settings.lock().unwrap();
settings.min_time.to_value()
}
"max-size-time" => {
let settings = self.settings.lock().unwrap();
settings.max_time.unwrap_or(gst::ClockTime::ZERO).to_value()
}
_ => unimplemented!(),
}
}
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
let class = obj.class();
let templ = class.pad_template("video_sink").unwrap();
let sinkpad = gst::Pad::builder_from_template(&templ)
.name("video_sink")
.chain_function(|pad, parent, buffer| {
GopBuffer::catch_panic_pad_function(
parent,
|| Err(gst::FlowError::Error),
|gopbuffer| gopbuffer.sink_chain(pad, buffer),
)
})
.event_function(|pad, parent, event| {
GopBuffer::catch_panic_pad_function(
parent,
|| false,
|gopbuffer| gopbuffer.sink_event(pad, event),
)
})
.query_function(|pad, parent, query| {
GopBuffer::catch_panic_pad_function(
parent,
|| false,
|gopbuffer| gopbuffer.sink_query(pad, query),
)
})
.iterate_internal_links_function(|pad, parent| {
GopBuffer::catch_panic_pad_function(
parent,
|| gst::Pad::iterate_internal_links_default(pad, parent),
|gopbuffer| gopbuffer.iterate_internal_links(pad),
)
})
.flags(gst::PadFlags::PROXY_CAPS)
.build();
obj.add_pad(&sinkpad).unwrap();
let templ = class.pad_template("video_src").unwrap();
let srcpad = gst::Pad::builder_from_template(&templ)
.name("video_src")
.query_function(|pad, parent, query| {
GopBuffer::catch_panic_pad_function(
parent,
|| false,
|gopbuffer| gopbuffer.src_query(pad, query),
)
})
.iterate_internal_links_function(|pad, parent| {
GopBuffer::catch_panic_pad_function(
parent,
|| gst::Pad::iterate_internal_links_default(pad, parent),
|gopbuffer| gopbuffer.iterate_internal_links(pad),
)
})
.build();
obj.add_pad(&srcpad).unwrap();
let mut state = self.state.lock().unwrap();
state.streams.push(Stream {
sinkpad,
srcpad,
sink_segment: None,
delta_frames: DeltaFrames::IntraOnly,
queued_gops: VecDeque::new(),
});
}
}
impl GstObjectImpl for GopBuffer {}
impl ElementImpl for GopBuffer {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"GopBuffer",
"Video",
"GOP Buffer",
"Matthew Waters <matthew@centricular.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
// This element is designed to eventually support multiple streams, but that support
// has not been implemented yet.
//
// The things missing for multiple (audio or video) streams are:
// 1. More pad templates
// 2. Choosing a main stream to drive the timestamp logic between all input streams
// 3. Allowing either the main stream to cause other streams to push data
// regardless of its GOP state, or allow each stream to be individually delimited
// by GOP but all still within the minimum duration.
let video_caps = [
gst::Structure::builder("video/x-h264")
.field("stream-format", gst::List::new(["avc", "avc3"]))
.field("alignment", "au")
.build(),
gst::Structure::builder("video/x-h265")
.field("stream-format", gst::List::new(["hvc1", "hev1"]))
.field("alignment", "au")
.build(),
gst::Structure::builder("video/x-vp8").build(),
gst::Structure::builder("video/x-vp9").build(),
gst::Structure::builder("video/x-av1")
.field("stream-format", "obu-stream")
.field("alignment", "tu")
.build(),
]
.into_iter()
.collect::<gst::Caps>();
let src_pad_template = gst::PadTemplate::new(
"video_src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&video_caps,
)
.unwrap();
let sink_pad_template = gst::PadTemplate::new(
"video_sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&video_caps,
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
fn change_state(
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
#[allow(clippy::single_match)]
match transition {
gst::StateChange::NullToReady => {
let settings = self.settings.lock().unwrap();
if let Some(max_time) = settings.max_time {
if max_time < settings.min_time {
gst::element_imp_error!(
self,
gst::CoreError::StateChange,
["Configured maximum time is less than the minimum time"]
);
return Err(gst::StateChangeError);
}
}
}
_ => (),
}
self.parent_change_state(transition)?;
Ok(gst::StateChangeSuccess::Success)
}
}

View file

@ -0,0 +1,27 @@
// Copyright (C) 2022 Matthew Waters <matthew@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub(crate) struct GopBuffer(ObjectSubclass<imp::GopBuffer>) @extends gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"gopbuffer",
gst::Rank::PRIMARY,
GopBuffer::static_type(),
)?;
Ok(())
}

View file

@ -0,0 +1,34 @@
// Copyright (C) 2022 Matthew Waters <matthew@centricular.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(clippy::non_send_fields_in_send_ty, unused_doc_comments)]
/**
* plugin-gopbuffer:
*
* Since: plugins-rs-0.13.0
*/
use gst::glib;
mod gopbuffer;
fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gopbuffer::register(plugin)
}
gst::plugin_define!(
gopbuffer,
env!("CARGO_PKG_DESCRIPTION"),
plugin_init,
concat!(env!("CARGO_PKG_VERSION"), "-", env!("COMMIT_ID")),
// FIXME: MPL-2.0 is only allowed since 1.18.3 (as unknown) and 1.20 (as known)
"MPL",
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_REPOSITORY"),
env!("BUILD_REL_DATE")
);

View file

@ -0,0 +1,128 @@
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
//
use gst::prelude::*;
fn init() {
use std::sync::Once;
static INIT: Once = Once::new();
INIT.call_once(|| {
gst::init().unwrap();
gstgopbuffer::plugin_register_static().unwrap();
});
}
macro_rules! check_buffer {
($buf1:expr, $buf2:expr) => {
assert_eq!($buf1.pts(), $buf2.pts());
assert_eq!($buf1.dts(), $buf2.dts());
assert_eq!($buf1.flags(), $buf2.flags());
};
}
#[test]
fn test_min_one_gop_held() {
const OFFSET: gst::ClockTime = gst::ClockTime::from_seconds(10);
init();
let mut h =
gst_check::Harness::with_padnames("gopbuffer", Some("video_sink"), Some("video_src"));
// 200ms min buffer time
let element = h.element().unwrap();
element.set_property("minimum-duration", gst::ClockTime::from_mseconds(200));
h.set_src_caps(
gst::Caps::builder("video/x-h264")
.field("width", 320i32)
.field("height", 240i32)
.field("framerate", gst::Fraction::new(10, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::with_size(1).unwrap())
.build(),
);
let mut in_segment = gst::Segment::new();
in_segment.set_format(gst::Format::Time);
in_segment.set_base(10.seconds());
assert!(h.push_event(gst::event::Segment::builder(&in_segment).build()));
h.play();
// Push 6 buffers of 100ms each, 2nd and 5th buffer without DELTA_UNIT flag
let in_buffers: Vec<_> = (0..6)
.map(|i| {
let mut buffer = gst::Buffer::with_size(1).unwrap();
{
let buffer = buffer.get_mut().unwrap();
buffer.set_pts(OFFSET + gst::ClockTime::from_mseconds(i * 100));
buffer.set_dts(OFFSET + gst::ClockTime::from_mseconds(i * 100));
buffer.set_duration(gst::ClockTime::from_mseconds(100));
if i != 1 && i != 4 {
buffer.set_flags(gst::BufferFlags::DELTA_UNIT);
}
}
assert_eq!(h.push(buffer.clone()), Ok(gst::FlowSuccess::Ok));
buffer
})
.collect();
// pull mandatory events
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::StreamStart);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Caps);
// GstHarness pushes its own segment event that we need to eat
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Segment);
let ev = h.pull_event().unwrap();
let gst::event::EventView::Segment(recv_segment) = ev.view() else {
unreachable!()
};
let recv_segment = recv_segment.segment();
assert_eq!(recv_segment, &in_segment);
// check that at least the first GOP has been output already as it exceeds the minimum-time
// value
let mut in_iter = in_buffers.iter();
// the first buffer is dropped because it was not preceded by a keyframe
let _buffer = in_iter.next().unwrap();
// a keyframe
let out = h.pull().unwrap();
let buffer = in_iter.next().unwrap();
check_buffer!(buffer, out);
// not a keyframe
let out = h.pull().unwrap();
let buffer = in_iter.next().unwrap();
check_buffer!(buffer, out);
// not a keyframe
let out = h.pull().unwrap();
let buffer = in_iter.next().unwrap();
check_buffer!(buffer, out);
// no more buffers
assert_eq!(h.buffers_in_queue(), 0);
// push eos to drain out the rest of the data
assert!(h.push_event(gst::event::Eos::new()));
for buffer in in_iter {
let out = h.pull().unwrap();
check_buffer!(buffer, out);
}
// no more buffers
assert_eq!(h.buffers_in_queue(), 0);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Eos);
}

View file

@ -108,7 +108,7 @@ impl ObjectImpl for InterSink {
InterStreamProducer::acquire(&settings.producer_name, &appsink)
{
drop(settings);
gst::error!(CAT, imp: self, "{err}");
gst::error!(CAT, imp = self, "{err}");
self.post_error_message(gst::error_msg!(
gst::StreamError::Failed,
["{err}"]
@ -191,7 +191,7 @@ impl ElementImpl for InterSink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
if transition == gst::StateChange::ReadyToPaused {
if let Err(err) = self.prepare() {

View file

@ -177,7 +177,7 @@ impl ElementImpl for InterSrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
if transition == gst::StateChange::ReadyToPaused {
if let Err(err) = self.prepare() {

View file

@ -0,0 +1,43 @@
[package]
name = "gst-plugin-originalbuffer"
version.workspace = true
authors = ["Olivier Crête <olivier.crete@collabora.com>"]
repository.workspace = true
license = "MPL-2.0"
description = "GStreamer Origin buffer meta Plugin"
edition.workspace = true
rust-version.workspace = true
[dependencies]
glib.workspace = true
gst.workspace = true
gst-video.workspace = true
atomic_refcell = "0.1"
once_cell.workspace = true
[lib]
name = "gstoriginalbuffer"
crate-type = ["cdylib", "rlib"]
path = "src/lib.rs"
[build-dependencies]
gst-plugin-version-helper.workspace = true
[features]
static = []
capi = []
doc = ["gst/v1_16"]
[package.metadata.capi]
min_version = "0.9.21"
[package.metadata.capi.header]
enabled = false
[package.metadata.capi.library]
install_subdir = "gstreamer-1.0"
versioning = false
import_library = false
[package.metadata.capi.pkg_config]
requires_private = "gstreamer-1.0, gstreamer-base-1.0, gobject-2.0, glib-2.0, gmodule-2.0"

View file

@ -0,0 +1,3 @@
fn main() {
gst_plugin_version_helper::info()
}

View file

@ -0,0 +1,38 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(clippy::non_send_fields_in_send_ty, unused_doc_comments)]
/**
* plugin-originalbuffer:
*
* Since: plugins-rs-0.12 */
use gst::glib;
mod originalbuffermeta;
mod originalbufferrestore;
mod originalbuffersave;
fn plugin_init(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
originalbuffersave::register(plugin)?;
originalbufferrestore::register(plugin)?;
Ok(())
}
gst::plugin_define!(
originalbuffer,
env!("CARGO_PKG_DESCRIPTION"),
plugin_init,
concat!(env!("CARGO_PKG_VERSION"), "-", env!("COMMIT_ID")),
"MPL",
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_REPOSITORY"),
env!("BUILD_REL_DATE")
);

View file

@ -0,0 +1,199 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::prelude::*;
use std::fmt;
use std::mem;
#[repr(transparent)]
pub struct OriginalBufferMeta(imp::OriginalBufferMeta);
unsafe impl Send for OriginalBufferMeta {}
unsafe impl Sync for OriginalBufferMeta {}
impl OriginalBufferMeta {
pub fn add(
buffer: &mut gst::BufferRef,
original: gst::Buffer,
caps: Option<gst::Caps>,
) -> gst::MetaRefMut<'_, Self, gst::meta::Standalone> {
unsafe {
// Manually dropping because gst_buffer_add_meta() takes ownership of the
// content of the struct
let mut params =
mem::ManuallyDrop::new(imp::OriginalBufferMetaParams { original, caps });
let meta = gst::ffi::gst_buffer_add_meta(
buffer.as_mut_ptr(),
imp::original_buffer_meta_get_info(),
&mut *params as *mut imp::OriginalBufferMetaParams as gst::glib::ffi::gpointer,
) as *mut imp::OriginalBufferMeta;
Self::from_mut_ptr(buffer, meta)
}
}
pub fn replace(&mut self, original: gst::Buffer, caps: Option<gst::Caps>) {
self.0.original = Some(original);
self.0.caps = caps;
}
pub fn original(&self) -> &gst::Buffer {
self.0.original.as_ref().unwrap()
}
pub fn caps(&self) -> &gst::Caps {
self.0.caps.as_ref().unwrap()
}
}
unsafe impl MetaAPI for OriginalBufferMeta {
type GstType = imp::OriginalBufferMeta;
fn meta_api() -> gst::glib::Type {
imp::original_buffer_meta_api_get_type()
}
}
impl fmt::Debug for OriginalBufferMeta {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("OriginalBufferMeta")
.field("buffer", &self.original())
.finish()
}
}
mod imp {
use gst::glib::translate::*;
use once_cell::sync::Lazy;
use std::mem;
use std::ptr;
pub(super) struct OriginalBufferMetaParams {
pub original: gst::Buffer,
pub caps: Option<gst::Caps>,
}
#[repr(C)]
pub struct OriginalBufferMeta {
parent: gst::ffi::GstMeta,
pub(super) original: Option<gst::Buffer>,
pub(super) caps: Option<gst::Caps>,
}
pub(super) fn original_buffer_meta_api_get_type() -> glib::Type {
static TYPE: Lazy<glib::Type> = Lazy::new(|| unsafe {
let t = from_glib(gst::ffi::gst_meta_api_type_register(
b"GstOriginalBufferMetaAPI\0".as_ptr() as *const _,
[ptr::null::<std::os::raw::c_char>()].as_ptr() as *mut *const _,
));
assert_ne!(t, glib::Type::INVALID);
t
});
*TYPE
}
unsafe extern "C" fn original_buffer_meta_init(
meta: *mut gst::ffi::GstMeta,
params: glib::ffi::gpointer,
_buffer: *mut gst::ffi::GstBuffer,
) -> glib::ffi::gboolean {
assert!(!params.is_null());
let meta = &mut *(meta as *mut OriginalBufferMeta);
let params = ptr::read(params as *const OriginalBufferMetaParams);
let OriginalBufferMetaParams { original, caps } = params;
ptr::write(&mut meta.original, Some(original));
ptr::write(&mut meta.caps, caps);
true.into_glib()
}
unsafe extern "C" fn original_buffer_meta_free(
meta: *mut gst::ffi::GstMeta,
_buffer: *mut gst::ffi::GstBuffer,
) {
let meta = &mut *(meta as *mut OriginalBufferMeta);
meta.original = None;
meta.caps = None;
}
unsafe extern "C" fn original_buffer_meta_transform(
dest: *mut gst::ffi::GstBuffer,
meta: *mut gst::ffi::GstMeta,
_buffer: *mut gst::ffi::GstBuffer,
_type_: glib::ffi::GQuark,
_data: glib::ffi::gpointer,
) -> glib::ffi::gboolean {
let dest = gst::BufferRef::from_mut_ptr(dest);
let meta = &*(meta as *const OriginalBufferMeta);
if dest.meta::<super::OriginalBufferMeta>().is_some() {
return true.into_glib();
}
// We don't store a ref in the meta if it's self-referencing, but we add it
// when copying the meta to another buffer.
super::OriginalBufferMeta::add(
dest,
meta.original.as_ref().unwrap().clone(),
meta.caps.clone(),
);
true.into_glib()
}
pub(super) fn original_buffer_meta_get_info() -> *const gst::ffi::GstMetaInfo {
struct MetaInfo(ptr::NonNull<gst::ffi::GstMetaInfo>);
unsafe impl Send for MetaInfo {}
unsafe impl Sync for MetaInfo {}
static META_INFO: Lazy<MetaInfo> = Lazy::new(|| unsafe {
MetaInfo(
ptr::NonNull::new(gst::ffi::gst_meta_register(
original_buffer_meta_api_get_type().into_glib(),
b"OriginalBufferMeta\0".as_ptr() as *const _,
mem::size_of::<OriginalBufferMeta>(),
Some(original_buffer_meta_init),
Some(original_buffer_meta_free),
Some(original_buffer_meta_transform),
) as *mut gst::ffi::GstMetaInfo)
.expect("Failed to register meta API"),
)
});
META_INFO.0.as_ptr()
}
}
#[test]
fn test() {
gst::init().unwrap();
let mut b = gst::Buffer::with_size(10).unwrap();
let caps = gst::Caps::new_empty_simple("video/x-raw");
let copy = b.copy();
let m = OriginalBufferMeta::add(b.make_mut(), copy, Some(caps.clone()));
assert_eq!(m.caps(), caps.as_ref());
assert_eq!(m.original().clone(), b);
let b2: gst::Buffer = b.copy_deep().unwrap();
let m = b.meta::<OriginalBufferMeta>().unwrap();
assert_eq!(m.caps(), caps.as_ref());
assert_eq!(m.original(), &b);
let m = b2.meta::<OriginalBufferMeta>().unwrap();
assert_eq!(m.caps(), caps.as_ref());
assert_eq!(m.original(), &b);
let b3: gst::Buffer = b2.copy_deep().unwrap();
drop(b2);
let m = b3.meta::<OriginalBufferMeta>().unwrap();
assert_eq!(m.caps(), caps.as_ref());
assert_eq!(m.original(), &b);
}
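
For orientation, here is a minimal sketch of how code elsewhere in this crate could consume the meta defined above, for instance from a pad probe. The helper below is illustrative only and not part of this change set; it assumes gst::init() has already been called and that the buffer went through code calling OriginalBufferMeta::add(), as in the test above.

```rust
// Hypothetical helper, not part of the patch: inspect the meta attached by
// OriginalBufferMeta::add() on a buffer flowing through the pipeline.
fn inspect_original(buffer: &gst::BufferRef) {
    if let Some(meta) = buffer.meta::<OriginalBufferMeta>() {
        // original() and caps() panic if the fields were never set, mirroring
        // the unwrap()s in the accessors above, so only call this on buffers
        // that actually carry a fully initialised meta.
        println!("original: {:?}, caps: {}", meta.original(), meta.caps());
    }
}
```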


@ -0,0 +1,315 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::subclass::prelude::*;
use gst_video::prelude::*;
use atomic_refcell::AtomicRefCell;
use crate::originalbuffermeta;
use crate::originalbuffermeta::OriginalBufferMeta;
struct CapsState {
caps: gst::Caps,
vinfo: Option<gst_video::VideoInfo>,
}
impl Default for CapsState {
fn default() -> Self {
CapsState {
caps: gst::Caps::new_empty(),
vinfo: None,
}
}
}
#[derive(Default)]
struct State {
sinkpad_caps: CapsState,
meta_caps: CapsState,
sinkpad_segment: Option<gst::Event>,
}
pub struct OriginalBufferRestore {
state: AtomicRefCell<State>,
src_pad: gst::Pad,
sink_pad: gst::Pad,
}
use once_cell::sync::Lazy;
#[allow(dead_code)]
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"originalbufferrestore",
gst::DebugColorFlags::empty(),
Some("Restore Original buffer as meta"),
)
});
#[glib::object_subclass]
impl ObjectSubclass for OriginalBufferRestore {
const NAME: &'static str = "GstOriginalBufferRestore";
type Type = super::OriginalBufferRestore;
type ParentType = gst::Element;
fn with_class(klass: &Self::Class) -> Self {
let sink_templ = klass.pad_template("sink").unwrap();
let src_templ = klass.pad_template("src").unwrap();
let sink_pad = gst::Pad::builder_from_template(&sink_templ)
.chain_function(|pad, parent, buffer| {
OriginalBufferRestore::catch_panic_pad_function(
parent,
|| Err(gst::FlowError::Error),
|obj| obj.sink_chain(pad, buffer),
)
})
.event_function(|pad, parent, event| {
OriginalBufferRestore::catch_panic_pad_function(
parent,
|| false,
|obj| obj.sink_event(pad, parent, event),
)
})
.query_function(|pad, parent, query| {
OriginalBufferRestore::catch_panic_pad_function(
parent,
|| false,
|obj| obj.sink_query(pad, parent, query),
)
})
.build();
let src_pad = gst::Pad::builder_from_template(&src_templ)
.event_function(|pad, parent, event| {
OriginalBufferRestore::catch_panic_pad_function(
parent,
|| false,
|obj| obj.src_event(pad, parent, event),
)
})
.build();
Self {
src_pad,
sink_pad,
state: Default::default(),
}
}
}
impl ObjectImpl for OriginalBufferRestore {
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
obj.add_pad(&self.sink_pad).unwrap();
obj.add_pad(&self.src_pad).unwrap();
}
}
impl GstObjectImpl for OriginalBufferRestore {}
impl ElementImpl for OriginalBufferRestore {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"Original Buffer Restore",
"Generic",
"Restores a reference to the buffer in a meta",
"Olivier Crête <olivier.crete@collabora.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let caps = gst::Caps::new_any();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
)
.unwrap();
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&caps,
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
fn change_state(
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
let ret = self.parent_change_state(transition)?;
if transition == gst::StateChange::PausedToReady {
let mut state = self.state.borrow_mut();
*state = State::default();
}
Ok(ret)
}
}
impl OriginalBufferRestore {
fn sink_event(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
event: gst::Event,
) -> bool {
match event.view() {
gst::EventView::Caps(e) => {
let mut state = self.state.borrow_mut();
let caps = e.caps_owned();
let vinfo = gst_video::VideoInfo::from_caps(&caps).ok();
state.sinkpad_caps = CapsState { caps, vinfo };
true
}
gst::EventView::Segment(_) => {
let mut state = self.state.borrow_mut();
state.sinkpad_segment = Some(event);
true
}
_ => gst::Pad::event_default(pad, parent, event),
}
}
fn src_event(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
event: gst::Event,
) -> bool {
if event.type_() == gst::EventType::Reconfigure
|| event.has_name("gst-original-buffer-forward-upstream-event")
{
let s = gst::Structure::builder("gst-original-buffer-forward-upstream-event")
.field("event", event)
.build();
let event = gst::event::CustomUpstream::new(s);
self.sink_pad.push_event(event)
} else {
gst::Pad::event_default(pad, parent, event)
}
}
fn sink_query(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
query: &mut gst::QueryRef,
) -> bool {
if let gst::QueryViewMut::Custom(_) = query.view_mut() {
let s = query.structure_mut();
if s.has_name("gst-original-buffer-forward-query") {
if let Ok(mut q) = s.get::<gst::Query>("query") {
s.remove_field("query");
assert!(q.is_writable());
let res = self.src_pad.peer_query(q.get_mut().unwrap());
s.set("query", q);
s.set("result", res);
return true;
}
}
}
gst::Pad::query_default(pad, parent, query)
}
fn sink_chain(
&self,
_pad: &gst::Pad,
inbuf: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let Some(ometa) = inbuf.meta::<OriginalBufferMeta>() else {
//gst::element_warning!(self, gst::StreamError::Failed, ["Buffer {} is missing the GstOriginalBufferMeta, put originalbuffersave upstream in your pipeline", buffer]);
return Ok(gst::FlowSuccess::Ok);
};
let mut state = self.state.borrow_mut();
let meta_caps = &mut state.meta_caps;
if &meta_caps.caps != ometa.caps() {
if !self.src_pad.push_event(gst::event::Caps::new(ometa.caps())) {
return Err(gst::FlowError::NotNegotiated);
}
meta_caps.caps = ometa.caps().clone();
meta_caps.vinfo = gst_video::VideoInfo::from_caps(&meta_caps.caps).ok();
}
let mut outbuf = ometa.original().copy();
inbuf
.copy_into(
outbuf.make_mut(),
gst::BufferCopyFlags::TIMESTAMPS | gst::BufferCopyFlags::FLAGS,
..,
)
.unwrap();
for meta in inbuf.iter_meta::<gst::Meta>() {
if meta.api() == originalbuffermeta::OriginalBufferMeta::meta_api() {
continue;
}
if meta.has_tag::<gst::meta::tags::Memory>()
|| meta.has_tag::<gst::meta::tags::MemoryReference>()
{
continue;
}
if meta.has_tag::<gst_video::video_meta::tags::Size>() {
if let (Some(ref meta_vinfo), Some(ref sink_vinfo)) =
(&state.meta_caps.vinfo, &state.sinkpad_caps.vinfo)
{
if (meta_vinfo.width() != sink_vinfo.width()
|| meta_vinfo.height() != sink_vinfo.height())
&& meta
.transform(
outbuf.make_mut(),
&gst_video::video_meta::VideoMetaTransformScale::new(
sink_vinfo, meta_vinfo,
),
)
.is_ok()
{
continue;
}
}
}
let _ = meta.transform(
outbuf.make_mut(),
&gst::meta::MetaTransformCopy::new(false, ..),
);
}
if let Some(event) = state.sinkpad_segment.take() {
if !self.src_pad.push_event(event) {
return Err(gst::FlowError::Error);
}
}
self.src_pad.push(outbuf)
}
}


@ -0,0 +1,31 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
/**
* SECTION:element-originalbufferrestore
*
* See originalbuffersave for details
*/
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub struct OriginalBufferRestore(ObjectSubclass<imp::OriginalBufferRestore>) @extends gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"originalbufferrestore",
gst::Rank::NONE,
OriginalBufferRestore::static_type(),
)
}


@ -0,0 +1,205 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
use gst::glib;
use gst::prelude::*;
use gst::subclass::prelude::*;
use crate::originalbuffermeta::OriginalBufferMeta;
pub struct OriginalBufferSave {
src_pad: gst::Pad,
sink_pad: gst::Pad,
}
use once_cell::sync::Lazy;
#[allow(dead_code)]
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"originalbuffersave",
gst::DebugColorFlags::empty(),
Some("Save Original buffer as meta"),
)
});
#[glib::object_subclass]
impl ObjectSubclass for OriginalBufferSave {
const NAME: &'static str = "GstOriginalBufferSave";
type Type = super::OriginalBufferSave;
type ParentType = gst::Element;
fn with_class(klass: &Self::Class) -> Self {
let sink_templ = klass.pad_template("sink").unwrap();
let src_templ = klass.pad_template("src").unwrap();
let sink_pad = gst::Pad::builder_from_template(&sink_templ)
.chain_function(|pad, parent, buffer| {
OriginalBufferSave::catch_panic_pad_function(
parent,
|| Err(gst::FlowError::Error),
|obj| obj.sink_chain(pad, buffer),
)
})
.query_function(|pad, parent, query| {
OriginalBufferSave::catch_panic_pad_function(
parent,
|| false,
|obj| obj.sink_query(pad, parent, query),
)
})
.flags(gst::PadFlags::PROXY_CAPS | gst::PadFlags::PROXY_ALLOCATION)
.build();
let src_pad = gst::Pad::builder_from_template(&src_templ)
.event_function(|pad, parent, event| {
OriginalBufferSave::catch_panic_pad_function(
parent,
|| false,
|obj| obj.src_event(pad, parent, event),
)
})
.build();
Self { src_pad, sink_pad }
}
}
impl ObjectImpl for OriginalBufferSave {
fn constructed(&self) {
self.parent_constructed();
let obj = self.obj();
obj.add_pad(&self.sink_pad).unwrap();
obj.add_pad(&self.src_pad).unwrap();
}
}
impl GstObjectImpl for OriginalBufferSave {}
impl ElementImpl for OriginalBufferSave {
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: Lazy<gst::subclass::ElementMetadata> = Lazy::new(|| {
gst::subclass::ElementMetadata::new(
"Original Buffer Save",
"Generic",
"Saves a reference to the buffer in a meta",
"Olivier Crête <olivier.crete@collabora.com>",
)
});
Some(&*ELEMENT_METADATA)
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: Lazy<Vec<gst::PadTemplate>> = Lazy::new(|| {
let caps = gst::Caps::new_any();
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
)
.unwrap();
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&caps,
)
.unwrap();
vec![src_pad_template, sink_pad_template]
});
PAD_TEMPLATES.as_ref()
}
}
impl OriginalBufferSave {
fn forward_query(&self, query: gst::Query) -> Option<gst::Query> {
let mut s = gst::Structure::new_empty("gst-original-buffer-forward-query");
s.set("query", query);
let mut query = gst::query::Custom::new(s);
if self.src_pad.peer_query(&mut query) {
let s = query.structure_mut();
if let (Ok(true), Ok(q)) = (s.get("result"), s.get::<gst::Query>("query")) {
Some(q)
} else {
None
}
} else {
None
}
}
fn sink_chain(
&self,
pad: &gst::Pad,
inbuf: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let mut buf = inbuf.copy();
let caps = pad.current_caps();
if let Some(mut meta) = buf.make_mut().meta_mut::<OriginalBufferMeta>() {
meta.replace(inbuf, caps);
} else {
OriginalBufferMeta::add(buf.make_mut(), inbuf, caps);
}
self.src_pad.push(buf)
}
fn sink_query(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
query: &mut gst::QueryRef,
) -> bool {
let ret = gst::Pad::query_default(pad, parent, query);
if !ret {
return ret;
}
if let gst::QueryViewMut::Caps(q) = query.view_mut() {
if let Some(caps) = q.result_owned() {
let forwarding_q = gst::query::Caps::new(Some(&caps)).into();
if let Some(forwarding_q) = self.forward_query(forwarding_q) {
if let gst::QueryView::Caps(c) = forwarding_q.view() {
let res = c
.result_owned()
.map(|c| c.intersect_with_mode(&caps, gst::CapsIntersectMode::First));
q.set_result(&res);
}
}
}
}
// We should also do allocation queries, but that requires supporting the same
// intersection semantics as gsttee, which should be in a helper function.
true
}
fn src_event(
&self,
pad: &gst::Pad,
parent: Option<&impl IsA<gst::Object>>,
event: gst::Event,
) -> bool {
let event = if event.has_name("gst-original-buffer-forward-upstream-event") {
event.structure().unwrap().get("event").unwrap()
} else {
event
};
gst::Pad::event_default(pad, parent, event)
}
}


@ -0,0 +1,41 @@
// Copyright (C) 2024 Collabora Ltd
// @author: Olivier Crête <olivier.crete@collabora.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
/**
* SECTION:element-originalbuffersave
*
* GStreamer elements to store the original buffer and restore it later
*
* In many analysis scenarios (for example machine learning), it is desirable to
* run the analysis on a pre-processed buffer, for example one with a lowered
* resolution, but we may want to take the output of this analysis and apply it
* to the original buffer.
*
* These elements do just that; the typical usage would be a pipeline like:
*
* `... ! originalbuffersave ! videoconvertscale ! video/x-raw, width=100, height=100 ! analysiselement ! originalbufferrestore ! ...`
*
* The originalbufferrestore element will "restore" the buffer that entered the "save"
* element, but will keep any metadata that was added in the meantime.
*/
use gst::glib;
use gst::prelude::*;
mod imp;
glib::wrapper! {
pub struct OriginalBufferSave(ObjectSubclass<imp::OriginalBufferSave>) @extends gst::Element, gst::Object;
}
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
gst::Element::register(
Some(plugin),
"originalbuffersave",
gst::Rank::NONE,
OriginalBufferSave::static_type(),
)
}
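
To make the documented usage concrete, here is a minimal application-side sketch that builds the pipeline from the element description above with gstreamer-rs. The originalbuffersave/originalbufferrestore names come from the registration code in this module; everything else (videotestsrc, identity standing in for the analysis element, fakesink, and the use of gst::parse::launch, which older gstreamer-rs releases expose as gst::parse_launch) is an assumption for illustration only.

```rust
use gst::prelude::*;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    gst::init()?;

    // `identity` stands in for the analysis element from the documentation above.
    let pipeline = gst::parse::launch(
        "videotestsrc num-buffers=30 ! originalbuffersave ! videoconvertscale \
         ! video/x-raw,width=100,height=100 ! identity ! originalbufferrestore ! fakesink",
    )?;
    pipeline.set_state(gst::State::Playing)?;

    // Run until EOS or error, then shut down.
    let bus = pipeline.bus().expect("pipeline without bus");
    for msg in bus.iter_timed(gst::ClockTime::NONE) {
        use gst::MessageView;
        match msg.view() {
            MessageView::Eos(..) => break,
            MessageView::Error(err) => {
                eprintln!("Error: {}", err.error());
                break;
            }
            _ => (),
        }
    }
    pipeline.set_state(gst::State::Null)?;
    Ok(())
}
```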


@ -112,7 +112,7 @@ impl State {
gst::FlowError::Error
})?;
gst::debug!(CAT, obj: pad, "Returned pull size: {}", map.len());
gst::debug!(CAT, obj = pad, "Returned pull size: {}", map.len());
let mut nonce = add_nonce(self.initial_nonce.unwrap(), chunk_index);
let block_size = self.block_size.expect("Block size wasn't set") as usize + box_::MACBYTES;
@ -144,8 +144,8 @@ impl State {
adapter_offset: usize,
) -> Result<gst::PadGetRangeSuccess, gst::FlowError> {
let avail = self.adapter.available();
gst::debug!(CAT, obj: pad, "Avail: {}", avail);
gst::debug!(CAT, obj: pad, "Adapter offset: {}", adapter_offset);
gst::debug!(CAT, obj = pad, "Avail: {}", avail);
gst::debug!(CAT, obj = pad, "Adapter offset: {}", adapter_offset);
// if this underflows, the available buffer in the adapter is smaller than the
// requested offset, which means we have reached EOS
@ -189,7 +189,7 @@ impl State {
Err(e) => {
gst::error!(
CAT,
obj: pad,
obj = pad,
"Failed to map provided buffer writable: {}",
e
);
@ -197,7 +197,7 @@ impl State {
}
};
if let Err(e) = self.adapter.copy(0, &mut map[..available_size]) {
gst::error!(CAT, obj: pad, "Failed to copy into provided buffer: {}", e);
gst::error!(CAT, obj = pad, "Failed to copy into provided buffer: {}", e);
return Err(gst::FlowError::Error);
}
if map.len() != available_size {
@ -278,7 +278,7 @@ impl Decrypter {
fn src_query(&self, pad: &gst::Pad, query: &mut gst::QueryRef) -> bool {
use gst::QueryViewMut;
gst::log!(CAT, obj: pad, "Handling query {:?}", query);
gst::log!(CAT, obj = pad, "Handling query {:?}", query);
match query.view_mut() {
QueryViewMut::Scheduling(q) => {
@ -288,12 +288,12 @@ impl Decrypter {
return res;
}
gst::log!(CAT, obj: pad, "Upstream returned {:?}", peer_query);
gst::log!(CAT, obj = pad, "Upstream returned {:?}", peer_query);
let (flags, min, max, align) = peer_query.result();
q.set(flags, min, max, align);
q.add_scheduling_modes(&[gst::PadMode::Pull]);
gst::log!(CAT, obj: pad, "Returning {:?}", q.query_mut());
gst::log!(CAT, obj = pad, "Returning {:?}", q.query_mut());
true
}
QueryViewMut::Duration(q) => {
@ -334,7 +334,7 @@ impl Decrypter {
// subtract the MAC of each block
let size = size - total_chunks * box_::MACBYTES as u64;
gst::debug!(CAT, obj: pad, "Setting duration bytes: {}", size);
gst::debug!(CAT, obj = pad, "Setting duration bytes: {}", size);
q.set(size.bytes());
true
@ -402,9 +402,9 @@ impl Decrypter {
let state = state.as_mut().unwrap();
state.initial_nonce = Some(nonce);
gst::debug!(CAT, imp: self, "Setting nonce to: {:?}", nonce.0);
gst::debug!(CAT, imp = self, "Setting nonce to: {:?}", nonce.0);
state.block_size = Some(block_size);
gst::debug!(CAT, imp: self, "Setting block size to: {}", block_size);
gst::debug!(CAT, imp = self, "Setting block size to: {}", block_size);
Ok(())
}
@ -420,8 +420,8 @@ impl Decrypter {
+ (chunk_index * block_size as u64)
+ (chunk_index * box_::MACBYTES as u64);
gst::debug!(CAT, obj: pad, "Pull offset: {}", pull_offset);
gst::debug!(CAT, obj: pad, "block size: {}", block_size);
gst::debug!(CAT, obj = pad, "Pull offset: {}", pull_offset);
gst::debug!(CAT, obj = pad, "block size: {}", block_size);
// calculate how many chunks are needed; if we need something like 3.2 chunks,
// round up to 4 and cut the buffer afterwards.
@ -440,7 +440,7 @@ impl Decrypter {
// Read at least one chunk in case 0 bytes were requested
let total_chunks = u32::max((checked - 1) / block_size, 1);
gst::debug!(CAT, obj: pad, "Blocks to be pulled: {}", total_chunks);
gst::debug!(CAT, obj = pad, "Blocks to be pulled: {}", total_chunks);
// Pull a buffer of all the chunks we will need
let checked_size = total_chunks.checked_mul(block_size).ok_or_else(|| {
@ -457,23 +457,34 @@ impl Decrypter {
})?;
let total_size = checked_size + (total_chunks * box_::MACBYTES as u32);
gst::debug!(CAT, obj: pad, "Requested pull size: {}", total_size);
gst::debug!(CAT, obj = pad, "Requested pull size: {}", total_size);
self.sinkpad.pull_range(pull_offset, total_size).map_err(|err| {
match err {
gst::FlowError::Flushing => {
gst::debug!(CAT, obj: self.sinkpad, "Pausing after pulling buffer, reason: flushing");
}
gst::FlowError::Eos => {
gst::debug!(CAT, obj: self.sinkpad, "Eos");
}
flow => {
gst::error!(CAT, obj: self.sinkpad, "Failed to pull, reason: {:?}", flow);
}
};
self.sinkpad
.pull_range(pull_offset, total_size)
.map_err(|err| {
match err {
gst::FlowError::Flushing => {
gst::debug!(
CAT,
obj = self.sinkpad,
"Pausing after pulling buffer, reason: flushing"
);
}
gst::FlowError::Eos => {
gst::debug!(CAT, obj = self.sinkpad, "Eos");
}
flow => {
gst::error!(
CAT,
obj = self.sinkpad,
"Failed to pull, reason: {:?}",
flow
);
}
};
err
})
err
})
}
fn range(
@ -493,14 +504,14 @@ impl Decrypter {
state.block_size.expect("Block size wasn't set")
};
gst::debug!(CAT, obj: pad, "Requested offset: {}", offset);
gst::debug!(CAT, obj: pad, "Requested size: {}", requested_size);
gst::debug!(CAT, obj = pad, "Requested offset: {}", offset);
gst::debug!(CAT, obj = pad, "Requested size: {}", requested_size);
let chunk_index = offset / block_size as u64;
gst::debug!(CAT, obj: pad, "Stream Block index: {}", chunk_index);
gst::debug!(CAT, obj = pad, "Stream Block index: {}", chunk_index);
let pull_offset = offset - (chunk_index * block_size as u64);
assert!(pull_offset <= std::u32::MAX as u64);
assert!(pull_offset <= u32::MAX as u64);
let pull_offset = pull_offset as u32;
let pulled_buffer =
@ -670,7 +681,7 @@ impl ElementImpl for Decrypter {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::debug!(CAT, imp: self, "Changing state {:?}", transition);
gst::debug!(CAT, imp = self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {


@ -166,7 +166,7 @@ impl Encrypter {
pad: &gst::Pad,
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::log!(CAT, obj: pad, "Handling buffer {:?}", buffer);
gst::log!(CAT, obj = pad, "Handling buffer {:?}", buffer);
let mut buffers = BufferVec::new();
let mut state_guard = self.state.lock().unwrap();
@ -193,7 +193,7 @@ impl Encrypter {
for buffer in buffers {
self.srcpad.push(buffer).map_err(|err| {
gst::error!(CAT, imp: self, "Failed to push buffer {:?}", err);
gst::error!(CAT, imp = self, "Failed to push buffer {:?}", err);
err
})?;
}
@ -204,7 +204,7 @@ impl Encrypter {
fn sink_event(&self, pad: &gst::Pad, event: gst::Event) -> bool {
use gst::EventView;
gst::log!(CAT, obj: pad, "Handling event {:?}", event);
gst::log!(CAT, obj = pad, "Handling event {:?}", event);
match event.view() {
EventView::Caps(_) => {
@ -236,7 +236,7 @@ impl Encrypter {
for buffer in buffers {
if let Err(err) = self.srcpad.push(buffer) {
gst::error!(CAT, imp: self, "Failed to push buffer at EOS {:?}", err);
gst::error!(CAT, imp = self, "Failed to push buffer at EOS {:?}", err);
return false;
}
}
@ -250,7 +250,7 @@ impl Encrypter {
fn src_event(&self, pad: &gst::Pad, event: gst::Event) -> bool {
use gst::EventView;
gst::log!(CAT, obj: pad, "Handling event {:?}", event);
gst::log!(CAT, obj = pad, "Handling event {:?}", event);
match event.view() {
EventView::Seek(_) => false,
@ -261,7 +261,7 @@ impl Encrypter {
fn src_query(&self, pad: &gst::Pad, query: &mut gst::QueryRef) -> bool {
use gst::QueryViewMut;
gst::log!(CAT, obj: pad, "Handling query {:?}", query);
gst::log!(CAT, obj = pad, "Handling query {:?}", query);
match query.view_mut() {
QueryViewMut::Seeking(q) => {
@ -271,7 +271,7 @@ impl Encrypter {
gst::GenericFormattedValue::none_for_format(format),
gst::GenericFormattedValue::none_for_format(format),
);
gst::log!(CAT, obj: pad, "Returning {:?}", q.query_mut());
gst::log!(CAT, obj = pad, "Returning {:?}", q.query_mut());
true
}
QueryViewMut::Duration(q) => {
@ -311,7 +311,7 @@ impl Encrypter {
// add static offsets
let size = size + crate::HEADERS_SIZE as u64;
gst::debug!(CAT, obj: pad, "Setting duration bytes: {}", size);
gst::debug!(CAT, obj = pad, "Setting duration bytes: {}", size);
q.set(size.bytes());
true
@ -492,7 +492,7 @@ impl ElementImpl for Encrypter {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::debug!(CAT, imp: self, "Changing state {:?}", transition);
gst::debug!(CAT, imp = self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {


@ -84,13 +84,16 @@ fn main() {
.property("signal-handoffs", true)
.build()
.unwrap();
sink.connect(
sink.connect_closure(
"handoff",
true,
glib::clone!(@strong counter => move |_| {
let _ = counter.fetch_add(1, Ordering::SeqCst);
None
}),
glib::closure!(
#[strong]
counter,
move |_fakesink: &gst::Element, _buffer: &gst::Buffer, _pad: &gst::Pad| {
let _ = counter.fetch_add(1, Ordering::SeqCst);
}
),
);
let (source, context) = match source.as_str() {


@ -1,19 +1,19 @@
macro_rules! debug_or_trace {
($cat:expr, $raise_log_level:expr, $qual:ident: $obj:expr, $rest:tt $(,)?) => {
($cat:expr, $raise_log_level:expr, $qual:ident = $obj:expr, $rest:tt $(,)?) => {
if $raise_log_level {
gst::debug!($cat, $qual: $obj, $rest);
gst::debug!($cat, $qual = $obj, $rest);
} else {
gst::trace!($cat, $qual: $obj, $rest);
gst::trace!($cat, $qual = $obj, $rest);
}
};
}
macro_rules! log_or_trace {
($cat:expr, $raise_log_level:expr, $qual:ident: $obj:expr, $rest:tt $(,)?) => {
($cat:expr, $raise_log_level:expr, $qual:ident = $obj:expr, $rest:tt $(,)?) => {
if $raise_log_level {
gst::log!($cat, $qual: $obj, $rest);
gst::log!($cat, $qual = $obj, $rest);
} else {
gst::trace!($cat, $qual: $obj, $rest);
gst::trace!($cat, $qual = $obj, $rest);
}
};
}


@ -43,14 +43,14 @@ impl PadSinkHandlerInner {
log_or_trace!(
CAT,
self.is_main_elem,
obj: elem,
obj = elem,
"Discarding {buffer:?} (flushing)"
);
return Err(gst::FlowError::Flushing);
}
debug_or_trace!(CAT, self.is_main_elem, obj: elem, "Received {buffer:?}");
debug_or_trace!(CAT, self.is_main_elem, obj = elem, "Received {buffer:?}");
let dts = buffer
.dts()
@ -67,18 +67,23 @@ impl PadSinkHandlerInner {
stats.add_buffer(latency, interval);
}
debug_or_trace!(CAT, self.is_main_elem, obj: elem, "o latency {latency:.2?}");
debug_or_trace!(
CAT,
self.is_main_elem,
obj: elem,
obj = elem,
"o latency {latency:.2?}"
);
debug_or_trace!(
CAT,
self.is_main_elem,
obj = elem,
"o interval {interval:.2?}",
);
}
self.last_dts = Some(dts);
log_or_trace!(CAT, self.is_main_elem, obj: elem, "Buffer processed");
log_or_trace!(CAT, self.is_main_elem, obj = elem, "Buffer processed");
Ok(())
}
@ -117,7 +122,7 @@ impl PadSinkHandler for AsyncPadSinkHandler {
EventView::Eos(_) => {
{
let mut inner = self.0.lock().await;
debug_or_trace!(CAT, inner.is_main_elem, obj: elem, "EOS");
debug_or_trace!(CAT, inner.is_main_elem, obj = elem, "EOS");
inner.is_flushing = true;
}
@ -196,7 +201,7 @@ pub struct AsyncMutexSink {
impl AsyncMutexSink {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
let settings = self.settings.lock().unwrap();
debug_or_trace!(CAT, settings.is_main_elem, imp: self, "Preparing");
debug_or_trace!(CAT, settings.is_main_elem, imp = self, "Preparing");
let stats = if settings.logs_stats {
Some(Stats::new(
settings.max_buffers,
@ -207,25 +212,25 @@ impl AsyncMutexSink {
};
self.sink_pad_handler.prepare(settings.is_main_elem, stats);
debug_or_trace!(CAT, settings.is_main_elem, imp: self, "Prepared");
debug_or_trace!(CAT, settings.is_main_elem, imp = self, "Prepared");
Ok(())
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopping");
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopping");
self.sink_pad_handler.stop();
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopped");
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp: self, "Starting");
debug_or_trace!(CAT, is_main_elem, imp = self, "Starting");
self.sink_pad_handler.start();
debug_or_trace!(CAT, is_main_elem, imp: self, "Started");
debug_or_trace!(CAT, is_main_elem, imp = self, "Started");
Ok(())
}
@ -311,7 +316,7 @@ impl ElementImpl for AsyncMutexSink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {transition:?}");
gst::trace!(CAT, imp = self, "Changing state {transition:?}");
match transition {
gst::StateChange::NullToReady => {


@ -42,14 +42,14 @@ impl PadSinkHandlerInner {
log_or_trace!(
CAT,
self.is_main_elem,
obj: elem,
obj = elem,
"Discarding {buffer:?} (flushing)"
);
return Err(gst::FlowError::Flushing);
}
debug_or_trace!(CAT, self.is_main_elem, obj: elem, "Received {buffer:?}");
debug_or_trace!(CAT, self.is_main_elem, obj = elem, "Received {buffer:?}");
let dts = buffer
.dts()
@ -66,18 +66,23 @@ impl PadSinkHandlerInner {
stats.add_buffer(latency, interval);
}
debug_or_trace!(CAT, self.is_main_elem, obj: elem, "o latency {latency:.2?}");
debug_or_trace!(
CAT,
self.is_main_elem,
obj: elem,
obj = elem,
"o latency {latency:.2?}"
);
debug_or_trace!(
CAT,
self.is_main_elem,
obj = elem,
"o interval {interval:.2?}",
);
}
self.last_dts = Some(dts);
log_or_trace!(CAT, self.is_main_elem, obj: elem, "Buffer processed");
log_or_trace!(CAT, self.is_main_elem, obj = elem, "Buffer processed");
Ok(())
}
@ -116,7 +121,7 @@ impl PadSinkHandler for SyncPadSinkHandler {
EventView::Eos(_) => {
{
let mut inner = self.0.lock().unwrap();
debug_or_trace!(CAT, inner.is_main_elem, obj: elem, "EOS");
debug_or_trace!(CAT, inner.is_main_elem, obj = elem, "EOS");
inner.is_flushing = true;
}
@ -189,7 +194,7 @@ pub struct DirectSink {
impl DirectSink {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
let settings = self.settings.lock().unwrap();
debug_or_trace!(CAT, settings.is_main_elem, imp: self, "Preparing");
debug_or_trace!(CAT, settings.is_main_elem, imp = self, "Preparing");
let stats = if settings.logs_stats {
Some(Stats::new(
settings.max_buffers,
@ -200,25 +205,25 @@ impl DirectSink {
};
self.sink_pad_handler.prepare(settings.is_main_elem, stats);
debug_or_trace!(CAT, settings.is_main_elem, imp: self, "Prepared");
debug_or_trace!(CAT, settings.is_main_elem, imp = self, "Prepared");
Ok(())
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopping");
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopping");
self.sink_pad_handler.stop();
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopped");
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp: self, "Starting");
debug_or_trace!(CAT, is_main_elem, imp = self, "Starting");
self.sink_pad_handler.start();
debug_or_trace!(CAT, is_main_elem, imp: self, "Started");
debug_or_trace!(CAT, is_main_elem, imp = self, "Started");
Ok(())
}
@ -304,7 +309,7 @@ impl ElementImpl for DirectSink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {transition:?}");
gst::trace!(CAT, imp = self, "Changing state {transition:?}");
match transition {
gst::StateChange::NullToReady => {


@ -68,7 +68,7 @@ impl PadSinkHandler for TaskPadSinkHandler {
}
EventView::Eos(_) => {
let is_main_elem = elem.imp().settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, obj: elem, "EOS");
debug_or_trace!(CAT, is_main_elem, obj = elem, "EOS");
// When each element sends its own EOS message,
// it takes ages for the pipeline to process all of them.
@ -137,13 +137,13 @@ impl TaskImpl for TaskSinkTask {
type Item = StreamItem;
fn prepare(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Preparing Task");
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Preparing Task");
future::ok(()).boxed()
}
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async {
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Starting Task");
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Starting Task");
self.last_dts = None;
if let Some(stats) = self.stats.as_mut() {
stats.start();
@ -156,7 +156,7 @@ impl TaskImpl for TaskSinkTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async {
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Stopping Task");
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Stopping Task");
self.flush();
Ok(())
}
@ -172,7 +172,7 @@ impl TaskImpl for TaskSinkTask {
fn handle_item(&mut self, item: StreamItem) -> BoxFuture<'_, Result<(), gst::FlowError>> {
async move {
debug_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Received {item:?}");
debug_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Received {item:?}");
match item {
StreamItem::Buffer(buffer) => {
@ -194,20 +194,20 @@ impl TaskImpl for TaskSinkTask {
debug_or_trace!(
CAT,
self.is_main_elem,
obj: self.elem,
obj = self.elem,
"o latency {latency:.2?}",
);
debug_or_trace!(
CAT,
self.is_main_elem,
obj: self.elem,
obj = self.elem,
"o interval {interval:.2?}",
);
}
self.last_dts = Some(dts);
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Buffer processed");
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Buffer processed");
}
StreamItem::Event(evt) => {
if let EventView::Segment(evt) = evt.view() {
@ -249,7 +249,7 @@ impl TaskSink {
None
};
debug_or_trace!(CAT, settings.is_main_elem, imp: self, "Preparing");
debug_or_trace!(CAT, settings.is_main_elem, imp = self, "Preparing");
let ts_ctx = Context::acquire(&settings.context, settings.context_wait).map_err(|err| {
error_msg!(
@ -265,32 +265,32 @@ impl TaskSink {
*self.item_sender.lock().unwrap() = Some(item_sender);
debug_or_trace!(CAT, settings.is_main_elem, imp: self, "Prepared");
debug_or_trace!(CAT, settings.is_main_elem, imp = self, "Prepared");
Ok(())
}
fn unprepare(&self) {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp: self, "Unpreparing");
debug_or_trace!(CAT, is_main_elem, imp = self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
debug_or_trace!(CAT, is_main_elem, imp: self, "Unprepared");
debug_or_trace!(CAT, is_main_elem, imp = self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopping");
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopping");
self.task.stop().block_on()?;
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopped");
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp: self, "Starting");
debug_or_trace!(CAT, is_main_elem, imp = self, "Starting");
self.task.start().block_on()?;
debug_or_trace!(CAT, is_main_elem, imp: self, "Started");
debug_or_trace!(CAT, is_main_elem, imp = self, "Started");
Ok(())
}
@ -376,7 +376,7 @@ impl ElementImpl for TaskSink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {transition:?}");
gst::trace!(CAT, imp = self, "Changing state {transition:?}");
match transition {
gst::StateChange::NullToReady => {


@ -103,7 +103,7 @@ impl TaskImpl for SrcTask {
let settings = imp.settings.lock().unwrap();
self.is_main_elem = settings.is_main_elem;
log_or_trace!(CAT, self.is_main_elem, imp: imp, "Preparing Task");
log_or_trace!(CAT, self.is_main_elem, imp = imp, "Preparing Task");
self.push_period = settings.push_period;
self.num_buffers = settings.num_buffers;
@ -113,12 +113,17 @@ impl TaskImpl for SrcTask {
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Starting Task");
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Starting Task");
if self.need_initial_events {
let imp = self.elem.imp();
debug_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Pushing initial events");
debug_or_trace!(
CAT,
self.is_main_elem,
obj = self.elem,
"Pushing initial events"
);
let stream_id =
format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
@ -157,7 +162,7 @@ impl TaskImpl for SrcTask {
}
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Stopping Task");
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Stopping Task");
self.buffer_pool.set_active(false).unwrap();
self.timer = None;
self.need_initial_events = true;
@ -167,9 +172,9 @@ impl TaskImpl for SrcTask {
fn try_next(&mut self) -> BoxFuture<'_, Result<(), gst::FlowError>> {
async move {
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Awaiting timer");
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Awaiting timer");
self.timer.as_mut().unwrap().next().await;
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Timer ticked");
log_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Timer ticked");
Ok(())
}
@ -190,13 +195,18 @@ impl TaskImpl for SrcTask {
buffer
})
.map_err(|err| {
gst::error!(CAT, obj: self.elem, "Failed to acquire buffer {err}");
gst::error!(CAT, obj = self.elem, "Failed to acquire buffer {err}");
err
})?;
debug_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Forwarding buffer");
debug_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Forwarding buffer");
self.elem.imp().src_pad.push(buffer).await?;
log_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Successfully pushed buffer");
log_or_trace!(
CAT,
self.is_main_elem,
obj = self.elem,
"Successfully pushed buffer"
);
self.buffer_count += 1;
@ -213,22 +223,22 @@ impl TaskImpl for SrcTask {
async move {
match err {
gst::FlowError::Eos => {
debug_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Pushing EOS");
debug_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Pushing EOS");
let imp = self.elem.imp();
if !imp.src_pad.push_event(gst::event::Eos::new()).await {
gst::error!(CAT, imp: imp, "Error pushing EOS");
gst::error!(CAT, imp = imp, "Error pushing EOS");
}
task::Trigger::Stop
}
gst::FlowError::Flushing => {
debug_or_trace!(CAT, self.is_main_elem, obj: self.elem, "Flushing");
debug_or_trace!(CAT, self.is_main_elem, obj = self.elem, "Flushing");
task::Trigger::FlushStart
}
err => {
gst::error!(CAT, obj: self.elem, "Got error {err}");
gst::error!(CAT, obj = self.elem, "Got error {err}");
gst::element_error!(
&self.elem,
gst::StreamError::Failed,
@ -254,7 +264,7 @@ pub struct TestSrc {
impl TestSrc {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp: self, "Preparing");
debug_or_trace!(CAT, is_main_elem, imp = self, "Preparing");
let settings = self.settings.lock().unwrap();
let ts_ctx = Context::acquire(&settings.context, settings.context_wait).map_err(|err| {
@ -269,41 +279,41 @@ impl TestSrc {
.prepare(SrcTask::new(self.obj().clone()), ts_ctx)
.block_on()?;
debug_or_trace!(CAT, is_main_elem, imp: self, "Prepared");
debug_or_trace!(CAT, is_main_elem, imp = self, "Prepared");
Ok(())
}
fn unprepare(&self) {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp: self, "Unpreparing");
debug_or_trace!(CAT, is_main_elem, imp = self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
debug_or_trace!(CAT, is_main_elem, imp: self, "Unprepared");
debug_or_trace!(CAT, is_main_elem, imp = self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopping");
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopping");
self.task.stop().block_on()?;
debug_or_trace!(CAT, is_main_elem, imp: self, "Stopped");
debug_or_trace!(CAT, is_main_elem, imp = self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp: self, "Starting");
debug_or_trace!(CAT, is_main_elem, imp = self, "Starting");
self.task.start().block_on()?;
debug_or_trace!(CAT, is_main_elem, imp: self, "Started");
debug_or_trace!(CAT, is_main_elem, imp = self, "Started");
Ok(())
}
fn pause(&self) -> Result<(), gst::ErrorMessage> {
let is_main_elem = self.settings.lock().unwrap().is_main_elem;
debug_or_trace!(CAT, is_main_elem, imp: self, "Pausing");
debug_or_trace!(CAT, is_main_elem, imp = self, "Pausing");
self.task.pause().block_on()?;
debug_or_trace!(CAT, is_main_elem, imp: self, "Paused");
debug_or_trace!(CAT, is_main_elem, imp = self, "Paused");
Ok(())
}
@ -453,7 +463,7 @@ impl ElementImpl for TestSrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {transition:?}");
gst::trace!(CAT, imp = self, "Changing state {transition:?}");
match transition {
gst::StateChange::NullToReady => {


@ -30,7 +30,6 @@ use once_cell::sync::Lazy;
use std::sync::Mutex;
use std::time::Duration;
use std::u32;
use crate::runtime::prelude::*;
use crate::runtime::{Context, PadSrc, Task, TaskState};
@ -83,7 +82,7 @@ impl PadSrcHandler for AppSrcPadHandler {
type ElementImpl = AppSrc;
fn src_event(self, pad: &gst::Pad, imp: &AppSrc, event: gst::Event) -> bool {
gst::log!(CAT, obj: pad, "Handling {:?}", event);
gst::log!(CAT, obj = pad, "Handling {:?}", event);
use gst::EventView;
let ret = match event.view() {
@ -95,16 +94,16 @@ impl PadSrcHandler for AppSrcPadHandler {
};
if ret {
gst::log!(CAT, obj: pad, "Handled {:?}", event);
gst::log!(CAT, obj = pad, "Handled {:?}", event);
} else {
gst::log!(CAT, obj: pad, "Didn't handle {:?}", event);
gst::log!(CAT, obj = pad, "Didn't handle {:?}", event);
}
ret
}
fn src_query(self, pad: &gst::Pad, imp: &AppSrc, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj: pad, "Handling {:?}", query);
gst::log!(CAT, obj = pad, "Handling {:?}", query);
use gst::QueryViewMut;
let ret = match query.view_mut() {
@ -136,9 +135,9 @@ impl PadSrcHandler for AppSrcPadHandler {
};
if ret {
gst::log!(CAT, obj: pad, "Handled {:?}", query);
gst::log!(CAT, obj = pad, "Handled {:?}", query);
} else {
gst::log!(CAT, obj: pad, "Didn't handle {:?}", query);
gst::log!(CAT, obj = pad, "Didn't handle {:?}", query);
}
ret
}
@ -170,11 +169,11 @@ impl AppSrcTask {
}
async fn push_item(&mut self, item: StreamItem) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::log!(CAT, obj: self.element, "Handling {:?}", item);
gst::log!(CAT, obj = self.element, "Handling {:?}", item);
let appsrc = self.element.imp();
if self.need_initial_events {
gst::debug!(CAT, obj: self.element, "Pushing initial events");
gst::debug!(CAT, obj = self.element, "Pushing initial events");
let stream_id = format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
let stream_start_evt = gst::event::StreamStart::builder(&stream_id)
@ -204,7 +203,7 @@ impl AppSrcTask {
match item {
StreamItem::Buffer(buffer) => {
gst::log!(CAT, obj: self.element, "Forwarding {:?}", buffer);
gst::log!(CAT, obj = self.element, "Forwarding {:?}", buffer);
appsrc.src_pad.push(buffer).await
}
StreamItem::Event(event) => {
@ -214,7 +213,7 @@ impl AppSrcTask {
Err(gst::FlowError::Eos)
}
_ => {
gst::log!(CAT, obj: self.element, "Forwarding {:?}", event);
gst::log!(CAT, obj = self.element, "Forwarding {:?}", event);
appsrc.src_pad.push_event(event).await;
Ok(gst::FlowSuccess::Ok)
}
@ -242,18 +241,18 @@ impl TaskImpl for AppSrcTask {
let res = self.push_item(item).await;
match res {
Ok(_) => {
gst::log!(CAT, obj: self.element, "Successfully pushed item");
gst::log!(CAT, obj = self.element, "Successfully pushed item");
}
Err(gst::FlowError::Eos) => {
gst::debug!(CAT, obj: self.element, "EOS");
gst::debug!(CAT, obj = self.element, "EOS");
let appsrc = self.element.imp();
appsrc.src_pad.push_event(gst::event::Eos::new()).await;
}
Err(gst::FlowError::Flushing) => {
gst::debug!(CAT, obj: self.element, "Flushing");
gst::debug!(CAT, obj = self.element, "Flushing");
}
Err(err) => {
gst::error!(CAT, obj: self.element, "Got error {}", err);
gst::error!(CAT, obj = self.element, "Got error {}", err);
gst::element_error!(
&self.element,
gst::StreamError::Failed,
@ -270,13 +269,13 @@ impl TaskImpl for AppSrcTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Stopping task");
gst::log!(CAT, obj = self.element, "Stopping task");
self.flush();
self.need_initial_events = true;
self.need_segment = true;
gst::log!(CAT, obj: self.element, "Task stopped");
gst::log!(CAT, obj = self.element, "Task stopped");
Ok(())
}
.boxed()
@ -284,12 +283,12 @@ impl TaskImpl for AppSrcTask {
fn flush_start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Starting task flush");
gst::log!(CAT, obj = self.element, "Starting task flush");
self.flush();
self.need_segment = true;
gst::log!(CAT, obj: self.element, "Task flush started");
gst::log!(CAT, obj = self.element, "Task flush started");
Ok(())
}
.boxed()
@ -309,7 +308,7 @@ impl AppSrc {
fn push_buffer(&self, mut buffer: gst::Buffer) -> bool {
let state = self.task.lock_state();
if *state != TaskState::Started && *state != TaskState::Paused {
gst::debug!(CAT, imp: self, "Rejecting buffer due to element state");
gst::debug!(CAT, imp = self, "Rejecting buffer due to element state");
return false;
}
@ -324,7 +323,7 @@ impl AppSrc {
buffer.set_dts(now.opt_checked_sub(base_time).ok().flatten());
buffer.set_pts(None);
} else {
gst::error!(CAT, imp: self, "Don't have a clock yet");
gst::error!(CAT, imp = self, "Don't have a clock yet");
return false;
}
}
@ -337,7 +336,7 @@ impl AppSrc {
{
Ok(_) => true,
Err(err) => {
gst::error!(CAT, imp: self, "Failed to queue buffer: {}", err);
gst::error!(CAT, imp = self, "Failed to queue buffer: {}", err);
false
}
}
@ -353,14 +352,14 @@ impl AppSrc {
match sender.try_send(StreamItem::Event(gst::event::Eos::new())) {
Ok(_) => true,
Err(err) => {
gst::error!(CAT, imp: self, "Failed to queue EOS: {}", err);
gst::error!(CAT, imp = self, "Failed to queue EOS: {}", err);
false
}
}
}
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Preparing");
gst::debug!(CAT, imp = self, "Preparing");
let settings = self.settings.lock().unwrap();
let context =
@ -387,38 +386,38 @@ impl AppSrc {
.prepare(AppSrcTask::new(self.obj().clone(), receiver), context)
.block_on()?;
gst::debug!(CAT, imp: self, "Prepared");
gst::debug!(CAT, imp = self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp: self, "Unpreparing");
gst::debug!(CAT, imp = self, "Unpreparing");
*self.sender.lock().unwrap() = None;
self.task.unprepare().block_on().unwrap();
gst::debug!(CAT, imp: self, "Unprepared");
gst::debug!(CAT, imp = self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Stopping");
gst::debug!(CAT, imp = self, "Stopping");
self.task.stop().block_on()?;
gst::debug!(CAT, imp: self, "Stopped");
gst::debug!(CAT, imp = self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Starting");
gst::debug!(CAT, imp = self, "Starting");
self.task.start().block_on()?;
gst::debug!(CAT, imp: self, "Started");
gst::debug!(CAT, imp = self, "Started");
Ok(())
}
fn pause(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Pausing");
gst::debug!(CAT, imp = self, "Pausing");
self.task.pause().block_on()?;
gst::debug!(CAT, imp: self, "Paused");
gst::debug!(CAT, imp = self, "Paused");
Ok(())
}
}
@ -599,7 +598,7 @@ impl ElementImpl for AppSrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {


@ -90,7 +90,7 @@ impl PadSrcHandler for AudioTestSrcPadHandler {
type ElementImpl = AudioTestSrc;
fn src_query(self, pad: &gst::Pad, imp: &Self::ElementImpl, query: &mut gst::QueryRef) -> bool {
gst::debug!(CAT, obj: pad, "Received {query:?}");
gst::debug!(CAT, obj = pad, "Received {query:?}");
if let gst::QueryViewMut::Latency(q) = query.view_mut() {
let settings = imp.settings.lock().unwrap();
@ -187,17 +187,17 @@ impl AudioTestSrcTask {
}
let mut caps = pad.peer_query_caps(Some(&DEFAULT_CAPS));
gst::debug!(CAT, imp: imp, "Peer returned {caps:?}");
gst::debug!(CAT, imp = imp, "Peer returned {caps:?}");
if caps.is_empty() {
pad.mark_reconfigure();
let err = gst::error_msg!(gst::CoreError::Pad, ["No common Caps"]);
gst::error!(CAT, imp: imp, "{err}");
gst::error!(CAT, imp = imp, "{err}");
return Err(err);
}
if caps.is_any() {
gst::debug!(CAT, imp: imp, "Using our own Caps");
gst::debug!(CAT, imp = imp, "Using our own Caps");
caps = DEFAULT_CAPS.clone();
}
@ -205,7 +205,7 @@ impl AudioTestSrcTask {
let caps = caps.make_mut();
let s = caps.structure_mut(0).ok_or_else(|| {
let err = gst::error_msg!(gst::CoreError::Pad, ["Invalid peer Caps structure"]);
gst::error!(CAT, imp: imp, "{err}");
gst::error!(CAT, imp = imp, "{err}");
err
})?;
@ -227,7 +227,7 @@ impl AudioTestSrcTask {
}
caps.fixate();
gst::debug!(CAT, imp: imp, "fixated to {caps:?}");
gst::debug!(CAT, imp = imp, "fixated to {caps:?}");
imp.src_pad.push_event(gst::event::Caps::new(&caps)).await;
@ -241,7 +241,7 @@ impl TaskImpl for AudioTestSrcTask {
type Item = gst::Buffer;
fn prepare(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
gst::log!(CAT, obj: self.elem, "Preparing Task");
gst::log!(CAT, obj = self.elem, "Preparing Task");
let imp = self.elem.imp();
let settings = imp.settings.lock().unwrap();
@ -260,10 +260,10 @@ impl TaskImpl for AudioTestSrcTask {
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.elem, "Starting Task");
gst::log!(CAT, obj = self.elem, "Starting Task");
if self.need_initial_events {
gst::debug!(CAT, obj: self.elem, "Pushing initial events");
gst::debug!(CAT, obj = self.elem, "Pushing initial events");
let stream_id =
format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
@ -311,14 +311,14 @@ impl TaskImpl for AudioTestSrcTask {
}
fn pause(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
gst::log!(CAT, obj: self.elem, "Pausing Task");
gst::log!(CAT, obj = self.elem, "Pausing Task");
self.buffer_pool.set_active(false).unwrap();
future::ok(()).boxed()
}
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
gst::log!(CAT, obj: self.elem, "Stopping Task");
gst::log!(CAT, obj = self.elem, "Stopping Task");
self.need_initial_events = true;
self.accumulator = 0.0;
@ -331,7 +331,7 @@ impl TaskImpl for AudioTestSrcTask {
let mut buffer = match self.buffer_pool.acquire_buffer(None) {
Ok(buffer) => buffer,
Err(err) => {
gst::error!(CAT, obj: self.elem, "Failed to acquire buffer {}", err);
gst::error!(CAT, obj = self.elem, "Failed to acquire buffer {}", err);
return future::err(err).boxed();
}
};
@ -399,9 +399,9 @@ impl TaskImpl for AudioTestSrcTask {
async move {
let imp = self.elem.imp();
gst::debug!(CAT, imp: imp, "Pushing {buffer:?}");
gst::debug!(CAT, imp = imp, "Pushing {buffer:?}");
imp.src_pad.push(buffer).await?;
gst::log!(CAT, imp: imp, "Successfully pushed buffer");
gst::log!(CAT, imp = imp, "Successfully pushed buffer");
self.buffer_count += 1;
@ -442,12 +442,12 @@ impl TaskImpl for AudioTestSrcTask {
async move {
match err {
gst::FlowError::Flushing => {
gst::debug!(CAT, obj: self.elem, "Flushing");
gst::debug!(CAT, obj = self.elem, "Flushing");
task::Trigger::FlushStart
}
gst::FlowError::Eos => {
gst::debug!(CAT, obj: self.elem, "EOS");
gst::debug!(CAT, obj = self.elem, "EOS");
self.elem
.imp()
.src_pad
@ -457,7 +457,7 @@ impl TaskImpl for AudioTestSrcTask {
task::Trigger::Stop
}
err => {
gst::error!(CAT, obj: self.elem, "Got error {err}");
gst::error!(CAT, obj = self.elem, "Got error {err}");
gst::element_error!(
&self.elem,
gst::StreamError::Failed,
@ -482,7 +482,7 @@ pub struct AudioTestSrc {
impl AudioTestSrc {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Preparing");
gst::debug!(CAT, imp = self, "Preparing");
let settings = self.settings.lock().unwrap();
let context =
@ -498,37 +498,37 @@ impl AudioTestSrc {
.prepare(AudioTestSrcTask::new(self.obj().clone()), context)
.block_on()?;
gst::debug!(CAT, imp: self, "Prepared");
gst::debug!(CAT, imp = self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp: self, "Unpreparing");
gst::debug!(CAT, imp = self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
gst::debug!(CAT, imp: self, "Unprepared");
gst::debug!(CAT, imp = self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Stopping");
gst::debug!(CAT, imp = self, "Stopping");
self.task.stop().block_on()?;
gst::debug!(CAT, imp: self, "Stopped");
gst::debug!(CAT, imp = self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Starting");
gst::debug!(CAT, imp = self, "Starting");
self.task.start().block_on()?;
gst::debug!(CAT, imp: self, "Started");
gst::debug!(CAT, imp = self, "Started");
Ok(())
}
fn pause(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Pausing");
gst::debug!(CAT, imp = self, "Pausing");
self.task.pause().block_on()?;
gst::debug!(CAT, imp: self, "Paused");
gst::debug!(CAT, imp = self, "Paused");
Ok(())
}
@ -695,7 +695,7 @@ impl ElementImpl for AudioTestSrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {transition:?}");
gst::trace!(CAT, imp = self, "Changing state {transition:?}");
match transition {
gst::StateChange::NullToReady => {

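The bulk of the diff in this and the following files is mechanical: the gstreamer-rs logging macros move from the old `obj:`/`imp:` field syntax to `obj = ...`/`imp = ...`. The new form is ordinary `name = expression` syntax, which is presumably also why rustfmt now reflows the longer calls onto multiple lines. A minimal sketch of the call-site difference, assuming a gstreamer-rs release that accepts the `=` form (the category, function, and element here are illustrative, not taken from the element above):

use once_cell::sync::Lazy;

static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
    gst::DebugCategory::new(
        "example",
        gst::DebugColorFlags::empty(),
        Some("Example category"),
    )
});

fn log_state_change(elem: &gst::Element) {
    // Old field syntax, removed throughout this diff:
    //     gst::debug!(CAT, obj: elem, "Starting");
    // New field syntax; `obj = elem` parses as a plain expression:
    gst::debug!(CAT, obj = elem, "Starting");
}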

@ -26,7 +26,6 @@ use once_cell::sync::Lazy;
use std::collections::VecDeque;
use std::sync::Arc;
use std::sync::Mutex as StdMutex;
use std::u32;
static DATA_QUEUE_CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
@ -127,10 +126,14 @@ impl DataQueue {
pub fn start(&self) {
let mut inner = self.0.lock().unwrap();
if inner.state == DataQueueState::Started {
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Data queue already Started");
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Data queue already Started"
);
return;
}
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Starting data queue");
gst::debug!(DATA_QUEUE_CAT, obj = inner.element, "Starting data queue");
inner.state = DataQueueState::Started;
inner.wake();
}
@ -138,10 +141,14 @@ impl DataQueue {
pub fn stop(&self) {
let mut inner = self.0.lock().unwrap();
if inner.state == DataQueueState::Stopped {
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Data queue already Stopped");
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Data queue already Stopped"
);
return;
}
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Stopping data queue");
gst::debug!(DATA_QUEUE_CAT, obj = inner.element, "Stopping data queue");
inner.state = DataQueueState::Stopped;
inner.wake();
}
@ -149,7 +156,7 @@ impl DataQueue {
pub fn clear(&self) {
let mut inner = self.0.lock().unwrap();
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Clearing data queue");
gst::debug!(DATA_QUEUE_CAT, obj = inner.element, "Clearing data queue");
let src_pad = inner.src_pad.clone();
for item in inner.queue.drain(..) {
@ -163,7 +170,7 @@ impl DataQueue {
}
}
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Data queue cleared");
gst::debug!(DATA_QUEUE_CAT, obj = inner.element, "Data queue cleared");
}
pub fn push(&self, item: DataQueueItem) -> Result<(), DataQueueItem> {
@ -172,7 +179,7 @@ impl DataQueue {
if inner.state == DataQueueState::Stopped {
gst::debug!(
DATA_QUEUE_CAT,
obj: inner.element,
obj = inner.element,
"Rejecting item {:?} in state {:?}",
item,
inner.state
@ -180,7 +187,12 @@ impl DataQueue {
return Err(item);
}
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Pushing item {:?}", item);
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Pushing item {:?}",
item
);
let (count, bytes) = item.size();
let queue_ts = inner.queue.iter().filter_map(|i| i.timestamp()).next();
@ -188,14 +200,26 @@ impl DataQueue {
if let Some(max) = inner.max_size_buffers {
if max <= inner.cur_size_buffers {
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Queue is full (buffers): {} <= {}", max, inner.cur_size_buffers);
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Queue is full (buffers): {} <= {}",
max,
inner.cur_size_buffers
);
return Err(item);
}
}
if let Some(max) = inner.max_size_bytes {
if max <= inner.cur_size_bytes {
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Queue is full (bytes): {} <= {}", max, inner.cur_size_bytes);
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Queue is full (bytes): {} <= {}",
max,
inner.cur_size_bytes
);
return Err(item);
}
}
@ -209,7 +233,13 @@ impl DataQueue {
};
if max <= level {
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Queue is full (time): {} <= {}", max, level);
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Queue is full (time): {} <= {}",
max,
level
);
return Err(item);
}
}
@ -232,10 +262,15 @@ impl DataQueue {
match inner.state {
DataQueueState::Started => match inner.queue.pop_front() {
None => {
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Data queue is empty");
gst::debug!(DATA_QUEUE_CAT, obj = inner.element, "Data queue is empty");
}
Some(item) => {
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Popped item {:?}", item);
gst::debug!(
DATA_QUEUE_CAT,
obj = inner.element,
"Popped item {:?}",
item
);
let (count, bytes) = item.size();
inner.cur_size_buffers -= count;
@ -245,7 +280,7 @@ impl DataQueue {
}
},
DataQueueState::Stopped => {
gst::debug!(DATA_QUEUE_CAT, obj: inner.element, "Data queue Stopped");
gst::debug!(DATA_QUEUE_CAT, obj = inner.element, "Data queue Stopped");
return None;
}
}


@ -30,7 +30,6 @@ use once_cell::sync::Lazy;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use std::time::Duration;
use std::u32;
use crate::runtime::prelude::*;
use crate::runtime::{self, PadSink, PadSrc};
@ -140,7 +139,7 @@ impl InputSelectorPadSinkHandler {
}
if is_active {
gst::log!(CAT, obj: pad, "Forwarding {:?}", buffer);
gst::log!(CAT, obj = pad, "Forwarding {:?}", buffer);
if switched_pad && !buffer.flags().contains(gst::BufferFlags::DISCONT) {
let buffer = buffer.make_mut();
@ -173,7 +172,7 @@ impl PadSinkHandler for InputSelectorPadSinkHandler {
list: gst::BufferList,
) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
async move {
gst::log!(CAT, obj: pad, "Handling buffer list {:?}", list);
gst::log!(CAT, obj = pad, "Handling buffer list {:?}", list);
// TODO: Ideally we would keep the list intact and forward it in one go
for buffer in list.iter_owned() {
self.handle_item(&pad, &elem, buffer).await?;
@ -230,14 +229,14 @@ impl PadSinkHandler for InputSelectorPadSinkHandler {
}
fn sink_query(self, pad: &gst::Pad, imp: &InputSelector, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj: pad, "Handling query {:?}", query);
gst::log!(CAT, obj = pad, "Handling query {:?}", query);
if query.is_serialized() {
// FIXME: How can we do this (drops ALLOCATION and DRAIN)?
gst::log!(CAT, obj: pad, "Dropping serialized query {:?}", query);
gst::log!(CAT, obj = pad, "Dropping serialized query {:?}", query);
false
} else {
gst::log!(CAT, obj: pad, "Forwarding query {:?}", query);
gst::log!(CAT, obj = pad, "Forwarding query {:?}", query);
imp.src_pad.gst_pad().peer_query(query)
}
}
@ -250,7 +249,7 @@ impl PadSrcHandler for InputSelectorPadSrcHandler {
type ElementImpl = InputSelector;
fn src_query(self, pad: &gst::Pad, imp: &InputSelector, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj: pad, "Handling {:?}", query);
gst::log!(CAT, obj = pad, "Handling {:?}", query);
use gst::QueryViewMut;
match query.view_mut() {
@ -340,9 +339,9 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
impl InputSelector {
fn unprepare(&self) {
let mut state = self.state.lock().unwrap();
gst::debug!(CAT, imp: self, "Unpreparing");
gst::debug!(CAT, imp = self, "Unpreparing");
*state = State::default();
gst::debug!(CAT, imp: self, "Unprepared");
gst::debug!(CAT, imp = self, "Unprepared");
}
}
@ -417,8 +416,8 @@ impl ObjectImpl for InputSelector {
let pads = self.pads.lock().unwrap();
let mut old_pad = None;
if let Some(ref pad) = pad {
if pads.sink_pads.get(pad).is_some() {
old_pad = state.active_sinkpad.clone();
if pads.sink_pads.contains_key(pad) {
old_pad.clone_from(&state.active_sinkpad);
state.active_sinkpad = Some(pad.clone());
state.switched_pad = true;
}
@ -516,7 +515,7 @@ impl ElementImpl for InputSelector {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
if let gst::StateChange::ReadyToNull = transition {
self.unprepare();

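Besides the logging changes, the input selector above picks up two small clippy-style idioms: a key-existence check via `HashMap::contains_key` instead of `get(..).is_some()`, and `clone_from` instead of assigning a fresh clone. A standalone sketch of both (types and names are illustrative, not the element's own):

use std::collections::HashMap;

fn activate(
    pads: &HashMap<String, u32>,
    candidate: &str,
    active: &Option<String>,
    old: &mut Option<String>,
) {
    // `contains_key` states the intent directly instead of `get(..).is_some()`.
    if pads.contains_key(candidate) {
        // `clone_from` may reuse `old`'s existing allocation rather than
        // dropping it and allocating a fresh clone.
        old.clone_from(active);
    }
}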

@ -144,7 +144,7 @@ impl SinkHandler {
// For resetting if seqnum discontinuities
fn reset(&self, inner: &mut SinkHandlerInner, jb: &JitterBuffer) -> BTreeSet<GapPacket> {
gst::info!(CAT, imp: jb, "Resetting");
gst::info!(CAT, imp = jb, "Resetting");
let mut state = jb.state.lock().unwrap();
state.jbuf.flush();
@ -176,17 +176,17 @@ impl SinkHandler {
) -> Result<gst::FlowSuccess, gst::FlowError> {
let s = caps.structure(0).ok_or(gst::FlowError::Error)?;
gst::debug!(CAT, imp: jb, "Parsing {:?}", caps);
gst::debug!(CAT, imp = jb, "Parsing {:?}", caps);
let payload = s.get::<i32>("payload").map_err(|err| {
gst::debug!(CAT, imp: jb, "Caps 'payload': {}", err);
gst::debug!(CAT, imp = jb, "Caps 'payload': {}", err);
gst::FlowError::Error
})?;
if pt != 0 && payload as u8 != pt {
gst::debug!(
CAT,
imp: jb,
imp = jb,
"Caps 'payload' ({}) doesn't match payload type ({})",
payload,
pt
@ -196,12 +196,12 @@ impl SinkHandler {
inner.last_pt = Some(pt);
let clock_rate = s.get::<i32>("clock-rate").map_err(|err| {
gst::debug!(CAT, imp: jb, "Caps 'clock-rate': {}", err);
gst::debug!(CAT, imp = jb, "Caps 'clock-rate': {}", err);
gst::FlowError::Error
})?;
if clock_rate <= 0 {
gst::debug!(CAT, imp: jb, "Caps 'clock-rate' <= 0");
gst::debug!(CAT, imp = jb, "Caps 'clock-rate' <= 0");
return Err(gst::FlowError::Error);
}
state.clock_rate = Some(clock_rate as u32);
@ -258,7 +258,7 @@ impl SinkHandler {
gst::debug!(
CAT,
imp: jb,
imp = jb,
"Handling big gap, gap packets length: {}",
gap_packets_length
);
@ -266,20 +266,20 @@ impl SinkHandler {
inner.gap_packets.insert(GapPacket::new(buffer));
if gap_packets_length > 0 {
let mut prev_gap_seq = std::u32::MAX;
let mut prev_gap_seq = u32::MAX;
let mut all_consecutive = true;
for gap_packet in inner.gap_packets.iter() {
gst::log!(
CAT,
imp: jb,
imp = jb,
"Looking at gap packet with seq {}",
gap_packet.seq,
);
all_consecutive = gap_packet.pt == pt;
if prev_gap_seq == std::u32::MAX {
if prev_gap_seq == u32::MAX {
prev_gap_seq = gap_packet.seq as u32;
} else if gst_rtp::compare_seqnum(gap_packet.seq, prev_gap_seq as u16) != -1 {
all_consecutive = false;
@ -292,7 +292,7 @@ impl SinkHandler {
}
}
gst::debug!(CAT, imp: jb, "all consecutive: {}", all_consecutive);
gst::debug!(CAT, imp = jb, "all consecutive: {}", all_consecutive);
if all_consecutive && gap_packets_length > 3 {
reset = true;
@ -334,7 +334,7 @@ impl SinkHandler {
gst::log!(
CAT,
imp: jb,
imp = jb,
"Storing buffer, seq: {}, rtptime: {}, pt: {}",
seq,
rtptime,
@ -367,7 +367,7 @@ impl SinkHandler {
inner.last_pt = Some(pt);
state.clock_rate = None;
gst::debug!(CAT, obj: pad, "New payload type: {}", pt);
gst::debug!(CAT, obj = pad, "New payload type: {}", pt);
if let Some(caps) = pad.current_caps() {
/* Ignore errors at this point, as we want to emit request-pt-map */
@ -381,7 +381,7 @@ impl SinkHandler {
let caps = element
.emit_by_name::<Option<gst::Caps>>("request-pt-map", &[&(pt as u32)])
.ok_or_else(|| {
gst::error!(CAT, obj: pad, "Signal 'request-pt-map' returned None");
gst::error!(CAT, obj = pad, "Signal 'request-pt-map' returned None");
gst::FlowError::Error
})?;
let mut state = jb.state.lock().unwrap();
@ -404,7 +404,7 @@ impl SinkHandler {
if pts.is_none() {
gst::debug!(
CAT,
imp: jb,
imp = jb,
"cannot calculate a valid pts for #{}, discard",
seq
);
@ -437,7 +437,7 @@ impl SinkHandler {
if gap <= 0 {
state.stats.num_late += 1;
gst::debug!(CAT, imp: jb, "Dropping late {}", seq);
gst::debug!(CAT, imp = jb, "Dropping late {}", seq);
return Ok(gst::FlowSuccess::Ok);
}
}
@ -472,7 +472,7 @@ impl SinkHandler {
(Some(earliest_pts), Some(pts)) if pts < earliest_pts => true,
(Some(earliest_pts), Some(pts)) if pts == earliest_pts => state
.earliest_seqnum
.map_or(false, |earliest_seqnum| seq > earliest_seqnum),
.is_some_and(|earliest_seqnum| seq > earliest_seqnum),
_ => false,
};
@ -481,7 +481,7 @@ impl SinkHandler {
state.earliest_seqnum = Some(seq);
}
gst::log!(CAT, obj: pad, "Stored buffer");
gst::log!(CAT, obj = pad, "Stored buffer");
Ok(gst::FlowSuccess::Ok)
}
@ -527,11 +527,11 @@ impl SinkHandler {
if let Some((next_wakeup, _)) = next_wakeup {
if let Some((previous_next_wakeup, ref abort_handle)) = state.wait_handle {
if previous_next_wakeup.is_none()
|| next_wakeup.map_or(false, |next| previous_next_wakeup.unwrap() > next)
|| next_wakeup.is_some_and(|next| previous_next_wakeup.unwrap() > next)
{
gst::debug!(
CAT,
obj: pad,
obj = pad,
"Rescheduling for new item {} < {}",
next_wakeup.display(),
previous_next_wakeup.display(),
@ -555,7 +555,7 @@ impl PadSinkHandler for SinkHandler {
buffer: gst::Buffer,
) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
async move {
gst::debug!(CAT, obj: pad, "Handling {:?}", buffer);
gst::debug!(CAT, obj = pad, "Handling {:?}", buffer);
self.enqueue_item(pad, elem.imp(), Some(buffer))
}
.boxed()
@ -564,11 +564,11 @@ impl PadSinkHandler for SinkHandler {
fn sink_event(self, pad: &gst::Pad, jb: &JitterBuffer, event: gst::Event) -> bool {
use gst::EventView;
gst::log!(CAT, obj: pad, "Handling {:?}", event);
gst::log!(CAT, obj = pad, "Handling {:?}", event);
if let EventView::FlushStart(..) = event.view() {
if let Err(err) = jb.task.flush_start().await_maybe_on_context() {
gst::error!(CAT, obj: pad, "FlushStart failed {:?}", err);
gst::error!(CAT, obj = pad, "FlushStart failed {:?}", err);
gst::element_imp_error!(
jb,
gst::StreamError::Failed,
@ -579,7 +579,7 @@ impl PadSinkHandler for SinkHandler {
}
}
gst::log!(CAT, obj: pad, "Forwarding {:?}", event);
gst::log!(CAT, obj = pad, "Forwarding {:?}", event);
jb.src_pad.gst_pad().push_event(event)
}
@ -590,7 +590,7 @@ impl PadSinkHandler for SinkHandler {
event: gst::Event,
) -> BoxFuture<'static, bool> {
async move {
gst::log!(CAT, obj: pad, "Handling {:?}", event);
gst::log!(CAT, obj = pad, "Handling {:?}", event);
let jb = elem.imp();
@ -603,7 +603,7 @@ impl PadSinkHandler for SinkHandler {
}
EventView::FlushStop(..) => {
if let Err(err) = jb.task.flush_stop().await_maybe_on_context() {
gst::error!(CAT, obj: pad, "FlushStop failed {:?}", err);
gst::error!(CAT, obj = pad, "FlushStop failed {:?}", err);
gst::element_error!(
elem,
gst::StreamError::Failed,
@ -626,7 +626,7 @@ impl PadSinkHandler for SinkHandler {
if forward {
// FIXME: These events should really be queued up and stay in order
gst::log!(CAT, obj: pad, "Forwarding serialized {:?}", event);
gst::log!(CAT, obj = pad, "Forwarding serialized {:?}", event);
jb.src_pad.push_event(event).await
} else {
true
@ -665,7 +665,7 @@ impl SrcHandler {
gst::debug!(
CAT,
obj: element,
obj = element,
"Generating lost events seq: {}, last popped seq: {:?}",
seqnum,
last_popped_seqnum,
@ -801,11 +801,22 @@ impl SrcHandler {
};
for event in lost_events {
gst::debug!(CAT, obj: jb.src_pad.gst_pad(), "Pushing lost event {:?}", event);
gst::debug!(
CAT,
obj = jb.src_pad.gst_pad(),
"Pushing lost event {:?}",
event
);
let _ = jb.src_pad.push_event(event).await;
}
gst::debug!(CAT, obj: jb.src_pad.gst_pad(), "Pushing {:?} with seq {:?}", buffer, seq);
gst::debug!(
CAT,
obj = jb.src_pad.gst_pad(),
"Pushing {:?} with seq {:?}",
buffer,
seq
);
jb.src_pad.push(buffer).await
}
@ -824,7 +835,7 @@ impl SrcHandler {
gst::debug!(
CAT,
obj: element,
obj = element,
"Now is {}, EOS {}, earliest pts is {}, packet_spacing {} and latency {}",
now.display(),
state.eos,
@ -834,7 +845,7 @@ impl SrcHandler {
);
if state.eos {
gst::debug!(CAT, obj: element, "EOS, not waiting");
gst::debug!(CAT, obj = element, "EOS, not waiting");
return (now, Some((now, Duration::ZERO)));
}
@ -854,7 +865,7 @@ impl SrcHandler {
gst::debug!(
CAT,
obj: element,
obj = element,
"Next wakeup at {} with delay {}",
next_wakeup.display(),
delay
@ -870,12 +881,12 @@ impl PadSrcHandler for SrcHandler {
fn src_event(self, pad: &gst::Pad, jb: &JitterBuffer, event: gst::Event) -> bool {
use gst::EventView;
gst::log!(CAT, obj: pad, "Handling {:?}", event);
gst::log!(CAT, obj = pad, "Handling {:?}", event);
match event.view() {
EventView::FlushStart(..) => {
if let Err(err) = jb.task.flush_start().await_maybe_on_context() {
gst::error!(CAT, obj: pad, "FlushStart failed {:?}", err);
gst::error!(CAT, obj = pad, "FlushStart failed {:?}", err);
gst::element_imp_error!(
jb,
gst::StreamError::Failed,
@ -887,7 +898,7 @@ impl PadSrcHandler for SrcHandler {
}
EventView::FlushStop(..) => {
if let Err(err) = jb.task.flush_stop().await_maybe_on_context() {
gst::error!(CAT, obj: pad, "FlushStop failed {:?}", err);
gst::error!(CAT, obj = pad, "FlushStop failed {:?}", err);
gst::element_imp_error!(
jb,
gst::StreamError::Failed,
@ -900,14 +911,14 @@ impl PadSrcHandler for SrcHandler {
_ => (),
}
gst::log!(CAT, obj: pad, "Forwarding {:?}", event);
gst::log!(CAT, obj = pad, "Forwarding {:?}", event);
jb.sink_pad.gst_pad().push_event(event)
}
fn src_query(self, pad: &gst::Pad, jb: &JitterBuffer, query: &mut gst::QueryRef) -> bool {
use gst::QueryViewMut;
gst::log!(CAT, obj: pad, "Forwarding {:?}", query);
gst::log!(CAT, obj = pad, "Forwarding {:?}", query);
match query.view_mut() {
QueryViewMut::Latency(q) => {
@ -1030,7 +1041,7 @@ impl TaskImpl for JitterBufferTask {
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Starting task");
gst::log!(CAT, obj = self.element, "Starting task");
self.src_pad_handler.clear();
self.sink_pad_handler.clear();
@ -1043,7 +1054,7 @@ impl TaskImpl for JitterBufferTask {
state.jbuf.set_delay(latency);
*jb.state.lock().unwrap() = state;
gst::log!(CAT, obj: self.element, "Task started");
gst::log!(CAT, obj = self.element, "Task started");
Ok(())
}
.boxed()
@ -1103,9 +1114,9 @@ impl TaskImpl for JitterBufferTask {
// Got aborted, reschedule if needed
if let Some(delay_fut) = delay_fut {
gst::debug!(CAT, obj: self.element, "Waiting");
gst::debug!(CAT, obj = self.element, "Waiting");
if let Err(Aborted) = delay_fut.await {
gst::debug!(CAT, obj: self.element, "Waiting aborted");
gst::debug!(CAT, obj = self.element, "Waiting aborted");
return Ok(());
}
}
@ -1123,7 +1134,7 @@ impl TaskImpl for JitterBufferTask {
gst::debug!(
CAT,
obj: self.element,
obj = self.element,
"Woke up at {}, earliest_pts {}",
now.display(),
state.earliest_pts.display()
@ -1166,7 +1177,7 @@ impl TaskImpl for JitterBufferTask {
context_wait,
);
if let Some((Some(next_wakeup), _)) = next_wakeup {
if now.map_or(false, |now| next_wakeup > now) {
if now.is_some_and(|now| next_wakeup > now) {
// Reschedule and wait a bit longer in the next iteration
return Ok(());
}
@ -1179,13 +1190,13 @@ impl TaskImpl for JitterBufferTask {
if let Err(err) = res {
match err {
gst::FlowError::Eos => {
gst::debug!(CAT, obj: self.element, "Pushing EOS event");
gst::debug!(CAT, obj = self.element, "Pushing EOS event");
let _ = jb.src_pad.push_event(gst::event::Eos::new()).await;
}
gst::FlowError::Flushing => {
gst::debug!(CAT, obj: self.element, "Flushing")
gst::debug!(CAT, obj = self.element, "Flushing")
}
err => gst::error!(CAT, obj: self.element, "Error {}", err),
err => gst::error!(CAT, obj = self.element, "Error {}", err),
}
return Err(err);
@ -1201,7 +1212,7 @@ impl TaskImpl for JitterBufferTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Stopping task");
gst::log!(CAT, obj = self.element, "Stopping task");
let jb = self.element.imp();
let mut jb_state = jb.state.lock().unwrap();
@ -1215,7 +1226,7 @@ impl TaskImpl for JitterBufferTask {
*jb_state = State::default();
gst::log!(CAT, obj: self.element, "Task stopped");
gst::log!(CAT, obj = self.element, "Task stopped");
Ok(())
}
.boxed()
@ -1242,7 +1253,7 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
impl JitterBuffer {
fn clear_pt_map(&self) {
gst::debug!(CAT, imp: self, "Clearing PT map");
gst::debug!(CAT, imp = self, "Clearing PT map");
let mut state = self.state.lock().unwrap();
state.clock_rate = None;
@ -1250,7 +1261,7 @@ impl JitterBuffer {
}
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Preparing");
gst::debug!(CAT, imp = self, "Preparing");
let context = {
let settings = self.settings.lock().unwrap();
@ -1264,28 +1275,28 @@ impl JitterBuffer {
)
.block_on()?;
gst::debug!(CAT, imp: self, "Prepared");
gst::debug!(CAT, imp = self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp: self, "Unpreparing");
gst::debug!(CAT, imp = self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
gst::debug!(CAT, imp: self, "Unprepared");
gst::debug!(CAT, imp = self, "Unprepared");
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Starting");
gst::debug!(CAT, imp = self, "Starting");
self.task.start().block_on()?;
gst::debug!(CAT, imp: self, "Started");
gst::debug!(CAT, imp = self, "Started");
Ok(())
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Stopping");
gst::debug!(CAT, imp = self, "Stopping");
self.task.stop().block_on()?;
gst::debug!(CAT, imp: self, "Stopped");
gst::debug!(CAT, imp = self, "Stopped");
Ok(())
}
}
@ -1528,7 +1539,7 @@ impl ElementImpl for JitterBuffer {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {

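Two other recurring cleanups show up in the jitterbuffer files here and below: `Option::map_or(false, ..)` becomes `Option::is_some_and(..)`, and the deprecated `std::u32::MAX` module path becomes the associated constant `u32::MAX` (with the now-unneeded `use std::u32;` imports dropped). A small sketch of both, with illustrative names:

fn is_newer(seq: u16, earliest_seqnum: Option<u16>) -> bool {
    // `is_some_and` (stable since Rust 1.70) reads better than `map_or(false, ..)`.
    earliest_seqnum.is_some_and(|earliest| seq > earliest)
}

fn seqnum_from_raw(raw: u32) -> Option<u16> {
    // Associated constant instead of the legacy `std::u32::MAX` module path.
    if raw == u32::MAX {
        None
    } else {
        Some(raw as u16)
    }
}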

@ -91,7 +91,7 @@ impl RTPJitterBufferItem {
r#type: 0,
dts: dts.into().into_glib(),
pts: pts.into().into_glib(),
seqnum: seqnum.map(|s| s as u32).unwrap_or(std::u32::MAX),
seqnum: seqnum.map(|s| s as u32).unwrap_or(u32::MAX),
count: 1,
rtptime,
},
@ -138,7 +138,7 @@ impl RTPJitterBufferItem {
pub fn seqnum(&self) -> Option<u16> {
unsafe {
let item = self.0.as_ref().expect("Invalid wrapper");
if item.as_ref().seqnum == std::u32::MAX {
if item.as_ref().seqnum == u32::MAX {
None
} else {
Some(item.as_ref().seqnum as u16)
@ -306,7 +306,7 @@ impl RTPJitterBuffer {
let pts = from_glib(pts.assume_init());
let seqnum = seqnum.assume_init();
let seqnum = if seqnum == std::u32::MAX {
let seqnum = if seqnum == u32::MAX {
None
} else {
Some(seqnum as u16)
@ -339,7 +339,7 @@ impl RTPJitterBuffer {
(None, None)
} else {
let seqnum = (*item).seqnum;
let seqnum = if seqnum == std::u32::MAX {
let seqnum = if seqnum == u32::MAX {
None
} else {
Some(seqnum as u16)


@ -31,7 +31,6 @@ use std::collections::{HashMap, VecDeque};
use std::sync::{Arc, Weak};
use std::sync::{Mutex, MutexGuard};
use std::time::Duration;
use std::{u32, u64};
use crate::runtime::prelude::*;
use crate::runtime::{Context, PadSink, PadSinkWeak, PadSrc, PadSrcWeak, Task};
@ -218,7 +217,7 @@ impl PadSinkHandler for ProxySinkPadHandler {
buffer: gst::Buffer,
) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
async move {
gst::log!(SINK_CAT, obj: pad, "Handling {:?}", buffer);
gst::log!(SINK_CAT, obj = pad, "Handling {:?}", buffer);
let imp = elem.imp();
imp.enqueue_item(DataQueueItem::Buffer(buffer)).await
}
@ -232,7 +231,7 @@ impl PadSinkHandler for ProxySinkPadHandler {
list: gst::BufferList,
) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
async move {
gst::log!(SINK_CAT, obj: pad, "Handling {:?}", list);
gst::log!(SINK_CAT, obj = pad, "Handling {:?}", list);
let imp = elem.imp();
imp.enqueue_item(DataQueueItem::BufferList(list)).await
}
@ -240,7 +239,7 @@ impl PadSinkHandler for ProxySinkPadHandler {
}
fn sink_event(self, pad: &gst::Pad, imp: &ProxySink, event: gst::Event) -> bool {
gst::debug!(SINK_CAT, obj: pad, "Handling non-serialized {:?}", event);
gst::debug!(SINK_CAT, obj = pad, "Handling non-serialized {:?}", event);
let src_pad = {
let proxy_ctx = imp.proxy_ctx.lock().unwrap();
@ -258,12 +257,12 @@ impl PadSinkHandler for ProxySinkPadHandler {
}
if let Some(src_pad) = src_pad {
gst::log!(SINK_CAT, obj: pad, "Forwarding non-serialized {:?}", event);
gst::log!(SINK_CAT, obj = pad, "Forwarding non-serialized {:?}", event);
src_pad.push_event(event)
} else {
gst::error!(
SINK_CAT,
obj: pad,
obj = pad,
"No src pad to forward non-serialized {:?} to",
event
);
@ -278,7 +277,7 @@ impl PadSinkHandler for ProxySinkPadHandler {
event: gst::Event,
) -> BoxFuture<'static, bool> {
async move {
gst::log!(SINK_CAT, obj: pad, "Handling serialized {:?}", event);
gst::log!(SINK_CAT, obj = pad, "Handling serialized {:?}", event);
let imp = elem.imp();
@ -291,7 +290,7 @@ impl PadSinkHandler for ProxySinkPadHandler {
_ => (),
}
gst::log!(SINK_CAT, obj: pad, "Queuing serialized {:?}", event);
gst::log!(SINK_CAT, obj = pad, "Queuing serialized {:?}", event);
imp.enqueue_item(DataQueueItem::Event(event)).await.is_ok()
}
.boxed()
@ -320,7 +319,7 @@ impl ProxySink {
let proxy_ctx = self.proxy_ctx.lock().unwrap();
let mut shared_ctx = proxy_ctx.as_ref().unwrap().lock_shared();
gst::log!(SINK_CAT, imp: self, "Trying to empty pending queue");
gst::log!(SINK_CAT, imp = self, "Trying to empty pending queue");
let ProxyContextInner {
pending_queue: ref mut pq,
@ -345,7 +344,7 @@ impl ProxySink {
receiver
} else {
gst::log!(SINK_CAT, imp: self, "Pending queue is empty now");
gst::log!(SINK_CAT, imp = self, "Pending queue is empty now");
*pq = None;
return;
}
@ -356,13 +355,13 @@ impl ProxySink {
receiver
}
} else {
gst::log!(SINK_CAT, imp: self, "Flushing, dropping pending queue");
gst::log!(SINK_CAT, imp = self, "Flushing, dropping pending queue");
*pq = None;
return;
}
};
gst::log!(SINK_CAT, imp: self, "Waiting for more queue space");
gst::log!(SINK_CAT, imp = self, "Waiting for more queue space");
let _ = more_queue_space_receiver.await;
}
}
@ -432,18 +431,18 @@ impl ProxySink {
gst::log!(
SINK_CAT,
imp: self,
imp = self,
"Proxy is full - Pushing first item on pending queue"
);
if schedule_now {
gst::log!(SINK_CAT, imp: self, "Scheduling pending queue now");
gst::log!(SINK_CAT, imp = self, "Scheduling pending queue now");
pending_queue.scheduled = true;
let wait_fut = self.schedule_pending_queue();
Some(wait_fut)
} else {
gst::log!(SINK_CAT, imp: self, "Scheduling pending queue later");
gst::log!(SINK_CAT, imp = self, "Scheduling pending queue later");
None
}
@ -463,7 +462,7 @@ impl ProxySink {
};
if let Some(wait_fut) = wait_fut {
gst::log!(SINK_CAT, imp: self, "Blocking until queue has space again");
gst::log!(SINK_CAT, imp = self, "Blocking until queue has space again");
wait_fut.await;
}
@ -473,7 +472,7 @@ impl ProxySink {
}
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(SINK_CAT, imp: self, "Preparing");
gst::debug!(SINK_CAT, imp = self, "Preparing");
let proxy_context = self.settings.lock().unwrap().proxy_context.to_string();
@ -492,22 +491,22 @@ impl ProxySink {
*self.proxy_ctx.lock().unwrap() = Some(proxy_ctx);
gst::debug!(SINK_CAT, imp: self, "Prepared");
gst::debug!(SINK_CAT, imp = self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(SINK_CAT, imp: self, "Unpreparing");
gst::debug!(SINK_CAT, imp = self, "Unpreparing");
*self.proxy_ctx.lock().unwrap() = None;
gst::debug!(SINK_CAT, imp: self, "Unprepared");
gst::debug!(SINK_CAT, imp = self, "Unprepared");
}
fn start(&self) {
let proxy_ctx = self.proxy_ctx.lock().unwrap();
let mut shared_ctx = proxy_ctx.as_ref().unwrap().lock_shared();
gst::debug!(SINK_CAT, imp: self, "Starting");
gst::debug!(SINK_CAT, imp = self, "Starting");
{
let settings = self.settings.lock().unwrap();
@ -517,19 +516,19 @@ impl ProxySink {
shared_ctx.last_res = Ok(gst::FlowSuccess::Ok);
gst::debug!(SINK_CAT, imp: self, "Started");
gst::debug!(SINK_CAT, imp = self, "Started");
}
fn stop(&self) {
let proxy_ctx = self.proxy_ctx.lock().unwrap();
let mut shared_ctx = proxy_ctx.as_ref().unwrap().lock_shared();
gst::debug!(SINK_CAT, imp: self, "Stopping");
gst::debug!(SINK_CAT, imp = self, "Stopping");
let _ = shared_ctx.pending_queue.take();
shared_ctx.last_res = Err(gst::FlowError::Flushing);
gst::debug!(SINK_CAT, imp: self, "Stopped");
gst::debug!(SINK_CAT, imp = self, "Stopped");
}
}
@ -632,7 +631,7 @@ impl ElementImpl for ProxySink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(SINK_CAT, imp: self, "Changing state {:?}", transition);
gst::trace!(SINK_CAT, imp = self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {
@ -667,7 +666,7 @@ impl PadSrcHandler for ProxySrcPadHandler {
type ElementImpl = ProxySrc;
fn src_event(self, pad: &gst::Pad, imp: &ProxySrc, event: gst::Event) -> bool {
gst::log!(SRC_CAT, obj: pad, "Handling {:?}", event);
gst::log!(SRC_CAT, obj = pad, "Handling {:?}", event);
let sink_pad = {
let proxy_ctx = imp.proxy_ctx.lock().unwrap();
@ -684,7 +683,7 @@ impl PadSrcHandler for ProxySrcPadHandler {
match event.view() {
EventView::FlushStart(..) => {
if let Err(err) = imp.task.flush_start().await_maybe_on_context() {
gst::error!(SRC_CAT, obj: pad, "FlushStart failed {:?}", err);
gst::error!(SRC_CAT, obj = pad, "FlushStart failed {:?}", err);
gst::element_imp_error!(
imp,
gst::StreamError::Failed,
@ -696,7 +695,7 @@ impl PadSrcHandler for ProxySrcPadHandler {
}
EventView::FlushStop(..) => {
if let Err(err) = imp.task.flush_stop().await_maybe_on_context() {
gst::error!(SRC_CAT, obj: pad, "FlushStop failed {:?}", err);
gst::error!(SRC_CAT, obj = pad, "FlushStop failed {:?}", err);
gst::element_imp_error!(
imp,
gst::StreamError::Failed,
@ -710,16 +709,16 @@ impl PadSrcHandler for ProxySrcPadHandler {
}
if let Some(sink_pad) = sink_pad {
gst::log!(SRC_CAT, obj: pad, "Forwarding {:?}", event);
gst::log!(SRC_CAT, obj = pad, "Forwarding {:?}", event);
sink_pad.push_event(event)
} else {
gst::error!(SRC_CAT, obj: pad, "No sink pad to forward {:?} to", event);
gst::error!(SRC_CAT, obj = pad, "No sink pad to forward {:?} to", event);
false
}
}
fn src_query(self, pad: &gst::Pad, _proxysrc: &ProxySrc, query: &mut gst::QueryRef) -> bool {
gst::log!(SRC_CAT, obj: pad, "Handling {:?}", query);
gst::log!(SRC_CAT, obj = pad, "Handling {:?}", query);
use gst::QueryViewMut;
let ret = match query.view_mut() {
@ -751,9 +750,9 @@ impl PadSrcHandler for ProxySrcPadHandler {
};
if ret {
gst::log!(SRC_CAT, obj: pad, "Handled {:?}", query);
gst::log!(SRC_CAT, obj = pad, "Handled {:?}", query);
} else {
gst::log!(SRC_CAT, obj: pad, "Didn't handle {:?}", query);
gst::log!(SRC_CAT, obj = pad, "Didn't handle {:?}", query);
}
ret
@ -784,15 +783,15 @@ impl ProxySrcTask {
match item {
DataQueueItem::Buffer(buffer) => {
gst::log!(SRC_CAT, obj: self.element, "Forwarding {:?}", buffer);
gst::log!(SRC_CAT, obj = self.element, "Forwarding {:?}", buffer);
proxysrc.src_pad.push(buffer).await.map(drop)
}
DataQueueItem::BufferList(list) => {
gst::log!(SRC_CAT, obj: self.element, "Forwarding {:?}", list);
gst::log!(SRC_CAT, obj = self.element, "Forwarding {:?}", list);
proxysrc.src_pad.push_list(list).await.map(drop)
}
DataQueueItem::Event(event) => {
gst::log!(SRC_CAT, obj: self.element, "Forwarding {:?}", event);
gst::log!(SRC_CAT, obj = self.element, "Forwarding {:?}", event);
proxysrc.src_pad.push_event(event).await;
Ok(())
}
@ -805,7 +804,7 @@ impl TaskImpl for ProxySrcTask {
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(SRC_CAT, obj: self.element, "Starting task");
gst::log!(SRC_CAT, obj = self.element, "Starting task");
let proxysrc = self.element.imp();
let proxy_ctx = proxysrc.proxy_ctx.lock().unwrap();
@ -819,7 +818,7 @@ impl TaskImpl for ProxySrcTask {
self.dataqueue.start();
gst::log!(SRC_CAT, obj: self.element, "Task started");
gst::log!(SRC_CAT, obj = self.element, "Task started");
Ok(())
}
.boxed()
@ -841,25 +840,25 @@ impl TaskImpl for ProxySrcTask {
let proxysrc = self.element.imp();
match res {
Ok(()) => {
gst::log!(SRC_CAT, obj: self.element, "Successfully pushed item");
gst::log!(SRC_CAT, obj = self.element, "Successfully pushed item");
let proxy_ctx = proxysrc.proxy_ctx.lock().unwrap();
let mut shared_ctx = proxy_ctx.as_ref().unwrap().lock_shared();
shared_ctx.last_res = Ok(gst::FlowSuccess::Ok);
}
Err(gst::FlowError::Flushing) => {
gst::debug!(SRC_CAT, obj: self.element, "Flushing");
gst::debug!(SRC_CAT, obj = self.element, "Flushing");
let proxy_ctx = proxysrc.proxy_ctx.lock().unwrap();
let mut shared_ctx = proxy_ctx.as_ref().unwrap().lock_shared();
shared_ctx.last_res = Err(gst::FlowError::Flushing);
}
Err(gst::FlowError::Eos) => {
gst::debug!(SRC_CAT, obj: self.element, "EOS");
gst::debug!(SRC_CAT, obj = self.element, "EOS");
let proxy_ctx = proxysrc.proxy_ctx.lock().unwrap();
let mut shared_ctx = proxy_ctx.as_ref().unwrap().lock_shared();
shared_ctx.last_res = Err(gst::FlowError::Eos);
}
Err(err) => {
gst::error!(SRC_CAT, obj: self.element, "Got error {}", err);
gst::error!(SRC_CAT, obj = self.element, "Got error {}", err);
gst::element_error!(
&self.element,
gst::StreamError::Failed,
@ -879,7 +878,7 @@ impl TaskImpl for ProxySrcTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(SRC_CAT, obj: self.element, "Stopping task");
gst::log!(SRC_CAT, obj = self.element, "Stopping task");
let proxysrc = self.element.imp();
let proxy_ctx = proxysrc.proxy_ctx.lock().unwrap();
@ -894,7 +893,7 @@ impl TaskImpl for ProxySrcTask {
pending_queue.notify_more_queue_space();
}
gst::log!(SRC_CAT, obj: self.element, "Task stopped");
gst::log!(SRC_CAT, obj = self.element, "Task stopped");
Ok(())
}
.boxed()
@ -902,7 +901,7 @@ impl TaskImpl for ProxySrcTask {
fn flush_start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(SRC_CAT, obj: self.element, "Starting task flush");
gst::log!(SRC_CAT, obj = self.element, "Starting task flush");
let proxysrc = self.element.imp();
let proxy_ctx = proxysrc.proxy_ctx.lock().unwrap();
@ -912,7 +911,7 @@ impl TaskImpl for ProxySrcTask {
shared_ctx.last_res = Err(gst::FlowError::Flushing);
gst::log!(SRC_CAT, obj: self.element, "Task flush started");
gst::log!(SRC_CAT, obj = self.element, "Task flush started");
Ok(())
}
.boxed()
@ -938,7 +937,7 @@ static SRC_CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
impl ProxySrc {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(SRC_CAT, imp: self, "Preparing");
gst::debug!(SRC_CAT, imp = self, "Preparing");
let settings = self.settings.lock().unwrap().clone();
@ -992,13 +991,13 @@ impl ProxySrc {
.prepare(ProxySrcTask::new(self.obj().clone(), dataqueue), ts_ctx)
.block_on()?;
gst::debug!(SRC_CAT, imp: self, "Prepared");
gst::debug!(SRC_CAT, imp = self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(SRC_CAT, imp: self, "Unpreparing");
gst::debug!(SRC_CAT, imp = self, "Unpreparing");
{
let settings = self.settings.lock().unwrap();
@ -1011,27 +1010,27 @@ impl ProxySrc {
*self.dataqueue.lock().unwrap() = None;
*self.proxy_ctx.lock().unwrap() = None;
gst::debug!(SRC_CAT, imp: self, "Unprepared");
gst::debug!(SRC_CAT, imp = self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(SRC_CAT, imp: self, "Stopping");
gst::debug!(SRC_CAT, imp = self, "Stopping");
self.task.stop().await_maybe_on_context()?;
gst::debug!(SRC_CAT, imp: self, "Stopped");
gst::debug!(SRC_CAT, imp = self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(SRC_CAT, imp: self, "Starting");
gst::debug!(SRC_CAT, imp = self, "Starting");
self.task.start().await_maybe_on_context()?;
gst::debug!(SRC_CAT, imp: self, "Started");
gst::debug!(SRC_CAT, imp = self, "Started");
Ok(())
}
fn pause(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(SRC_CAT, imp: self, "Pausing");
gst::debug!(SRC_CAT, imp = self, "Pausing");
self.task.pause().block_on()?;
gst::debug!(SRC_CAT, imp: self, "Paused");
gst::debug!(SRC_CAT, imp = self, "Paused");
Ok(())
}
}
@ -1191,7 +1190,7 @@ impl ElementImpl for ProxySrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(SRC_CAT, imp: self, "Changing state {:?}", transition);
gst::trace!(SRC_CAT, imp = self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {


@ -30,7 +30,6 @@ use once_cell::sync::Lazy;
use std::collections::VecDeque;
use std::sync::Mutex;
use std::time::Duration;
use std::{u32, u64};
use crate::runtime::prelude::*;
use crate::runtime::{Context, PadSink, PadSrc, Task};
@ -90,7 +89,7 @@ impl PadSinkHandler for QueuePadSinkHandler {
buffer: gst::Buffer,
) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
async move {
gst::log!(CAT, obj: pad, "Handling {:?}", buffer);
gst::log!(CAT, obj = pad, "Handling {:?}", buffer);
let imp = elem.imp();
imp.enqueue_item(DataQueueItem::Buffer(buffer)).await
}
@ -104,7 +103,7 @@ impl PadSinkHandler for QueuePadSinkHandler {
list: gst::BufferList,
) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
async move {
gst::log!(CAT, obj: pad, "Handling {:?}", list);
gst::log!(CAT, obj = pad, "Handling {:?}", list);
let imp = elem.imp();
imp.enqueue_item(DataQueueItem::BufferList(list)).await
}
@ -112,11 +111,11 @@ impl PadSinkHandler for QueuePadSinkHandler {
}
fn sink_event(self, pad: &gst::Pad, imp: &Queue, event: gst::Event) -> bool {
gst::debug!(CAT, obj: pad, "Handling non-serialized {:?}", event);
gst::debug!(CAT, obj = pad, "Handling non-serialized {:?}", event);
if let gst::EventView::FlushStart(..) = event.view() {
if let Err(err) = imp.task.flush_start().await_maybe_on_context() {
gst::error!(CAT, obj: pad, "FlushStart failed {:?}", err);
gst::error!(CAT, obj = pad, "FlushStart failed {:?}", err);
gst::element_imp_error!(
imp,
gst::StreamError::Failed,
@ -127,7 +126,7 @@ impl PadSinkHandler for QueuePadSinkHandler {
}
}
gst::log!(CAT, obj: pad, "Forwarding non-serialized {:?}", event);
gst::log!(CAT, obj = pad, "Forwarding non-serialized {:?}", event);
imp.src_pad.gst_pad().push_event(event)
}
@ -138,13 +137,13 @@ impl PadSinkHandler for QueuePadSinkHandler {
event: gst::Event,
) -> BoxFuture<'static, bool> {
async move {
gst::log!(CAT, obj: pad, "Handling serialized {:?}", event);
gst::log!(CAT, obj = pad, "Handling serialized {:?}", event);
let imp = elem.imp();
if let gst::EventView::FlushStop(..) = event.view() {
if let Err(err) = imp.task.flush_stop().await_maybe_on_context() {
gst::error!(CAT, obj: pad, "FlushStop failed {:?}", err);
gst::error!(CAT, obj = pad, "FlushStop failed {:?}", err);
gst::element_imp_error!(
imp,
gst::StreamError::Failed,
@ -155,21 +154,21 @@ impl PadSinkHandler for QueuePadSinkHandler {
}
}
gst::log!(CAT, obj: pad, "Queuing serialized {:?}", event);
gst::log!(CAT, obj = pad, "Queuing serialized {:?}", event);
imp.enqueue_item(DataQueueItem::Event(event)).await.is_ok()
}
.boxed()
}
fn sink_query(self, pad: &gst::Pad, imp: &Queue, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj: pad, "Handling {:?}", query);
gst::log!(CAT, obj = pad, "Handling {:?}", query);
if query.is_serialized() {
// FIXME: How can we do this?
gst::log!(CAT, obj: pad, "Dropping serialized {:?}", query);
gst::log!(CAT, obj = pad, "Dropping serialized {:?}", query);
false
} else {
gst::log!(CAT, obj: pad, "Forwarding {:?}", query);
gst::log!(CAT, obj = pad, "Forwarding {:?}", query);
imp.src_pad.gst_pad().peer_query(query)
}
}
@ -182,18 +181,18 @@ impl PadSrcHandler for QueuePadSrcHandler {
type ElementImpl = Queue;
fn src_event(self, pad: &gst::Pad, imp: &Queue, event: gst::Event) -> bool {
gst::log!(CAT, obj: pad, "Handling {:?}", event);
gst::log!(CAT, obj = pad, "Handling {:?}", event);
use gst::EventView;
match event.view() {
EventView::FlushStart(..) => {
if let Err(err) = imp.task.flush_start().await_maybe_on_context() {
gst::error!(CAT, obj: pad, "FlushStart failed {:?}", err);
gst::error!(CAT, obj = pad, "FlushStart failed {:?}", err);
}
}
EventView::FlushStop(..) => {
if let Err(err) = imp.task.flush_stop().await_maybe_on_context() {
gst::error!(CAT, obj: pad, "FlushStop failed {:?}", err);
gst::error!(CAT, obj = pad, "FlushStop failed {:?}", err);
gst::element_imp_error!(
imp,
gst::StreamError::Failed,
@ -206,12 +205,12 @@ impl PadSrcHandler for QueuePadSrcHandler {
_ => (),
}
gst::log!(CAT, obj: pad, "Forwarding {:?}", event);
gst::log!(CAT, obj = pad, "Forwarding {:?}", event);
imp.sink_pad.gst_pad().push_event(event)
}
fn src_query(self, pad: &gst::Pad, imp: &Queue, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj: pad, "Handling {:?}", query);
gst::log!(CAT, obj = pad, "Handling {:?}", query);
if let gst::QueryViewMut::Scheduling(q) = query.view_mut() {
let mut new_query = gst::query::Scheduling::new();
@ -220,7 +219,7 @@ impl PadSrcHandler for QueuePadSrcHandler {
return res;
}
gst::log!(CAT, obj: pad, "Upstream returned {:?}", new_query);
gst::log!(CAT, obj = pad, "Upstream returned {:?}", new_query);
let (flags, min, max, align) = new_query.result();
q.set(flags, min, max, align);
@ -232,11 +231,11 @@ impl PadSrcHandler for QueuePadSrcHandler {
.filter(|m| m != &gst::PadMode::Pull)
.collect::<Vec<_>>(),
);
gst::log!(CAT, obj: pad, "Returning {:?}", q.query_mut());
gst::log!(CAT, obj = pad, "Returning {:?}", q.query_mut());
return true;
}
gst::log!(CAT, obj: pad, "Forwarding {:?}", query);
gst::log!(CAT, obj = pad, "Forwarding {:?}", query);
imp.sink_pad.gst_pad().peer_query(query)
}
}
@ -261,15 +260,15 @@ impl QueueTask {
match item {
DataQueueItem::Buffer(buffer) => {
gst::log!(CAT, obj: self.element, "Forwarding {:?}", buffer);
gst::log!(CAT, obj = self.element, "Forwarding {:?}", buffer);
queue.src_pad.push(buffer).await.map(drop)
}
DataQueueItem::BufferList(list) => {
gst::log!(CAT, obj: self.element, "Forwarding {:?}", list);
gst::log!(CAT, obj = self.element, "Forwarding {:?}", list);
queue.src_pad.push_list(list).await.map(drop)
}
DataQueueItem::Event(event) => {
gst::log!(CAT, obj: self.element, "Forwarding {:?}", event);
gst::log!(CAT, obj = self.element, "Forwarding {:?}", event);
queue.src_pad.push_event(event).await;
Ok(())
}
@ -282,7 +281,7 @@ impl TaskImpl for QueueTask {
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Starting task");
gst::log!(CAT, obj = self.element, "Starting task");
let queue = self.element.imp();
let mut last_res = queue.last_res.lock().unwrap();
@ -291,7 +290,7 @@ impl TaskImpl for QueueTask {
*last_res = Ok(gst::FlowSuccess::Ok);
gst::log!(CAT, obj: self.element, "Task started");
gst::log!(CAT, obj = self.element, "Task started");
Ok(())
}
.boxed()
@ -313,20 +312,20 @@ impl TaskImpl for QueueTask {
let queue = self.element.imp();
match res {
Ok(()) => {
gst::log!(CAT, obj: self.element, "Successfully pushed item");
gst::log!(CAT, obj = self.element, "Successfully pushed item");
*queue.last_res.lock().unwrap() = Ok(gst::FlowSuccess::Ok);
}
Err(gst::FlowError::Flushing) => {
gst::debug!(CAT, obj: self.element, "Flushing");
gst::debug!(CAT, obj = self.element, "Flushing");
*queue.last_res.lock().unwrap() = Err(gst::FlowError::Flushing);
}
Err(gst::FlowError::Eos) => {
gst::debug!(CAT, obj: self.element, "EOS");
gst::debug!(CAT, obj = self.element, "EOS");
*queue.last_res.lock().unwrap() = Err(gst::FlowError::Eos);
queue.src_pad.push_event(gst::event::Eos::new()).await;
}
Err(err) => {
gst::error!(CAT, obj: self.element, "Got error {}", err);
gst::error!(CAT, obj = self.element, "Got error {}", err);
gst::element_error!(
&self.element,
gst::StreamError::Failed,
@ -344,7 +343,7 @@ impl TaskImpl for QueueTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Stopping task");
gst::log!(CAT, obj = self.element, "Stopping task");
let queue = self.element.imp();
let mut last_res = queue.last_res.lock().unwrap();
@ -358,7 +357,7 @@ impl TaskImpl for QueueTask {
*last_res = Err(gst::FlowError::Flushing);
gst::log!(CAT, obj: self.element, "Task stopped");
gst::log!(CAT, obj = self.element, "Task stopped");
Ok(())
}
.boxed()
@ -366,7 +365,7 @@ impl TaskImpl for QueueTask {
fn flush_start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Starting task flush");
gst::log!(CAT, obj = self.element, "Starting task flush");
let queue = self.element.imp();
let mut last_res = queue.last_res.lock().unwrap();
@ -379,7 +378,7 @@ impl TaskImpl for QueueTask {
*last_res = Err(gst::FlowError::Flushing);
gst::log!(CAT, obj: self.element, "Task flush started");
gst::log!(CAT, obj = self.element, "Task flush started");
Ok(())
}
.boxed()
@ -454,7 +453,7 @@ impl Queue {
}
let mut pending_queue_grd = self.pending_queue.lock().unwrap();
gst::log!(CAT, imp: self, "Trying to empty pending queue");
gst::log!(CAT, imp = self, "Trying to empty pending queue");
if let Some(pending_queue) = pending_queue_grd.as_mut() {
let mut failed_item = None;
@ -471,17 +470,17 @@ impl Queue {
receiver
} else {
gst::log!(CAT, imp: self, "Pending queue is empty now");
gst::log!(CAT, imp = self, "Pending queue is empty now");
*pending_queue_grd = None;
return;
}
} else {
gst::log!(CAT, imp: self, "Flushing, dropping pending queue");
gst::log!(CAT, imp = self, "Flushing, dropping pending queue");
return;
}
};
gst::log!(CAT, imp: self, "Waiting for more queue space");
gst::log!(CAT, imp = self, "Waiting for more queue space");
let _ = more_queue_space_receiver.await;
}
}
@ -490,7 +489,7 @@ impl Queue {
let wait_fut = {
let dataqueue = self.dataqueue.lock().unwrap();
let dataqueue = dataqueue.as_ref().ok_or_else(|| {
gst::error!(CAT, imp: self, "No DataQueue");
gst::error!(CAT, imp = self, "No DataQueue");
gst::FlowError::Error
})?;
@ -519,18 +518,18 @@ impl Queue {
gst::log!(
CAT,
imp: self,
imp = self,
"Queue is full - Pushing first item on pending queue"
);
if schedule_now {
gst::log!(CAT, imp: self, "Scheduling pending queue now");
gst::log!(CAT, imp = self, "Scheduling pending queue now");
pending_queue.as_mut().unwrap().scheduled = true;
let wait_fut = self.schedule_pending_queue();
Some(wait_fut)
} else {
gst::log!(CAT, imp: self, "Scheduling pending queue later");
gst::log!(CAT, imp = self, "Scheduling pending queue later");
None
}
} else {
@ -543,7 +542,7 @@ impl Queue {
};
if let Some(wait_fut) = wait_fut {
gst::log!(CAT, imp: self, "Blocking until queue has space again");
gst::log!(CAT, imp = self, "Blocking until queue has space again");
wait_fut.await;
}
@ -551,7 +550,7 @@ impl Queue {
}
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Preparing");
gst::debug!(CAT, imp = self, "Preparing");
let settings = self.settings.lock().unwrap().clone();
@ -589,13 +588,13 @@ impl Queue {
.prepare(QueueTask::new(self.obj().clone(), dataqueue), context)
.block_on()?;
gst::debug!(CAT, imp: self, "Prepared");
gst::debug!(CAT, imp = self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp: self, "Unpreparing");
gst::debug!(CAT, imp = self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
@ -604,20 +603,20 @@ impl Queue {
*self.last_res.lock().unwrap() = Ok(gst::FlowSuccess::Ok);
gst::debug!(CAT, imp: self, "Unprepared");
gst::debug!(CAT, imp = self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Stopping");
gst::debug!(CAT, imp = self, "Stopping");
self.task.stop().await_maybe_on_context()?;
gst::debug!(CAT, imp: self, "Stopped");
gst::debug!(CAT, imp = self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Starting");
gst::debug!(CAT, imp = self, "Starting");
self.task.start().await_maybe_on_context()?;
gst::debug!(CAT, imp: self, "Started");
gst::debug!(CAT, imp = self, "Started");
Ok(())
}
}
@ -778,7 +777,7 @@ impl ElementImpl for Queue {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {


@ -541,7 +541,7 @@ unsafe impl<T: IoSafe + Write> IoSafe for std::io::BufWriter<T> {}
unsafe impl<T: IoSafe + Write> IoSafe for std::io::LineWriter<T> {}
unsafe impl<T: IoSafe + ?Sized> IoSafe for &mut T {}
unsafe impl<T: IoSafe + ?Sized> IoSafe for Box<T> {}
unsafe impl<T: Clone + IoSafe + ?Sized> IoSafe for std::borrow::Cow<'_, T> {}
unsafe impl<T: Clone + IoSafe> IoSafe for std::borrow::Cow<'_, T> {}
impl<T: Read + Send + 'static> AsyncRead for Async<T> {
fn poll_read(

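The `IoSafe` impl for `Cow` above drops its `?Sized` relaxation, presumably because `Clone` has `Sized` as a supertrait, so `T: Clone + ?Sized` was never broader than plain `T: Clone`. A minimal illustration of the bound interaction (the function is hypothetical):

use std::borrow::Cow;

// `Clone` is declared as `pub trait Clone: Sized`, so requiring `Clone`
// already restricts `T` to sized types; `ToOwned` is then satisfied via the
// blanket `impl<T: Clone> ToOwned for T`.
fn take_cow<T: Clone>(_cow: Cow<'_, T>) {}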

@ -57,7 +57,7 @@ const READ: usize = 0;
const WRITE: usize = 1;
thread_local! {
static CURRENT_REACTOR: RefCell<Option<Reactor>> = RefCell::new(None);
static CURRENT_REACTOR: RefCell<Option<Reactor>> = const { RefCell::new(None) };
}
#[derive(Debug)]


@ -27,7 +27,7 @@ use super::{CallOnDrop, JoinHandle, Reactor};
use crate::runtime::RUNTIME_CAT;
thread_local! {
static CURRENT_SCHEDULER: RefCell<Option<HandleWeak>> = RefCell::new(None);
static CURRENT_SCHEDULER: RefCell<Option<HandleWeak>> = const { RefCell::new(None) };
}
#[derive(Debug)]
@ -301,9 +301,7 @@ impl Scheduler {
.borrow()
.as_ref()
.and_then(HandleWeak::upgrade)
.map_or(false, |cur| {
std::ptr::eq(self, Arc::as_ptr(&cur.0.scheduler))
})
.is_some_and(|cur| std::ptr::eq(self, Arc::as_ptr(&cur.0.scheduler)))
})
}
}

View file

@ -24,7 +24,7 @@ use super::CallOnDrop;
use crate::runtime::RUNTIME_CAT;
thread_local! {
static CURRENT_TASK_ID: Cell<Option<TaskId>> = Cell::new(None);
static CURRENT_TASK_ID: Cell<Option<TaskId>> = const { Cell::new(None) };
}
#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)]

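The last three runtime files switch their `thread_local!` statics to `const { .. }` initializers, which (stabilized in Rust 1.59) let the thread local skip the lazy-initialization path when the initial value is a constant expression. A self-contained sketch with an illustrative counter:

use std::cell::Cell;

thread_local! {
    // A const initializer is evaluated at compile time, so the first access
    // on each thread does not have to run user initialization code lazily.
    static COUNTER: Cell<u32> = const { Cell::new(0) };
}

fn next_id() -> u32 {
    COUNTER.with(|c| {
        let id = c.get();
        c.set(id + 1);
        id
    })
}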

@ -129,7 +129,7 @@ pub trait PadSrcHandler: Clone + Send + Sync + 'static {
if pad.is_active() {
gst::debug!(
RUNTIME_CAT,
obj: pad,
obj = pad,
"Already activated in {:?} mode ",
pad.mode()
);
@ -137,7 +137,12 @@ pub trait PadSrcHandler: Clone + Send + Sync + 'static {
}
pad.activate_mode(gst::PadMode::Push, true).map_err(|err| {
gst::error!(RUNTIME_CAT, obj: pad, "Error in PadSrc activate: {:?}", err);
gst::error!(
RUNTIME_CAT,
obj = pad,
"Error in PadSrc activate: {:?}",
err
);
gst::loggable_error!(RUNTIME_CAT, "Error in PadSrc activate: {:?}", err)
})
}
@ -153,7 +158,7 @@ pub trait PadSrcHandler: Clone + Send + Sync + 'static {
}
fn src_event(self, pad: &gst::Pad, imp: &Self::ElementImpl, event: gst::Event) -> bool {
gst::log!(RUNTIME_CAT, obj: pad, "Handling {:?}", event);
gst::log!(RUNTIME_CAT, obj = pad, "Handling {:?}", event);
let elem = imp.obj();
// FIXME with GAT on `Self::ElementImpl`, we should be able to
@ -178,13 +183,13 @@ pub trait PadSrcHandler: Clone + Send + Sync + 'static {
}
fn src_query(self, pad: &gst::Pad, imp: &Self::ElementImpl, query: &mut gst::QueryRef) -> bool {
gst::log!(RUNTIME_CAT, obj: pad, "Handling {:?}", query);
gst::log!(RUNTIME_CAT, obj = pad, "Handling {:?}", query);
if query.is_serialized() {
// FIXME serialized queries should be handled with the dataflow
// but we can't return a `Future` because we couldn't honor QueryRef's lifetime
false
} else {
gst::log!(RUNTIME_CAT, obj: pad, "Handling {:?}", query);
gst::log!(RUNTIME_CAT, obj = pad, "Handling {:?}", query);
let elem = imp.obj();
// FIXME with GAT on `Self::ElementImpl`, we should be able to
@ -217,48 +222,61 @@ impl PadSrcInner {
}
pub async fn push(&self, buffer: gst::Buffer) -> Result<FlowSuccess, FlowError> {
gst::log!(RUNTIME_CAT, obj: self.gst_pad, "Pushing {:?}", buffer);
gst::log!(RUNTIME_CAT, obj = self.gst_pad, "Pushing {:?}", buffer);
let success = self.gst_pad.push(buffer).map_err(|err| {
gst::error!(RUNTIME_CAT,
obj: self.gst_pad,
gst::error!(
RUNTIME_CAT,
obj = self.gst_pad,
"Failed to push Buffer to PadSrc: {:?}",
err,
);
err
})?;
gst::log!(RUNTIME_CAT, obj: self.gst_pad, "Processing any pending sub tasks");
gst::log!(
RUNTIME_CAT,
obj = self.gst_pad,
"Processing any pending sub tasks"
);
Context::drain_sub_tasks().await?;
Ok(success)
}
pub async fn push_list(&self, list: gst::BufferList) -> Result<FlowSuccess, FlowError> {
gst::log!(RUNTIME_CAT, obj: self.gst_pad, "Pushing {:?}", list);
gst::log!(RUNTIME_CAT, obj = self.gst_pad, "Pushing {:?}", list);
let success = self.gst_pad.push_list(list).map_err(|err| {
gst::error!(
RUNTIME_CAT,
obj: self.gst_pad,
obj = self.gst_pad,
"Failed to push BufferList to PadSrc: {:?}",
err,
);
err
})?;
gst::log!(RUNTIME_CAT, obj: self.gst_pad, "Processing any pending sub tasks");
gst::log!(
RUNTIME_CAT,
obj = self.gst_pad,
"Processing any pending sub tasks"
);
Context::drain_sub_tasks().await?;
Ok(success)
}
pub async fn push_event(&self, event: gst::Event) -> bool {
gst::log!(RUNTIME_CAT, obj: self.gst_pad, "Pushing {:?}", event);
gst::log!(RUNTIME_CAT, obj = self.gst_pad, "Pushing {:?}", event);
let was_handled = self.gst_pad.push_event(event);
gst::log!(RUNTIME_CAT, obj: self.gst_pad, "Processing any pending sub tasks");
gst::log!(
RUNTIME_CAT,
obj = self.gst_pad,
"Processing any pending sub tasks"
);
if Context::drain_sub_tasks().await.is_err() {
return false;
}
@ -365,7 +383,7 @@ impl PadSrc {
H::ElementImpl::catch_panic_pad_function(
parent,
|| {
gst::error!(RUNTIME_CAT, obj: gst_pad, "Panic in PadSrc activate");
gst::error!(RUNTIME_CAT, obj = gst_pad, "Panic in PadSrc activate");
Err(gst::loggable_error!(
RUNTIME_CAT,
"Panic in PadSrc activate"
@ -383,7 +401,7 @@ impl PadSrc {
H::ElementImpl::catch_panic_pad_function(
parent,
|| {
gst::error!(RUNTIME_CAT, obj: gst_pad, "Panic in PadSrc activatemode");
gst::error!(RUNTIME_CAT, obj = gst_pad, "Panic in PadSrc activatemode");
Err(gst::loggable_error!(
RUNTIME_CAT,
"Panic in PadSrc activatemode"
@ -392,7 +410,7 @@ impl PadSrc {
move |imp| {
gst::log!(
RUNTIME_CAT,
obj: gst_pad,
obj = gst_pad,
"ActivateMode {:?}, {}",
mode,
active
@ -401,7 +419,7 @@ impl PadSrc {
if mode == gst::PadMode::Pull {
gst::error!(
RUNTIME_CAT,
obj: gst_pad,
obj = gst_pad,
"Pull mode not supported by PadSrc"
);
return Err(gst::loggable_error!(
@ -442,7 +460,7 @@ impl PadSrc {
} else {
gst::fixme!(
RUNTIME_CAT,
obj: gst_pad,
obj = gst_pad,
"Serialized Query not supported"
);
false
@ -507,7 +525,7 @@ pub trait PadSinkHandler: Clone + Send + Sync + 'static {
if pad.is_active() {
gst::debug!(
RUNTIME_CAT,
obj: pad,
obj = pad,
"Already activated in {:?} mode ",
pad.mode()
);
@ -517,7 +535,7 @@ pub trait PadSinkHandler: Clone + Send + Sync + 'static {
pad.activate_mode(gst::PadMode::Push, true).map_err(|err| {
gst::error!(
RUNTIME_CAT,
obj: pad,
obj = pad,
"Error in PadSink activate: {:?}",
err
);
@ -555,7 +573,7 @@ pub trait PadSinkHandler: Clone + Send + Sync + 'static {
fn sink_event(self, pad: &gst::Pad, imp: &Self::ElementImpl, event: gst::Event) -> bool {
assert!(!event.is_serialized());
gst::log!(RUNTIME_CAT, obj: pad, "Handling {:?}", event);
gst::log!(RUNTIME_CAT, obj = pad, "Handling {:?}", event);
let elem = imp.obj();
// FIXME with GAT on `Self::ElementImpl`, we should be able to
@ -581,7 +599,7 @@ pub trait PadSinkHandler: Clone + Send + Sync + 'static {
let element = unsafe { elem.unsafe_cast::<gst::Element>() };
async move {
gst::log!(RUNTIME_CAT, obj: pad, "Handling {:?}", event);
gst::log!(RUNTIME_CAT, obj = pad, "Handling {:?}", event);
gst::Pad::event_default(&pad, Some(&element), event)
}
@ -624,12 +642,12 @@ pub trait PadSinkHandler: Clone + Send + Sync + 'static {
query: &mut gst::QueryRef,
) -> bool {
if query.is_serialized() {
gst::log!(RUNTIME_CAT, obj: pad, "Dropping {:?}", query);
gst::log!(RUNTIME_CAT, obj = pad, "Dropping {:?}", query);
// FIXME serialized queries should be handled with the dataflow
// but we can't return a `Future` because we couldn't honor QueryRef's lifetime
false
} else {
gst::log!(RUNTIME_CAT, obj: pad, "Handling {:?}", query);
gst::log!(RUNTIME_CAT, obj = pad, "Handling {:?}", query);
let elem = imp.obj();
// FIXME with GAT on `Self::ElementImpl`, we should be able to
@ -764,7 +782,7 @@ impl PadSink {
H::ElementImpl::catch_panic_pad_function(
parent,
|| {
gst::error!(RUNTIME_CAT, obj: gst_pad, "Panic in PadSink activate");
gst::error!(RUNTIME_CAT, obj = gst_pad, "Panic in PadSink activate");
Err(gst::loggable_error!(
RUNTIME_CAT,
"Panic in PadSink activate"
@ -782,7 +800,11 @@ impl PadSink {
H::ElementImpl::catch_panic_pad_function(
parent,
|| {
gst::error!(RUNTIME_CAT, obj: gst_pad, "Panic in PadSink activatemode");
gst::error!(
RUNTIME_CAT,
obj = gst_pad,
"Panic in PadSink activatemode"
);
Err(gst::loggable_error!(
RUNTIME_CAT,
"Panic in PadSink activatemode"
@ -791,7 +813,7 @@ impl PadSink {
move |imp| {
gst::log!(
RUNTIME_CAT,
obj: gst_pad,
obj = gst_pad,
"ActivateMode {:?}, {}",
mode,
active
@ -800,7 +822,7 @@ impl PadSink {
if mode == gst::PadMode::Pull {
gst::error!(
RUNTIME_CAT,
obj: gst_pad,
obj = gst_pad,
"Pull mode not supported by PadSink"
);
return Err(gst::loggable_error!(
@ -923,7 +945,7 @@ impl PadSink {
} else {
gst::fixme!(
RUNTIME_CAT,
obj: gst_pad,
obj = gst_pad,
"Serialized Query not supported"
);
false


@ -33,7 +33,10 @@ use std::net::UdpSocket;
use crate::runtime::Async;
#[cfg(unix)]
use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd};
use std::os::{
fd::BorrowedFd,
unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd},
};
#[cfg(windows)]
use std::os::windows::io::{AsRawSocket, FromRawSocket, IntoRawSocket, RawSocket};
@ -74,7 +77,7 @@ impl<T: SocketRead> Socket<T> {
buffer_pool.set_active(true).map_err(|err| {
gst::error!(
SOCKET_CAT,
obj: element,
obj = element,
"Failed to prepare socket: {}",
err
);
@ -122,7 +125,7 @@ impl<T: SocketRead> Socket<T> {
pub async fn try_next(
&mut self,
) -> Result<(gst::Buffer, Option<std::net::SocketAddr>), SocketError> {
gst::log!(SOCKET_CAT, obj: self.element, "Trying to read data");
gst::log!(SOCKET_CAT, obj = self.element, "Trying to read data");
if self.mapped_buffer.is_none() {
match self.buffer_pool.acquire_buffer(None) {
@ -130,7 +133,12 @@ impl<T: SocketRead> Socket<T> {
self.mapped_buffer = Some(buffer.into_mapped_buffer_writable().unwrap());
}
Err(err) => {
gst::debug!(SOCKET_CAT, obj: self.element, "Failed to acquire buffer {:?}", err);
gst::debug!(
SOCKET_CAT,
obj = self.element,
"Failed to acquire buffer {:?}",
err
);
return Err(SocketError::Gst(err));
}
}
@ -149,7 +157,7 @@ impl<T: SocketRead> Socket<T> {
// so as to display another message
gst::debug!(
SOCKET_CAT,
obj: self.element,
obj = self.element,
"Read {} bytes at {} (clock {})",
len,
running_time.display(),
@ -157,7 +165,7 @@ impl<T: SocketRead> Socket<T> {
);
running_time
} else {
gst::debug!(SOCKET_CAT, obj: self.element, "Read {} bytes", len);
gst::debug!(SOCKET_CAT, obj = self.element, "Read {} bytes", len);
gst::ClockTime::NONE
};
@ -173,7 +181,7 @@ impl<T: SocketRead> Socket<T> {
Ok((buffer, saddr))
}
Err(err) => {
gst::debug!(SOCKET_CAT, obj: self.element, "Read error {:?}", err);
gst::debug!(SOCKET_CAT, obj = self.element, "Read error {:?}", err);
Err(SocketError::Io(err))
}
@ -184,7 +192,12 @@ impl<T: SocketRead> Socket<T> {
impl<T: SocketRead> Drop for Socket<T> {
fn drop(&mut self) {
if let Err(err) = self.buffer_pool.set_active(false) {
gst::error!(SOCKET_CAT, obj: self.element, "Failed to unprepare socket: {}", err);
gst::error!(
SOCKET_CAT,
obj = self.element,
"Failed to unprepare socket: {}",
err
);
}
}
}
@ -221,8 +234,14 @@ impl GioSocketWrapper {
}
#[cfg(any(
bsd,
linux_like,
target_os = "macos",
target_os = "ios",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "openbsd",
target_os = "netbsd",
target_os = "linux",
target_os = "android",
target_os = "aix",
target_os = "fuchsia",
target_os = "haiku",
@ -236,18 +255,30 @@ impl GioSocketWrapper {
let socket = self.as_socket();
sockopt::set_ip_tos(socket, tos)?;
sockopt::set_ip_tos(
unsafe { BorrowedFd::borrow_raw(socket.as_raw_fd()) },
tos as u8,
)?;
if socket.family() == gio::SocketFamily::Ipv6 {
sockopt::set_ipv6_tclass(socket, tos)?;
sockopt::set_ipv6_tclass(
unsafe { BorrowedFd::borrow_raw(socket.as_raw_fd()) },
tos as u32,
)?;
}
Ok(())
}
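The TOS/TCLASS helpers above now hand the socket's file descriptor to the sockopt calls as a std::os::fd::BorrowedFd instead of passing the GIO socket object directly. A minimal Unix-only sketch of that borrow_raw pattern, assuming only std (set_low_delay is a hypothetical stand-in for a setsockopt-style helper, not an API from this repository):

use std::net::UdpSocket;
use std::os::fd::{AsRawFd, BorrowedFd};

// Hypothetical helper standing in for a call such as set_ip_tos(), which now
// expects a BorrowedFd rather than an object implementing AsRawFd.
fn set_low_delay(fd: BorrowedFd<'_>) {
    println!("would call setsockopt() on fd {}", fd.as_raw_fd());
}

fn main() -> std::io::Result<()> {
    let socket = UdpSocket::bind("127.0.0.1:0")?;

    // SAFETY: the raw fd belongs to `socket`, which outlives this borrow.
    let fd = unsafe { BorrowedFd::borrow_raw(socket.as_raw_fd()) };
    set_low_delay(fd);

    Ok(())
}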
#[cfg(not(any(
bsd,
linux_like,
target_os = "macos",
target_os = "ios",
target_os = "freebsd",
target_os = "dragonfly",
target_os = "openbsd",
target_os = "netbsd",
target_os = "linux",
target_os = "android",
target_os = "aix",
target_os = "fuchsia",
target_os = "haiku",


@ -31,8 +31,6 @@ use std::io;
use std::net::{IpAddr, SocketAddr, TcpStream};
use std::sync::Mutex;
use std::time::Duration;
use std::u16;
use std::u32;
use crate::runtime::prelude::*;
use crate::runtime::task;
@ -40,6 +38,8 @@ use crate::runtime::{Context, PadSrc, Task, TaskState};
use crate::runtime::Async;
use crate::socket::{Socket, SocketError, SocketRead};
use futures::channel::mpsc::{channel, Receiver, Sender};
use futures::pin_mut;
const DEFAULT_HOST: Option<&str> = Some("127.0.0.1");
const DEFAULT_PORT: i32 = 4953;
@ -48,6 +48,11 @@ const DEFAULT_BLOCKSIZE: u32 = 4096;
const DEFAULT_CONTEXT: &str = "";
const DEFAULT_CONTEXT_WAIT: Duration = Duration::ZERO;
#[derive(Debug, Default)]
struct State {
event_sender: Option<Sender<gst::Event>>,
}
#[derive(Debug, Clone)]
struct Settings {
host: Option<String>,
@ -97,7 +102,7 @@ impl PadSrcHandler for TcpClientSrcPadHandler {
type ElementImpl = TcpClientSrc;
fn src_event(self, pad: &gst::Pad, imp: &TcpClientSrc, event: gst::Event) -> bool {
gst::log!(CAT, obj: pad, "Handling {:?}", event);
gst::log!(CAT, obj = pad, "Handling {:?}", event);
use gst::EventView;
let ret = match event.view() {
@ -109,16 +114,16 @@ impl PadSrcHandler for TcpClientSrcPadHandler {
};
if ret {
gst::log!(CAT, obj: pad, "Handled {:?}", event);
gst::log!(CAT, obj = pad, "Handled {:?}", event);
} else {
gst::log!(CAT, obj: pad, "Didn't handle {:?}", event);
gst::log!(CAT, obj = pad, "Didn't handle {:?}", event);
}
ret
}
fn src_query(self, pad: &gst::Pad, imp: &TcpClientSrc, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj: pad, "Handling {:?}", query);
gst::log!(CAT, obj = pad, "Handling {:?}", query);
use gst::QueryViewMut;
let ret = match query.view_mut() {
@ -150,9 +155,9 @@ impl PadSrcHandler for TcpClientSrcPadHandler {
};
if ret {
gst::log!(CAT, obj: pad, "Handled {:?}", query);
gst::log!(CAT, obj = pad, "Handled {:?}", query);
} else {
gst::log!(CAT, obj: pad, "Didn't handle {:?}", query);
gst::log!(CAT, obj = pad, "Didn't handle {:?}", query);
}
ret
@ -166,10 +171,16 @@ struct TcpClientSrcTask {
socket: Option<Socket<TcpClientReader>>,
need_initial_events: bool,
need_segment: bool,
event_receiver: Receiver<gst::Event>,
}
impl TcpClientSrcTask {
fn new(element: super::TcpClientSrc, saddr: SocketAddr, buffer_pool: gst::BufferPool) -> Self {
fn new(
element: super::TcpClientSrc,
saddr: SocketAddr,
buffer_pool: gst::BufferPool,
event_receiver: Receiver<gst::Event>,
) -> Self {
TcpClientSrcTask {
element,
saddr,
@ -177,6 +188,7 @@ impl TcpClientSrcTask {
socket: None,
need_initial_events: true,
need_segment: true,
event_receiver,
}
}
@ -184,12 +196,12 @@ impl TcpClientSrcTask {
&mut self,
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::log!(CAT, obj: self.element, "Handling {:?}", buffer);
gst::log!(CAT, obj = self.element, "Handling {:?}", buffer);
let tcpclientsrc = self.element.imp();
if self.need_initial_events {
gst::debug!(CAT, obj: self.element, "Pushing initial events");
gst::debug!(CAT, obj = self.element, "Pushing initial events");
let stream_id = format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
let stream_start_evt = gst::event::StreamStart::builder(&stream_id)
@ -228,20 +240,20 @@ impl TcpClientSrcTask {
let res = tcpclientsrc.src_pad.push(buffer).await;
match res {
Ok(_) => {
gst::log!(CAT, obj: self.element, "Successfully pushed buffer");
gst::log!(CAT, obj = self.element, "Successfully pushed buffer");
}
Err(gst::FlowError::Flushing) => {
gst::debug!(CAT, obj: self.element, "Flushing");
gst::debug!(CAT, obj = self.element, "Flushing");
}
Err(gst::FlowError::Eos) => {
gst::debug!(CAT, obj: self.element, "EOS");
gst::debug!(CAT, obj = self.element, "EOS");
tcpclientsrc
.src_pad
.push_event(gst::event::Eos::new())
.await;
}
Err(err) => {
gst::error!(CAT, obj: self.element, "Got error {}", err);
gst::error!(CAT, obj = self.element, "Got error {}", err);
gst::element_error!(
self.element,
gst::StreamError::Failed,
@ -260,7 +272,12 @@ impl TaskImpl for TcpClientSrcTask {
fn prepare(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Preparing task connecting to {:?}", self.saddr);
gst::log!(
CAT,
obj = self.element,
"Preparing task connecting to {:?}",
self.saddr
);
let socket = Async::<TcpStream>::connect(self.saddr)
.await
@ -285,7 +302,7 @@ impl TaskImpl for TcpClientSrcTask {
})?,
);
gst::log!(CAT, obj: self.element, "Task prepared");
gst::log!(CAT, obj = self.element, "Task prepared");
Ok(())
}
.boxed()
@ -313,34 +330,58 @@ impl TaskImpl for TcpClientSrcTask {
fn try_next(&mut self) -> BoxFuture<'_, Result<gst::Buffer, gst::FlowError>> {
async move {
self.socket
.as_mut()
.unwrap()
.try_next()
.await
.map(|(buffer, _saddr)| buffer)
.map_err(|err| {
gst::error!(CAT, obj: self.element, "Got error {:?}", err);
match err {
SocketError::Gst(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {}", err]
);
}
SocketError::Io(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("I/O error"),
["streaming stopped, I/O error {}", err]
);
let event_fut = self.event_receiver.next().fuse();
let socket_fut = self.socket.as_mut().unwrap().try_next().fuse();
pin_mut!(event_fut);
pin_mut!(socket_fut);
futures::select! {
event_res = event_fut => match event_res {
Some(event) => {
gst::debug!(CAT, obj = self.element, "Handling element level event {event:?}");
match event.view() {
gst::EventView::Eos(_) => Err(gst::FlowError::Eos),
ev => {
gst::error!(CAT, obj = self.element, "Unexpected event {ev:?} on channel");
Err(gst::FlowError::Error)
}
}
}
gst::FlowError::Error
})
None => {
gst::error!(CAT, obj = self.element, "Unexpected return on event channel");
Err(gst::FlowError::Error)
}
},
socket_res = socket_fut => match socket_res {
Ok((buffer, _saddr)) => Ok(buffer),
Err(err) => {
gst::error!(CAT, obj = self.element, "Got error {err:#}");
match err {
SocketError::Gst(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {err}"]
);
}
SocketError::Io(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("I/O error"),
["streaming stopped, I/O error {err}"]
);
}
}
Err(gst::FlowError::Error)
}
},
}
}
.boxed()
}
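The reworked try_next() above races the new element-event channel against the socket read with futures::select!, so an application-injected EOS can interrupt a read that would otherwise block until data arrives. A stand-alone sketch of that pattern, assuming only the futures crate (Item, next_item and the string commands are invented for illustration):

use futures::channel::mpsc;
use futures::{pin_mut, select, FutureExt, SinkExt, StreamExt};

#[derive(Debug)]
enum Item {
    Data(u32),
    Eos,
}

// Race a command channel against a data producer, the way the task races
// its event receiver against the socket future.
async fn next_item(
    commands: &mut mpsc::Receiver<&'static str>,
    data: &mut mpsc::Receiver<u32>,
) -> Item {
    let cmd_fut = commands.next().fuse();
    let data_fut = data.next().fuse();
    pin_mut!(cmd_fut);
    pin_mut!(data_fut);

    select! {
        cmd = cmd_fut => match cmd {
            // An element-level EOS (or a closed channel) ends the loop.
            Some("eos") | None => Item::Eos,
            Some(other) => panic!("unexpected command {other}"),
        },
        buf = data_fut => match buf {
            Some(n) => Item::Data(n),
            None => Item::Eos,
        },
    }
}

fn main() {
    futures::executor::block_on(async {
        let (mut cmd_tx, mut cmd_rx) = mpsc::channel::<&'static str>(1);
        let (mut data_tx, mut data_rx) = mpsc::channel::<u32>(4);

        data_tx.send(1).await.unwrap();
        println!("{:?}", next_item(&mut cmd_rx, &mut data_rx).await); // Data(1)

        cmd_tx.send("eos").await.unwrap();
        println!("{:?}", next_item(&mut cmd_rx, &mut data_rx).await); // Eos
    });
}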
@ -351,9 +392,9 @@ impl TaskImpl for TcpClientSrcTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Stopping task");
gst::log!(CAT, obj = self.element, "Stopping task");
self.need_initial_events = true;
gst::log!(CAT, obj: self.element, "Task stopped");
gst::log!(CAT, obj = self.element, "Task stopped");
Ok(())
}
.boxed()
@ -361,13 +402,47 @@ impl TaskImpl for TcpClientSrcTask {
fn flush_stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Stopping task flush");
gst::log!(CAT, obj = self.element, "Stopping task flush");
self.need_initial_events = true;
gst::log!(CAT, obj: self.element, "Task flush stopped");
gst::log!(CAT, obj = self.element, "Task flush stopped");
Ok(())
}
.boxed()
}
fn handle_loop_error(&mut self, err: gst::FlowError) -> BoxFuture<'_, task::Trigger> {
async move {
match err {
gst::FlowError::Flushing => {
gst::debug!(CAT, obj = self.element, "Flushing");
task::Trigger::FlushStart
}
gst::FlowError::Eos => {
gst::debug!(CAT, obj = self.element, "EOS");
self.element
.imp()
.src_pad
.push_event(gst::event::Eos::new())
.await;
task::Trigger::Stop
}
err => {
gst::error!(CAT, obj = self.element, "Got error {err}");
gst::element_error!(
&self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {}", err]
);
task::Trigger::Error
}
}
}
.boxed()
}
}
pub struct TcpClientSrc {
@ -375,6 +450,7 @@ pub struct TcpClientSrc {
task: Task,
configured_caps: Mutex<Option<gst::Caps>>,
settings: Mutex<Settings>,
state: Mutex<State>,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
@ -387,7 +463,7 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
impl TcpClientSrc {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Preparing");
gst::debug!(CAT, imp = self, "Preparing");
let settings = self.settings.lock().unwrap().clone();
let context =
@ -431,49 +507,59 @@ impl TcpClientSrc {
let saddr = SocketAddr::new(host, port as u16);
let (sender, receiver) = channel(1);
// Don't block on `prepare` as the socket connection takes time.
// This will be performed in the background and we'll block on
// `start` which will also ensure `prepare` completed successfully.
let fut = self
.task
.prepare(
TcpClientSrcTask::new(self.obj().clone(), saddr, buffer_pool),
TcpClientSrcTask::new(self.obj().clone(), saddr, buffer_pool, receiver),
context,
)
.check()?;
drop(fut);
gst::debug!(CAT, imp: self, "Preparing asynchronously");
let mut state = self.state.lock().unwrap();
state.event_sender = Some(sender);
drop(state);
gst::debug!(CAT, imp = self, "Preparing asynchronously");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp: self, "Unpreparing");
gst::debug!(CAT, imp = self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
gst::debug!(CAT, imp: self, "Unprepared");
gst::debug!(CAT, imp = self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Stopping");
gst::debug!(CAT, imp = self, "Stopping");
self.task.stop().block_on()?;
gst::debug!(CAT, imp: self, "Stopped");
gst::debug!(CAT, imp = self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Starting");
gst::debug!(CAT, imp = self, "Starting");
self.task.start().block_on()?;
gst::debug!(CAT, imp: self, "Started");
gst::debug!(CAT, imp = self, "Started");
Ok(())
}
fn pause(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Pausing");
gst::debug!(CAT, imp = self, "Pausing");
self.task.pause().block_on()?;
gst::debug!(CAT, imp: self, "Paused");
gst::debug!(CAT, imp = self, "Paused");
Ok(())
}
fn state(&self) -> TaskState {
self.task.state()
}
}
#[glib::object_subclass]
@ -491,6 +577,7 @@ impl ObjectSubclass for TcpClientSrc {
task: Task::default(),
configured_caps: Default::default(),
settings: Default::default(),
state: Default::default(),
}
}
}
@ -626,7 +713,7 @@ impl ElementImpl for TcpClientSrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {
@ -664,4 +751,31 @@ impl ElementImpl for TcpClientSrc {
Ok(success)
}
fn send_event(&self, event: gst::Event) -> bool {
use gst::EventView;
gst::debug!(CAT, imp = self, "Handling element level event {event:?}");
match event.view() {
EventView::Eos(_) => {
if self.state() != TaskState::Started {
if let Err(err) = self.start() {
gst::error!(CAT, imp = self, "Failed to start task thread {err:?}");
}
}
if self.state() == TaskState::Started {
let mut state = self.state.lock().unwrap();
if let Some(event_tx) = state.event_sender.as_mut() {
return event_tx.try_send(event.clone()).is_ok();
}
}
false
}
_ => self.parent_send_event(event),
}
}
}
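With this override, gst_element_send_event() becomes a practical way for an application to push an EOS into the source task rather than having the event dropped. A hedged usage sketch, assuming the element is registered as "ts-tcpclientsrc" and the threadshare plugin is installed; in a real pipeline the element would normally be running before the event is sent:

use gst::prelude::*;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    gst::init()?;

    let src = gst::ElementFactory::make("ts-tcpclientsrc").build()?;

    // The new send_event() forwards the EOS to the task over the mpsc
    // channel; the return value reports whether it could be queued.
    let handled = src.send_event(gst::event::Eos::new());
    println!("EOS accepted: {handled}");

    Ok(())
}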


@ -37,8 +37,6 @@ use std::collections::BTreeSet;
use std::net::{IpAddr, Ipv4Addr, SocketAddr, UdpSocket};
use std::sync::{Arc, Mutex};
use std::time::Duration;
use std::u16;
use std::u8;
const DEFAULT_HOST: Option<&str> = Some("127.0.0.1");
const DEFAULT_PORT: i32 = 5004;
@ -202,17 +200,17 @@ impl UdpSinkPadHandler {
futures::executor::block_on(async move {
let mut inner = self.0.lock().await;
if inner.clients.contains(&addr) {
gst::warning!(CAT, imp: imp, "Not adding client {addr:?} again");
gst::warning!(CAT, imp = imp, "Not adding client {addr:?} again");
return;
}
match inner.configure_client(&addr) {
Ok(()) => {
gst::info!(CAT, imp: imp, "Added client {addr:?}");
gst::info!(CAT, imp = imp, "Added client {addr:?}");
inner.clients.insert(addr);
}
Err(err) => {
gst::error!(CAT, imp: imp, "Failed to add client {addr:?}: {err}");
gst::error!(CAT, imp = imp, "Failed to add client {addr:?}: {err}");
imp.obj().post_error_message(err);
}
}
@ -223,16 +221,16 @@ impl UdpSinkPadHandler {
futures::executor::block_on(async move {
let mut inner = self.0.lock().await;
if inner.clients.take(&addr).is_none() {
gst::warning!(CAT, imp: imp, "Not removing unknown client {addr:?}");
gst::warning!(CAT, imp = imp, "Not removing unknown client {addr:?}");
return;
}
match inner.unconfigure_client(&addr) {
Ok(()) => {
gst::info!(CAT, imp: imp, "Removed client {addr:?}");
gst::info!(CAT, imp = imp, "Removed client {addr:?}");
}
Err(err) => {
gst::error!(CAT, imp: imp, "Failed to remove client {addr:?}: {err}");
gst::error!(CAT, imp = imp, "Failed to remove client {addr:?}: {err}");
imp.obj().post_error_message(err);
}
}
@ -243,9 +241,9 @@ impl UdpSinkPadHandler {
futures::executor::block_on(async move {
let mut inner = self.0.lock().await;
if new_clients.is_empty() {
gst::info!(CAT, imp: imp, "Clearing clients");
gst::info!(CAT, imp = imp, "Clearing clients");
} else {
gst::info!(CAT, imp: imp, "Replacing clients");
gst::info!(CAT, imp = imp, "Replacing clients");
}
let old_clients = std::mem::take(&mut inner.clients);
@ -257,19 +255,19 @@ impl UdpSinkPadHandler {
// client is already configured
inner.clients.insert(*addr);
} else if let Err(err) = inner.unconfigure_client(addr) {
gst::error!(CAT, imp: imp, "Failed to remove client {addr:?}: {err}");
gst::error!(CAT, imp = imp, "Failed to remove client {addr:?}: {err}");
res = Err(err);
} else {
gst::info!(CAT, imp: imp, "Removed client {addr:?}");
gst::info!(CAT, imp = imp, "Removed client {addr:?}");
}
}
for addr in new_clients.into_iter() {
if let Err(err) = inner.configure_client(&addr) {
gst::error!(CAT, imp: imp, "Failed to add client {addr:?}: {err}");
gst::error!(CAT, imp = imp, "Failed to add client {addr:?}: {err}");
res = Err(err);
} else {
gst::info!(CAT, imp: imp, "Added client {addr:?}");
gst::info!(CAT, imp = imp, "Added client {addr:?}");
inner.clients.insert(addr);
}
}
@ -321,7 +319,7 @@ impl PadSinkHandler for UdpSinkPadHandler {
event: gst::Event,
) -> BoxFuture<'static, bool> {
async move {
gst::debug!(CAT, obj: elem, "Handling {event:?}");
gst::debug!(CAT, obj = elem, "Handling {event:?}");
match event.view() {
EventView::Eos(_) => {
@ -345,7 +343,7 @@ impl PadSinkHandler for UdpSinkPadHandler {
}
fn sink_event(self, _pad: &gst::Pad, imp: &UdpSink, event: gst::Event) -> bool {
gst::debug!(CAT, imp: imp, "Handling {event:?}");
gst::debug!(CAT, imp = imp, "Handling {event:?}");
if let EventView::FlushStart(..) = event.view() {
block_on_or_add_sub_task(async move {
@ -556,7 +554,7 @@ impl UdpSinkPadHandlerInner {
};
if let Some(socket) = socket.as_mut() {
gst::log!(CAT, obj: elem, "Sending to {client:?}");
gst::log!(CAT, obj = elem, "Sending to {client:?}");
socket.send_to(&data, *client).await.map_err(|err| {
gst::element_error!(
elem,
@ -577,7 +575,7 @@ impl UdpSinkPadHandlerInner {
}
}
gst::log!(CAT, obj: elem, "Sent buffer {buffer:?} to all clients");
gst::log!(CAT, obj = elem, "Sent buffer {buffer:?} to all clients");
Ok(gst::FlowSuccess::Ok)
}
@ -587,7 +585,7 @@ impl UdpSinkPadHandlerInner {
let now = elem.current_running_time();
if let Ok(Some(delay)) = running_time.opt_checked_sub(now) {
gst::trace!(CAT, obj: elem, "sync: waiting {delay}");
gst::trace!(CAT, obj = elem, "sync: waiting {delay}");
runtime::timer::delay_for(delay.into()).await;
}
}
@ -598,7 +596,7 @@ impl UdpSinkPadHandlerInner {
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
if self.is_flushing {
gst::info!(CAT, obj: elem, "Discarding {buffer:?} (flushing)");
gst::info!(CAT, obj = elem, "Discarding {buffer:?} (flushing)");
return Err(gst::FlowError::Flushing);
}
@ -614,14 +612,14 @@ impl UdpSinkPadHandlerInner {
self.sync(elem, rtime).await;
if self.is_flushing {
gst::info!(CAT, obj: elem, "Discarding {buffer:?} (flushing)");
gst::info!(CAT, obj = elem, "Discarding {buffer:?} (flushing)");
return Err(gst::FlowError::Flushing);
}
}
}
gst::debug!(CAT, obj: elem, "Handling {buffer:?}");
gst::debug!(CAT, obj = elem, "Handling {buffer:?}");
self.render(elem, buffer).await.map_err(|err| {
element_error!(
@ -700,7 +698,7 @@ impl UdpSink {
};
let saddr = SocketAddr::new(bind_addr, bind_port as u16);
gst::debug!(CAT, imp: self, "Binding to {:?}", saddr);
gst::debug!(CAT, imp = self, "Binding to {:?}", saddr);
let socket = match family {
SocketFamily::Ipv4 => socket2::Socket::new(
@ -720,7 +718,7 @@ impl UdpSink {
Err(err) => {
gst::warning!(
CAT,
imp: self,
imp = self,
"Failed to create {} socket: {}",
match family {
SocketFamily::Ipv4 => "IPv4",
@ -773,7 +771,7 @@ impl UdpSink {
}
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Preparing");
gst::debug!(CAT, imp = self, "Preparing");
let mut settings = self.settings.lock().unwrap();
@ -791,36 +789,36 @@ impl UdpSink {
.prepare(self, socket, socket_v6, &settings)?;
*self.ts_ctx.lock().unwrap() = Some(ts_ctx);
gst::debug!(CAT, imp: self, "Started preparation");
gst::debug!(CAT, imp = self, "Started preparation");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp: self, "Unpreparing");
gst::debug!(CAT, imp = self, "Unpreparing");
self.sink_pad_handler.unprepare();
*self.ts_ctx.lock().unwrap() = None;
gst::debug!(CAT, imp: self, "Unprepared");
gst::debug!(CAT, imp = self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Stopping");
gst::debug!(CAT, imp = self, "Stopping");
self.sink_pad_handler.stop();
gst::debug!(CAT, imp: self, "Stopped");
gst::debug!(CAT, imp = self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Starting");
gst::debug!(CAT, imp = self, "Starting");
self.sink_pad_handler.start();
gst::debug!(CAT, imp: self, "Started");
gst::debug!(CAT, imp = self, "Started");
Ok(())
}
fn try_into_socket_addr(&self, host: &str, port: i32) -> Result<SocketAddr, ()> {
let addr: IpAddr = match host.parse() {
Err(err) => {
gst::error!(CAT, imp: self, "Failed to parse host {}: {}", host, err);
gst::error!(CAT, imp = self, "Failed to parse host {}: {}", host, err);
return Err(());
}
Ok(addr) => addr,
@ -828,7 +826,7 @@ impl UdpSink {
let port: u16 = match port.try_into() {
Err(err) => {
gst::error!(CAT, imp: self, "Invalid port {}: {}", port, err);
gst::error!(CAT, imp = self, "Invalid port {}: {}", port, err);
return Err(());
}
Ok(port) => port,
@ -1090,19 +1088,19 @@ impl ObjectImpl for UdpSink {
Err(()) => {
gst::error!(
CAT,
imp: self,
imp = self,
"Invalid socket address {addr}:{port}"
);
None
}
},
Err(err) => {
gst::error!(CAT, imp: self, "Invalid port {err}");
gst::error!(CAT, imp = self, "Invalid port {err}");
None
}
}
} else {
gst::error!(CAT, imp: self, "Invalid client {client}");
gst::error!(CAT, imp = self, "Invalid client {client}");
None
}
});
@ -1217,7 +1215,7 @@ impl ElementImpl for UdpSink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {


@ -27,17 +27,17 @@ use gst_net::*;
use once_cell::sync::Lazy;
use std::i32;
use std::io;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, UdpSocket};
use std::sync::Mutex;
use std::time::Duration;
use std::u16;
use crate::runtime::prelude::*;
use crate::runtime::{Async, Context, PadSrc, Task};
use crate::runtime::{task, Async, Context, PadSrc, Task, TaskState};
use crate::socket::{wrap_socket, GioSocketWrapper, Socket, SocketError, SocketRead};
use futures::channel::mpsc::{channel, Receiver, Sender};
use futures::pin_mut;
const DEFAULT_ADDRESS: Option<&str> = Some("0.0.0.0");
const DEFAULT_PORT: i32 = 5004;
@ -49,6 +49,13 @@ const DEFAULT_USED_SOCKET: Option<GioSocketWrapper> = None;
const DEFAULT_CONTEXT: &str = "";
const DEFAULT_CONTEXT_WAIT: Duration = Duration::ZERO;
const DEFAULT_RETRIEVE_SENDER_ADDRESS: bool = true;
const DEFAULT_MULTICAST_LOOP: bool = true;
const DEFAULT_BUFFER_SIZE: u32 = 0;
#[derive(Debug, Default)]
struct State {
event_sender: Option<Sender<gst::Event>>,
}
#[derive(Debug, Clone)]
struct Settings {
@ -62,6 +69,8 @@ struct Settings {
context: String,
context_wait: Duration,
retrieve_sender_address: bool,
multicast_loop: bool,
buffer_size: u32,
}
impl Default for Settings {
@ -77,6 +86,8 @@ impl Default for Settings {
context: DEFAULT_CONTEXT.into(),
context_wait: DEFAULT_CONTEXT_WAIT,
retrieve_sender_address: DEFAULT_RETRIEVE_SENDER_ADDRESS,
multicast_loop: DEFAULT_MULTICAST_LOOP,
buffer_size: DEFAULT_BUFFER_SIZE,
}
}
}
@ -114,7 +125,7 @@ impl PadSrcHandler for UdpSrcPadHandler {
type ElementImpl = UdpSrc;
fn src_event(self, pad: &gst::Pad, imp: &UdpSrc, event: gst::Event) -> bool {
gst::log!(CAT, obj: pad, "Handling {:?}", event);
gst::log!(CAT, obj = pad, "Handling {:?}", event);
use gst::EventView;
let ret = match event.view() {
@ -126,16 +137,16 @@ impl PadSrcHandler for UdpSrcPadHandler {
};
if ret {
gst::log!(CAT, obj: pad, "Handled {:?}", event);
gst::log!(CAT, obj = pad, "Handled {:?}", event);
} else {
gst::log!(CAT, obj: pad, "Didn't handle {:?}", event);
gst::log!(CAT, obj = pad, "Didn't handle {:?}", event);
}
ret
}
fn src_query(self, pad: &gst::Pad, imp: &UdpSrc, query: &mut gst::QueryRef) -> bool {
gst::log!(CAT, obj: pad, "Handling {:?}", query);
gst::log!(CAT, obj = pad, "Handling {:?}", query);
use gst::QueryViewMut;
let ret = match query.view_mut() {
@ -167,9 +178,9 @@ impl PadSrcHandler for UdpSrcPadHandler {
};
if ret {
gst::log!(CAT, obj: pad, "Handled {:?}", query);
gst::log!(CAT, obj = pad, "Handled {:?}", query);
} else {
gst::log!(CAT, obj: pad, "Didn't handle {:?}", query);
gst::log!(CAT, obj = pad, "Didn't handle {:?}", query);
}
ret
@ -182,16 +193,18 @@ struct UdpSrcTask {
retrieve_sender_address: bool,
need_initial_events: bool,
need_segment: bool,
event_receiver: Receiver<gst::Event>,
}
impl UdpSrcTask {
fn new(element: super::UdpSrc) -> Self {
fn new(element: super::UdpSrc, event_receiver: Receiver<gst::Event>) -> Self {
UdpSrcTask {
element,
socket: None,
retrieve_sender_address: DEFAULT_RETRIEVE_SENDER_ADDRESS,
need_initial_events: true,
need_segment: true,
event_receiver,
}
}
}
@ -204,7 +217,7 @@ impl TaskImpl for UdpSrcTask {
let udpsrc = self.element.imp();
let mut settings = udpsrc.settings.lock().unwrap();
gst::debug!(CAT, obj: self.element, "Preparing Task");
gst::debug!(CAT, obj = self.element, "Preparing Task");
self.retrieve_sender_address = settings.retrieve_sender_address;
@ -250,7 +263,7 @@ impl TaskImpl for UdpSrcTask {
};
let port = settings.port;
// TODO: TTL, multicast loopback, etc
// TODO: TTL etc
let saddr = if addr.is_multicast() {
let bind_addr = if addr.is_ipv4() {
IpAddr::V4(Ipv4Addr::UNSPECIFIED)
@ -261,7 +274,7 @@ impl TaskImpl for UdpSrcTask {
let saddr = SocketAddr::new(bind_addr, port as u16);
gst::debug!(
CAT,
obj: self.element,
obj = self.element,
"Binding to {:?} for multicast group {:?}",
saddr,
addr
@ -270,7 +283,7 @@ impl TaskImpl for UdpSrcTask {
saddr
} else {
let saddr = SocketAddr::new(addr, port as u16);
gst::debug!(CAT, obj: self.element, "Binding to {:?}", saddr);
gst::debug!(CAT, obj = self.element, "Binding to {:?}", saddr);
saddr
};
@ -302,6 +315,29 @@ impl TaskImpl for UdpSrcTask {
)
})?;
gst::debug!(
CAT,
obj = self.element,
"socket recv buffer size is {:?}",
socket.recv_buffer_size()
);
if settings.buffer_size != 0 {
gst::debug!(
CAT,
obj = self.element,
"changing the socket recv buffer size to {}",
settings.buffer_size
);
socket
.set_recv_buffer_size(settings.buffer_size as usize)
.map_err(|err| {
gst::error_msg!(
gst::ResourceError::OpenRead,
["Failed to set buffer_size: {}", err]
)
})?;
}
#[cfg(unix)]
{
socket.set_reuse_port(settings.reuse).map_err(|err| {
@ -339,6 +375,20 @@ impl TaskImpl for UdpSrcTask {
["Failed to join multicast group: {}", err]
)
})?;
socket
.as_ref()
.set_multicast_loop_v4(settings.multicast_loop)
.map_err(|err| {
gst::error_msg!(
gst::ResourceError::OpenWrite,
[
"Failed to set multicast loop to {}: {}",
settings.multicast_loop,
err
]
)
})?;
}
IpAddr::V6(addr) => {
socket.as_ref().join_multicast_v6(&addr, 0).map_err(|err| {
@ -347,6 +397,20 @@ impl TaskImpl for UdpSrcTask {
["Failed to join multicast group: {}", err]
)
})?;
socket
.as_ref()
.set_multicast_loop_v6(settings.multicast_loop)
.map_err(|err| {
gst::error_msg!(
gst::ResourceError::OpenWrite,
[
"Failed to set multicast loop to {}: {}",
settings.multicast_loop,
err
]
)
})?;
}
}
}
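Outside of the element, the two new settings map to the usual SO_RCVBUF and IP_MULTICAST_LOOP socket options. A rough stand-alone sketch of the same configuration with the socket2 crate (the address, group and sizes are illustrative, not the element defaults):

use socket2::{Domain, Protocol, Socket, Type};
use std::net::{Ipv4Addr, SocketAddr};

fn main() -> std::io::Result<()> {
    let socket = Socket::new(Domain::IPV4, Type::DGRAM, Some(Protocol::UDP))?;

    // "buffer-size": only touch the kernel receive buffer when non-zero.
    let buffer_size = 1 << 20;
    println!("default recv buffer: {:?}", socket.recv_buffer_size()?);
    if buffer_size != 0 {
        socket.set_recv_buffer_size(buffer_size)?;
    }

    // "loop": control whether our own multicast packets are looped back.
    let addr: SocketAddr = "0.0.0.0:0".parse().unwrap();
    socket.bind(&addr.into())?;
    socket.join_multicast_v4(&Ipv4Addr::new(239, 0, 0, 1), &Ipv4Addr::UNSPECIFIED)?;
    socket.set_multicast_loop_v4(false)?;

    Ok(())
}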
@ -400,7 +464,7 @@ impl TaskImpl for UdpSrcTask {
fn unprepare(&mut self) -> BoxFuture<'_, ()> {
async move {
gst::debug!(CAT, obj: self.element, "Unpreparing Task");
gst::debug!(CAT, obj = self.element, "Unpreparing Task");
let udpsrc = self.element.imp();
udpsrc.settings.lock().unwrap().used_socket = None;
self.element.notify("used-socket");
@ -410,12 +474,12 @@ impl TaskImpl for UdpSrcTask {
fn start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Starting task");
gst::log!(CAT, obj = self.element, "Starting task");
self.socket
.as_mut()
.unwrap()
.set_clock(self.element.clock(), self.element.base_time());
gst::log!(CAT, obj: self.element, "Task started");
gst::log!(CAT, obj = self.element, "Task started");
Ok(())
}
.boxed()
@ -423,55 +487,80 @@ impl TaskImpl for UdpSrcTask {
fn try_next(&mut self) -> BoxFuture<'_, Result<gst::Buffer, gst::FlowError>> {
async move {
self.socket
.as_mut()
.unwrap()
.try_next()
.await
.map(|(mut buffer, saddr)| {
if let Some(saddr) = saddr {
if self.retrieve_sender_address {
NetAddressMeta::add(
buffer.get_mut().unwrap(),
&gio::InetSocketAddress::from(saddr),
);
let event_fut = self.event_receiver.next().fuse();
let socket_fut = self.socket.as_mut().unwrap().try_next().fuse();
pin_mut!(event_fut);
pin_mut!(socket_fut);
futures::select! {
event_res = event_fut => match event_res {
Some(event) => {
gst::debug!(CAT, obj = self.element, "Handling element level event {event:?}");
match event.view() {
gst::EventView::Eos(_) => Err(gst::FlowError::Eos),
ev => {
gst::error!(CAT, obj = self.element, "Unexpected event {ev:?} on channel");
Err(gst::FlowError::Error)
}
}
}
buffer
})
.map_err(|err| {
gst::error!(CAT, obj: self.element, "Got error {:?}", err);
match err {
SocketError::Gst(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {}", err]
);
}
SocketError::Io(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("I/O error"),
["streaming stopped, I/O error {}", err]
);
}
None => {
gst::error!(CAT, obj = self.element, "Unexpected return on event channel");
Err(gst::FlowError::Error)
}
gst::FlowError::Error
})
},
socket_res = socket_fut => match socket_res {
Ok((mut buffer, saddr)) => {
if let Some(saddr) = saddr {
if self.retrieve_sender_address {
NetAddressMeta::add(
buffer.get_mut().unwrap(),
&gio::InetSocketAddress::from(saddr),
);
}
}
Ok(buffer)
},
Err(err) => {
gst::error!(CAT, obj = self.element, "Got error {err:#}");
match err {
SocketError::Gst(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {err}"]
);
}
SocketError::Io(err) => {
gst::element_error!(
self.element,
gst::StreamError::Failed,
("I/O error"),
["streaming stopped, I/O error {err}"]
);
}
}
Err(gst::FlowError::Error)
}
},
}
}
.boxed()
}
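The Ok branch above keeps the retrieve-sender-address behaviour: when enabled, the peer address is attached to the outgoing buffer as a NetAddressMeta. A small sketch of that meta API on its own, assuming gstreamer-rs with the gstreamer-net and gio crates (the address is made up):

use gst_net::NetAddressMeta;

fn main() {
    gst::init().unwrap();

    let saddr: std::net::SocketAddr = "127.0.0.1:5004".parse().unwrap();
    let mut buffer = gst::Buffer::with_size(1024).unwrap();

    // Attach the sender address to the buffer, as the source does per packet.
    NetAddressMeta::add(
        buffer.get_mut().unwrap(),
        &gio::InetSocketAddress::from(saddr),
    );

    // Downstream elements can recover it again from the meta.
    let meta = buffer.meta::<NetAddressMeta>().unwrap();
    println!("sender: {:?}", meta.addr());
}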
fn handle_item(&mut self, buffer: gst::Buffer) -> BoxFuture<'_, Result<(), gst::FlowError>> {
async {
gst::log!(CAT, obj: self.element, "Handling {:?}", buffer);
gst::log!(CAT, obj = self.element, "Handling {:?}", buffer);
let udpsrc = self.element.imp();
if self.need_initial_events {
gst::debug!(CAT, obj: self.element, "Pushing initial events");
gst::debug!(CAT, obj = self.element, "Pushing initial events");
let stream_id =
format!("{:08x}{:08x}", rand::random::<u32>(), rand::random::<u32>());
@ -502,14 +591,14 @@ impl TaskImpl for UdpSrcTask {
let res = udpsrc.src_pad.push(buffer).await.map(drop);
match res {
Ok(_) => gst::log!(CAT, obj: self.element, "Successfully pushed buffer"),
Err(gst::FlowError::Flushing) => gst::debug!(CAT, obj: self.element, "Flushing"),
Ok(_) => gst::log!(CAT, obj = self.element, "Successfully pushed buffer"),
Err(gst::FlowError::Flushing) => gst::debug!(CAT, obj = self.element, "Flushing"),
Err(gst::FlowError::Eos) => {
gst::debug!(CAT, obj: self.element, "EOS");
gst::debug!(CAT, obj = self.element, "EOS");
udpsrc.src_pad.push_event(gst::event::Eos::new()).await;
}
Err(err) => {
gst::error!(CAT, obj: self.element, "Got error {}", err);
gst::error!(CAT, obj = self.element, "Got error {}", err);
gst::element_error!(
self.element,
gst::StreamError::Failed,
@ -526,10 +615,10 @@ impl TaskImpl for UdpSrcTask {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Stopping task");
gst::log!(CAT, obj = self.element, "Stopping task");
self.need_initial_events = true;
self.need_segment = true;
gst::log!(CAT, obj: self.element, "Task stopped");
gst::log!(CAT, obj = self.element, "Task stopped");
Ok(())
}
.boxed()
@ -537,13 +626,47 @@ impl TaskImpl for UdpSrcTask {
fn flush_stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(CAT, obj: self.element, "Stopping task flush");
gst::log!(CAT, obj = self.element, "Stopping task flush");
self.need_segment = true;
gst::log!(CAT, obj: self.element, "Stopped task flush");
gst::log!(CAT, obj = self.element, "Stopped task flush");
Ok(())
}
.boxed()
}
fn handle_loop_error(&mut self, err: gst::FlowError) -> BoxFuture<'_, task::Trigger> {
async move {
match err {
gst::FlowError::Flushing => {
gst::debug!(CAT, obj = self.element, "Flushing");
task::Trigger::FlushStart
}
gst::FlowError::Eos => {
gst::debug!(CAT, obj = self.element, "EOS");
self.element
.imp()
.src_pad
.push_event(gst::event::Eos::new())
.await;
task::Trigger::Stop
}
err => {
gst::error!(CAT, obj = self.element, "Got error {err}");
gst::element_error!(
&self.element,
gst::StreamError::Failed,
("Internal data stream error"),
["streaming stopped, reason {}", err]
);
task::Trigger::Error
}
}
}
.boxed()
}
}
pub struct UdpSrc {
@ -551,6 +674,7 @@ pub struct UdpSrc {
task: Task,
configured_caps: Mutex<Option<gst::Caps>>,
settings: Mutex<Settings>,
state: Mutex<State>,
}
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
@ -563,7 +687,7 @@ static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
impl UdpSrc {
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Preparing");
gst::debug!(CAT, imp = self, "Preparing");
let settings = self.settings.lock().unwrap();
let context =
@ -575,42 +699,52 @@ impl UdpSrc {
})?;
drop(settings);
let (sender, receiver) = channel(1);
*self.configured_caps.lock().unwrap() = None;
self.task
.prepare(UdpSrcTask::new(self.obj().clone()), context)
.prepare(UdpSrcTask::new(self.obj().clone(), receiver), context)
.block_on()?;
gst::debug!(CAT, imp: self, "Prepared");
let mut state = self.state.lock().unwrap();
state.event_sender = Some(sender);
drop(state);
gst::debug!(CAT, imp = self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(CAT, imp: self, "Unpreparing");
gst::debug!(CAT, imp = self, "Unpreparing");
self.task.unprepare().block_on().unwrap();
gst::debug!(CAT, imp: self, "Unprepared");
gst::debug!(CAT, imp = self, "Unprepared");
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Stopping");
gst::debug!(CAT, imp = self, "Stopping");
self.task.stop().block_on()?;
gst::debug!(CAT, imp: self, "Stopped");
gst::debug!(CAT, imp = self, "Stopped");
Ok(())
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Starting");
gst::debug!(CAT, imp = self, "Starting");
self.task.start().block_on()?;
gst::debug!(CAT, imp: self, "Started");
gst::debug!(CAT, imp = self, "Started");
Ok(())
}
fn pause(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(CAT, imp: self, "Pausing");
gst::debug!(CAT, imp = self, "Pausing");
self.task.pause().block_on()?;
gst::debug!(CAT, imp: self, "Paused");
gst::debug!(CAT, imp = self, "Paused");
Ok(())
}
fn state(&self) -> TaskState {
self.task.state()
}
}
#[glib::object_subclass]
@ -628,6 +762,7 @@ impl ObjectSubclass for UdpSrc {
task: Task::default(),
configured_caps: Default::default(),
settings: Default::default(),
state: Default::default(),
}
}
}
@ -679,6 +814,18 @@ impl ObjectImpl for UdpSrc {
.blurb("Whether to retrieve the sender address and add it to buffers as meta. Disabling this might result in minor performance improvements in certain scenarios")
.default_value(DEFAULT_RETRIEVE_SENDER_ADDRESS)
.build(),
glib::ParamSpecBoolean::builder("loop")
.nick("Loop")
.blurb("Set the multicast loop parameter")
.default_value(DEFAULT_MULTICAST_LOOP)
.build(),
glib::ParamSpecUInt::builder("buffer-size")
.nick("Buffer Size")
.blurb("Size of the kernel receive buffer in bytes, 0=default")
.maximum(u32::MAX)
.default_value(DEFAULT_BUFFER_SIZE)
.build(),
];
#[cfg(not(windows))]
@ -745,6 +892,12 @@ impl ObjectImpl for UdpSrc {
"retrieve-sender-address" => {
settings.retrieve_sender_address = value.get().expect("type checked upstream");
}
"loop" => {
settings.multicast_loop = value.get().expect("type checked upstream");
}
"buffer-size" => {
settings.buffer_size = value.get().expect("type checked upstream");
}
_ => unimplemented!(),
}
}
@ -770,6 +923,8 @@ impl ObjectImpl for UdpSrc {
"context" => settings.context.to_value(),
"context-wait" => (settings.context_wait.as_millis() as u32).to_value(),
"retrieve-sender-address" => settings.retrieve_sender_address.to_value(),
"loop" => settings.multicast_loop.to_value(),
"buffer-size" => settings.buffer_size.to_value(),
_ => unimplemented!(),
}
}
@ -820,7 +975,7 @@ impl ElementImpl for UdpSrc {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::trace!(CAT, imp: self, "Changing state {:?}", transition);
gst::trace!(CAT, imp = self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {
@ -858,4 +1013,31 @@ impl ElementImpl for UdpSrc {
Ok(success)
}
fn send_event(&self, event: gst::Event) -> bool {
use gst::EventView;
gst::debug!(CAT, imp = self, "Handling element level event {event:?}");
match event.view() {
EventView::Eos(_) => {
if self.state() != TaskState::Started {
if let Err(err) = self.start() {
gst::error!(CAT, imp = self, "Failed to start task thread {err:?}");
}
}
if self.state() == TaskState::Started {
let mut state = self.state.lock().unwrap();
if let Some(event_tx) = state.event_sender.as_mut() {
return event_tx.try_send(event.clone()).is_ok();
}
}
false
}
_ => self.parent_send_event(event),
}
}
}
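For completeness, the two properties added in this file can be configured like any other element property at construction time. A hedged usage sketch, assuming the element is registered as "ts-udpsrc" and the threadshare plugin is installed:

use gst::prelude::*;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    gst::init()?;

    // "buffer-size" requests a larger kernel receive buffer (0 keeps the OS
    // default); "loop" disables multicast loopback for this source.
    let src = gst::ElementFactory::make("ts-udpsrc")
        .property("buffer-size", 2_097_152u32)
        .property("loop", false)
        .build()?;

    println!("created {}", src.name());
    Ok(())
}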


@ -88,7 +88,7 @@ mod imp_src {
type ElementImpl = ElementSrcTest;
fn src_event(self, pad: &gst::Pad, imp: &ElementSrcTest, event: gst::Event) -> bool {
gst::log!(SRC_CAT, obj: pad, "Handling {:?}", event);
gst::log!(SRC_CAT, obj = pad, "Handling {:?}", event);
let ret = match event.view() {
EventView::FlushStart(..) => {
@ -100,9 +100,9 @@ mod imp_src {
};
if ret {
gst::log!(SRC_CAT, obj: pad, "Handled {:?}", event);
gst::log!(SRC_CAT, obj = pad, "Handled {:?}", event);
} else {
gst::log!(SRC_CAT, obj: pad, "Didn't handle {:?}", event);
gst::log!(SRC_CAT, obj = pad, "Didn't handle {:?}", event);
}
ret
@ -127,7 +127,7 @@ mod imp_src {
while let Ok(Some(_item)) = self.receiver.try_next() {}
}
async fn push_item(&self, item: Item) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::debug!(SRC_CAT, obj: self.element, "Handling {:?}", item);
gst::debug!(SRC_CAT, obj = self.element, "Handling {:?}", item);
let elementsrctest = self.element.imp();
match item {
@ -148,7 +148,7 @@ mod imp_src {
fn try_next(&mut self) -> BoxFuture<'_, Result<Item, gst::FlowError>> {
async move {
self.receiver.next().await.ok_or_else(|| {
gst::log!(SRC_CAT, obj: self.element, "SrcPad channel aborted");
gst::log!(SRC_CAT, obj = self.element, "SrcPad channel aborted");
gst::FlowError::Eos
})
}
@ -159,9 +159,9 @@ mod imp_src {
async move {
let res = self.push_item(item).await.map(drop);
match res {
Ok(_) => gst::log!(SRC_CAT, obj: self.element, "Successfully pushed item"),
Ok(_) => gst::log!(SRC_CAT, obj = self.element, "Successfully pushed item"),
Err(gst::FlowError::Flushing) => {
gst::debug!(SRC_CAT, obj: self.element, "Flushing")
gst::debug!(SRC_CAT, obj = self.element, "Flushing")
}
Err(err) => panic!("Got error {err}"),
}
@ -173,9 +173,9 @@ mod imp_src {
fn stop(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(SRC_CAT, obj: self.element, "Stopping task");
gst::log!(SRC_CAT, obj = self.element, "Stopping task");
self.flush();
gst::log!(SRC_CAT, obj: self.element, "Task stopped");
gst::log!(SRC_CAT, obj = self.element, "Task stopped");
Ok(())
}
.boxed()
@ -183,9 +183,9 @@ mod imp_src {
fn flush_start(&mut self) -> BoxFuture<'_, Result<(), gst::ErrorMessage>> {
async move {
gst::log!(SRC_CAT, obj: self.element, "Starting task flush");
gst::log!(SRC_CAT, obj = self.element, "Starting task flush");
self.flush();
gst::log!(SRC_CAT, obj: self.element, "Task flush started");
gst::log!(SRC_CAT, obj = self.element, "Task flush started");
Ok(())
}
.boxed()
@ -219,7 +219,7 @@ mod imp_src {
}
fn prepare(&self) -> Result<(), gst::ErrorMessage> {
gst::debug!(SRC_CAT, imp: self, "Preparing");
gst::debug!(SRC_CAT, imp = self, "Preparing");
let settings = self.settings.lock().unwrap().clone();
let context =
@ -240,36 +240,36 @@ mod imp_src {
)
.block_on()?;
gst::debug!(SRC_CAT, imp: self, "Prepared");
gst::debug!(SRC_CAT, imp = self, "Prepared");
Ok(())
}
fn unprepare(&self) {
gst::debug!(SRC_CAT, imp: self, "Unpreparing");
gst::debug!(SRC_CAT, imp = self, "Unpreparing");
*self.sender.lock().unwrap() = None;
self.task.unprepare().block_on().unwrap();
gst::debug!(SRC_CAT, imp: self, "Unprepared");
gst::debug!(SRC_CAT, imp = self, "Unprepared");
}
fn stop(&self) {
gst::debug!(SRC_CAT, imp: self, "Stopping");
gst::debug!(SRC_CAT, imp = self, "Stopping");
self.task.stop().await_maybe_on_context().unwrap();
gst::debug!(SRC_CAT, imp: self, "Stopped");
gst::debug!(SRC_CAT, imp = self, "Stopped");
}
fn start(&self) {
gst::debug!(SRC_CAT, imp: self, "Starting");
gst::debug!(SRC_CAT, imp = self, "Starting");
self.task.start().await_maybe_on_context().unwrap();
gst::debug!(SRC_CAT, imp: self, "Started");
gst::debug!(SRC_CAT, imp = self, "Started");
}
fn pause(&self) {
gst::debug!(SRC_CAT, imp: self, "Pausing");
gst::debug!(SRC_CAT, imp = self, "Pausing");
self.task.pause().block_on().unwrap();
gst::debug!(SRC_CAT, imp: self, "Paused");
gst::debug!(SRC_CAT, imp = self, "Paused");
}
}
@ -366,7 +366,7 @@ mod imp_src {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::log!(SRC_CAT, imp: self, "Changing state {:?}", transition);
gst::log!(SRC_CAT, imp = self, "Changing state {:?}", transition);
match transition {
gst::StateChange::NullToReady => {
@ -464,7 +464,7 @@ mod imp_sink {
}
fn sink_event(self, pad: &gst::Pad, imp: &ElementSinkTest, event: gst::Event) -> bool {
gst::debug!(SINK_CAT, obj: pad, "Handling non-serialized {:?}", event);
gst::debug!(SINK_CAT, obj = pad, "Handling non-serialized {:?}", event);
match event.view() {
EventView::FlushStart(..) => {
@ -482,7 +482,7 @@ mod imp_sink {
event: gst::Event,
) -> BoxFuture<'static, bool> {
async move {
gst::log!(SINK_CAT, obj: pad, "Handling serialized {:?}", event);
gst::log!(SINK_CAT, obj = pad, "Handling serialized {:?}", event);
let imp = elem.imp();
if let EventView::FlushStop(..) = event.view() {
@ -505,7 +505,7 @@ mod imp_sink {
impl ElementSinkTest {
async fn forward_item(&self, item: Item) -> Result<gst::FlowSuccess, gst::FlowError> {
if !self.flushing.load(Ordering::SeqCst) {
gst::debug!(SINK_CAT, imp: self, "Forwarding {:?}", item);
gst::debug!(SINK_CAT, imp = self, "Forwarding {:?}", item);
let mut sender = self
.sender
.lock()
@ -521,7 +521,7 @@ mod imp_sink {
} else {
gst::debug!(
SINK_CAT,
imp: self,
imp = self,
"Not forwarding {:?} due to flushing",
item
);
@ -530,31 +530,31 @@ mod imp_sink {
}
fn start(&self) {
gst::debug!(SINK_CAT, imp: self, "Starting");
gst::debug!(SINK_CAT, imp = self, "Starting");
self.flushing.store(false, Ordering::SeqCst);
gst::debug!(SINK_CAT, imp: self, "Started");
gst::debug!(SINK_CAT, imp = self, "Started");
}
fn stop(&self) {
gst::debug!(SINK_CAT, imp: self, "Stopping");
gst::debug!(SINK_CAT, imp = self, "Stopping");
self.flushing.store(true, Ordering::SeqCst);
gst::debug!(SINK_CAT, imp: self, "Stopped");
gst::debug!(SINK_CAT, imp = self, "Stopped");
}
pub fn push_flush_start(&self) {
gst::debug!(SINK_CAT, imp: self, "Pushing FlushStart");
gst::debug!(SINK_CAT, imp = self, "Pushing FlushStart");
self.sink_pad
.gst_pad()
.push_event(gst::event::FlushStart::new());
gst::debug!(SINK_CAT, imp: self, "FlushStart pushed");
gst::debug!(SINK_CAT, imp = self, "FlushStart pushed");
}
pub fn push_flush_stop(&self) {
gst::debug!(SINK_CAT, imp: self, "Pushing FlushStop");
gst::debug!(SINK_CAT, imp = self, "Pushing FlushStop");
self.sink_pad
.gst_pad()
.push_event(gst::event::FlushStop::new(true));
gst::debug!(SINK_CAT, imp: self, "FlushStop pushed");
gst::debug!(SINK_CAT, imp = self, "FlushStop pushed");
}
}
@ -657,7 +657,7 @@ mod imp_sink {
&self,
transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
gst::log!(SINK_CAT, imp: self, "Changing state {:?}", transition);
gst::log!(SINK_CAT, imp = self, "Changing state {:?}", transition);
if let gst::StateChange::PausedToReady = transition {
self.stop();


@ -218,7 +218,7 @@ fn multiple_contexts_proxy() {
.name(format!("proxysrc-{pipeline_index}").as_str())
.property(
"context",
&format!("context-{}", (pipeline_index as u32) % CONTEXT_NB),
format!("context-{}", (pipeline_index as u32) % CONTEXT_NB),
)
.property("proxy-context", format!("proxy-{pipeline_index}"))
.build()
@ -364,7 +364,7 @@ fn eos() {
sink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
.new_sample(move |appsink| {
gst::debug!(CAT, obj: appsink, "eos: pulling sample");
gst::debug!(CAT, obj = appsink, "eos: pulling sample");
let _ = appsink.pull_sample().unwrap();
sample_notifier.send(()).unwrap();
@ -376,7 +376,7 @@ fn eos() {
);
fn push_buffer(src: &gst::Element) -> bool {
gst::debug!(CAT, obj: src, "eos: pushing buffer");
gst::debug!(CAT, obj = src, "eos: pushing buffer");
src.emit_by_name::<bool>("push-buffer", &[&gst::Buffer::from_slice(vec![0; 1024])])
}
@ -498,7 +498,7 @@ fn premature_shutdown() {
sink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
.new_sample(move |appsink| {
gst::debug!(CAT, obj: appsink, "premature_shutdown: pulling sample");
gst::debug!(CAT, obj = appsink, "premature_shutdown: pulling sample");
let _sample = appsink.pull_sample().unwrap();
appsink_sender.send(()).unwrap();
@ -511,7 +511,7 @@ fn premature_shutdown() {
fn push_buffer(src: &gst::Element, intent: &str) -> bool {
gst::debug!(
CAT,
obj: src,
obj = src,
"premature_shutdown: pushing buffer {}",
intent
);
@ -609,6 +609,8 @@ fn premature_shutdown() {
}
#[test]
// FIXME: racy: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/250
#[ignore]
fn socket_play_null_play() {
use gio::{
prelude::SocketExt, InetAddress, InetSocketAddress, SocketFamily, SocketProtocol,


@ -76,6 +76,8 @@ fn test_client_management() {
}
#[test]
// FIXME: racy: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs/-/issues/250
#[ignore]
fn test_chain() {
init();


@ -1,7 +1,7 @@
project('gst-plugins-rs',
'rust',
'c',
version: '0.12.0-alpha.1',
version: '0.14.0-alpha.1',
meson_version : '>= 1.1')
# dependencies.py needs a toml parsing module
@ -86,7 +86,7 @@ if get_option('tests').allowed()
deps += [['gstreamer-check-1.0', 'gstreamer', 'gst_check_dep', 'gst_check']]
endif
if get_option('gtk4').allowed()
deps += [['gstreamer-gl-1.0', 'gst-plugins-base', 'gst_gl_dep', 'gstgl']]
deps += [['gstreamer-gl-1.0', 'gst-plugins-base', 'gstgl_dep', 'gstgl', get_option('gtk4')]]
endif
if get_option('threadshare').allowed() or get_option('rtsp').allowed()
deps += [['gstreamer-net-1.0', 'gstreamer', 'gst_net_dep', 'gst_net']]
@ -97,7 +97,7 @@ deps_cache += {'glib-2.0': glib_dep}
foreach d: deps
dep = dependency(d[0], version: gst_req,
fallback : [d[1], d[2]])
fallback : [d[1], d[2]], required: d.get(4, true))
set_variable(d[2], dep)
deps_cache += {d[0]: dep}
if dep.type_name() == 'internal'
@ -118,6 +118,7 @@ plugins = {
'spotify': {'library': 'libgstspotify'},
'file': {'library': 'libgstrsfile'},
'originalbuffer': {'library': 'libgstoriginalbuffer'},
# sodium can have an external dependency, see below
'threadshare': {
'library': 'libgstthreadshare',
@ -144,6 +145,7 @@ plugins = {
'library': 'libgstaws',
'extra-deps': {'openssl': ['>=1.1']},
},
'mpegtslive': {'library': 'libgstmpegtslive'},
'hlssink3': {'library': 'libgsthlssink3'},
'ndi': {'library': 'libgstndi'},
'onvif': {
@ -170,6 +172,7 @@ plugins = {
'library': 'libgsturiplaylistbin',
'examples': ['playlist'],
'features': ['clap'],
'gst-version': '>=1.23.90',
},
'cdg': {'library': 'libgstcdg'},
@ -183,7 +186,7 @@ plugins = {
},
'dav1d': {
'library': 'libgstdav1d',
'extra-deps': {'dav1d': ['>=1.0', '<1.3']},
'extra-deps': {'dav1d': ['>=1.3']},
},
'ffv1': {'library': 'libgstffv1'},
'flavors': {'library': 'libgstrsflv'},
@ -202,34 +205,11 @@ plugins = {
'library': 'libgstrsvideofx',
'extra-deps': {'cairo-gobject': []},
},
'gopbuffer': {'library': 'libgstgopbuffer'},
'quinn': {'library': 'libgstquinn'},
'speechmatics': {'library': 'libgstspeechmatics'},
}
if get_option('examples').allowed()
plugins += {
'fallbackswitch': {
'library': 'libgstfallbackswitch',
'examples': ['gtk-fallbackswitch'],
'features': ['gtk', 'gio', 'gst-plugin-gtk4'],
},
'livesync': {
'library': 'libgstlivesync',
'examples': ['gtk-livesync'],
'features': ['gtk', 'gio', 'gst-plugin-gtk4'],
},
'togglerecord': {
'library': 'libgsttogglerecord',
'examples': ['gtk-recording'],
'features': ['gtk', 'gio', 'gst-plugin-gtk4'],
},
}
else
plugins += {
'fallbackswitch': { 'library': 'libgstfallbackswitch'},
'livesync': { 'library': 'libgstlivesync'},
'togglerecord': { 'library': 'libgsttogglerecord'},
}
endif
# Won't build on platforms where it bundles the sources because of:
# https://github.com/qnighy/libwebp-sys2-rs/issues/12
# the fix is:
@ -284,8 +264,8 @@ endif
if get_option('gtk4').allowed()
gtk4_features = []
gl_winsys = gst_gl_dep.get_variable('gl_winsys').split()
gl_platforms = gst_gl_dep.get_variable('gl_platforms').split()
gl_winsys = gstgl_dep.get_variable('gl_winsys').split()
gl_platforms = gstgl_dep.get_variable('gl_platforms').split()
if 'wayland' in gl_winsys
gtk4_features += 'wayland'
endif
@ -301,13 +281,61 @@ if get_option('gtk4').allowed()
gtk4_features += 'winegl'
endif
endif
gst_allocators_dep = dependency('gstreamer-allocators-1.0', version: '>=1.24', required: false)
gtk_dep = dependency('gtk4', version: '>=4.6', required: get_option('gtk4'))
if gtk_dep.found()
if host_system == 'linux' and gtk_dep.version().version_compare('>=4.14') and \
gst_allocators_dep.found() and 'wayland' in gtk4_features
gtk4_features += 'dmabuf'
endif
if gtk_dep.version().version_compare('>=4.14')
gtk4_features += 'gtk_v4_14'
elif gtk_dep.version().version_compare('>=4.12')
gtk4_features += 'gtk_v4_12'
elif gtk_dep.version().version_compare('>=4.10')
gtk4_features += 'gtk_v4_10'
endif
plugins += {
'gtk4': {
'library': 'libgstgtk4',
'examples': ['gtksink'],
'extra-deps': {'gtk4': ['>=4.6']},
'features': gtk4_features,
},
}
endif
endif
examples_opt = get_option('examples')
if examples_opt.allowed() and 'gtk4' in plugins
plugins += {
'gtk4': {
'library': 'libgstgtk4',
'examples': ['gtksink'],
'extra-deps': {'gtk4': ['>=4.6']},
'features': gtk4_features,
'fallbackswitch': {
'library': 'libgstfallbackswitch',
'examples_features': {
'gtk-fallbackswitch': ['gtk', 'gio', 'gst-plugin-gtk4'],
},
},
'livesync': {
'library': 'libgstlivesync',
'examples_features': {
'gtk-livesync': ['gtk', 'gio', 'gst-plugin-gtk4'],
}
},
'togglerecord': {
'library': 'libgsttogglerecord',
'examples_features': {
'gtk-recording': ['gtk', 'gio', 'gst-plugin-gtk4'],
}
},
}
else
plugins += {
'fallbackswitch': { 'library': 'libgstfallbackswitch'},
'livesync': { 'library': 'libgstlivesync'},
'togglerecord': { 'library': 'libgsttogglerecord'},
}
endif
@ -373,51 +401,107 @@ endif
foreach plugin_name, details: plugins
plugin_opt = get_variable(f'@plugin_name@_option', get_option(plugin_name))
if plugin_opt.allowed()
plugin_deps_found = true
foreach dep_name, dep_ver: details.get('extra-deps', {})
if not plugin_opt.allowed()
debug(f'@plugin_name@ is disabled')
continue
endif
plugin_deps_found = true
# Check whether we have all needed deps
foreach dep_name, dep_ver: details.get('extra-deps', {})
if dep_ver.length() != 0
dep = dependency(dep_name, version: dep_ver, required: plugin_opt)
else
dep = dependency(dep_name, required: plugin_opt)
endif
deps_cache += {dep_name: dep}
if not dep.found()
if dep_ver.length() != 0
dep = dependency(dep_name, version: dep_ver, required: plugin_opt)
dep_ver_msg = ' '.join(dep_ver)
debug(f'@plugin_name@ dependency @dep_name@ @dep_ver_msg@ not found, skipping')
else
dep = dependency(dep_name, required: plugin_opt)
debug(f'@plugin_name@ dependency @dep_name@ not found, skipping')
endif
deps_cache += {dep_name: dep}
if not dep.found()
plugin_deps_found = false
plugin_deps_found = false
break
endif
endforeach
if not plugin_deps_found
continue
endif
# Validate gst-plugin features
plugin_features = details.get('features', [])
foreach feature: plugin_features
if feature.startswith('gst-plugin') and not packages.contains(feature)
msg = f'@plugin_name@ required feature @feature@ not found'
if plugin_opt.enabled()
error(msg)
endif
message(msg + ', skipping')
plugin_deps_found = false
break
endif
endforeach
if not plugin_deps_found
continue
endif
# Check if we have the required GStreamer version
if details.has_key('gst-version') and not \
deps_cache['gstreamer-1.0'].version().version_compare(details['gst-version'])
msg = '@0@ requires gstreamer version @1@'.format(plugin_name, details['gst-version'])
if plugin_opt.enabled()
error(msg)
endif
message(msg + ', skipping')
continue
endif
# Parse and enable examples
plugin_examples = details.get('examples', [])
foreach example: plugin_examples
examples += example
endforeach
plugin_examples_features = details.get('examples_features', {})
foreach example, examples_features: plugin_examples_features
example_deps_found = true
foreach feature: examples_features
if feature.startswith('gst-plugin') and not packages.contains(feature)
msg = f'@plugin_name@ example @example@ required feature @feature@ not found'
if plugin_opt.enabled() and examples_opt.enabled()
error(msg)
endif
message(msg + ', skipping')
example_deps_found = false
break
endif
endforeach
plugin_features = details.get('features', [])
if plugin_deps_found
# Validate gst-plugin features
foreach feature: plugin_features
if feature.startswith('gst-plugin') and not packages.contains(feature)
plugin_deps_found = false
break
endif
endforeach
features += examples_features
if example_deps_found
examples += example
endif
if plugin_deps_found
packages += f'gst-plugin-@plugin_name@'
features += plugin_features
extra_features = run_command('dependencies.py', meson.current_source_dir(), plugin_name,
'--feature', '--gst-version', gst_dep.version(), capture: true, check: true).stdout().strip()
if extra_features != ''
features += extra_features.split(',')
endif
endforeach
lib = details.get('library')
# No 'lib' suffix with MSVC
if cc.get_argument_syntax() == 'msvc'
lib = lib.substring(3)
endif
if default_library in ['shared', 'both']
output += [lib + '.' + ext_dynamic]
endif
if default_library in ['static', 'both']
output += [lib + '.' + ext_static]
endif
endif
packages += f'gst-plugin-@plugin_name@'
features += plugin_features
extra_features = run_command('dependencies.py', meson.current_source_dir(), plugin_name,
'--feature', '--gst-version', gst_dep.version(), capture: true, check: true).stdout().strip()
if extra_features != ''
features += extra_features.split(',')
endif
lib = details.get('library')
# No 'lib' suffix with MSVC
if cc.get_argument_syntax() == 'msvc'
lib = lib.substring(3)
endif
if default_library in ['shared', 'both']
output += [lib + '.' + ext_dynamic]
endif
if default_library in ['static', 'both']
output += [lib + '.' + ext_static]
endif
endforeach
@ -489,6 +573,16 @@ foreach plugin : plugins
plugin_name = plugin_name.substring(3)
endif
plugin_display_name = plugin_name
if plugin_name.startswith('gst')
plugin_display_name = plugin_name.substring(3)
endif
if plugin_display_name in plugin_names
# When default_library=both plugins are duplicated.
continue
endif
plugin_names += plugin_display_name
option_name = plugin_name.substring(3)
if option_name.startswith('rs')
option_name = option_name.substring(2)
@ -533,13 +627,7 @@ foreach plugin : plugins
warning('Static plugin @0@ is known to fail. It will not be included in libgstreamer-full.'.format(plugin_name))
else
gst_plugins += dep
pc_files += [plugin_name + '.pc']
if plugin_name.startswith('gst')
plugin_names += [plugin_name.substring(3)]
else
plugin_names += [plugin_name]
endif
endif
endforeach


@ -6,9 +6,12 @@ option('claxon', type: 'feature', value: 'auto', description: 'Build claxon plug
option('csound', type: 'feature', value: 'auto', description: 'Build csound plugin')
option('lewton', type: 'feature', value: 'auto', description: 'Build lewton plugin')
option('spotify', type: 'feature', value: 'auto', description: 'Build spotify plugin')
option('speechmatics', type: 'feature', value: 'auto', description: 'Build speechmatics plugin')
# generic
option('file', type: 'feature', value: 'auto', description: 'Build file plugin')
option('originalbuffer', type: 'feature', value: 'auto', description: 'Build originalbuffer plugin')
option('gopbuffer', type: 'feature', value: 'auto', description: 'Build gopbuffer plugin')
option('sodium', type: 'feature', value: 'auto', description: 'Build sodium plugin')
option('sodium-source', type: 'combo',
choices: ['system', 'built-in'], value: 'built-in',
@ -24,6 +27,7 @@ option('mp4', type: 'feature', value: 'auto', description: 'Build mp4 plugin')
# net
option('aws', type: 'feature', value: 'auto', description: 'Build aws plugin')
option('hlssink3', type: 'feature', value: 'auto', description: 'Build hlssink3 plugin')
option('mpegtslive', type: 'feature', value: 'auto', description: 'Build mpegtslive plugin')
option('ndi', type: 'feature', value: 'auto', description: 'Build ndi plugin')
option('onvif', type: 'feature', value: 'auto', description: 'Build onvif plugin')
option('raptorq', type: 'feature', value: 'auto', description: 'Build raptorq plugin')
@ -32,6 +36,7 @@ option('rtsp', type: 'feature', value: 'auto', description: 'Build rtsp plugin')
option('rtp', type: 'feature', value: 'auto', description: 'Build rtp plugin')
option('webrtc', type: 'feature', value: 'auto', yield: true, description: 'Build webrtc plugin')
option('webrtchttp', type: 'feature', value: 'auto', description: 'Build webrtchttp plugin')
option('quinn', type: 'feature', value: 'auto', description: 'Build quinn plugin')
# text
option('textahead', type: 'feature', value: 'auto', description: 'Build textahead plugin')


@ -11,6 +11,7 @@
pub use byteorder::{BigEndian, LittleEndian, ReadBytesExt, WriteBytesExt};
use std::io;
#[allow(unused)]
pub trait ReadBytesExtShort: io::Read {
fn read_u16le(&mut self) -> io::Result<u16> {
self.read_u16::<LittleEndian>()
@ -76,6 +77,7 @@ pub trait ReadBytesExtShort: io::Read {
impl<T> ReadBytesExtShort for T where T: ReadBytesExt {}
#[allow(unused)]
pub trait WriteBytesExtShort: WriteBytesExt {
fn write_u16le(&mut self, n: u16) -> io::Result<()> {
self.write_u16::<LittleEndian>(n)


@ -311,10 +311,10 @@ impl FlvDemux {
// gst::SchedulingFlags::SEEKABLE,
// )
// {
// gst::debug!(CAT, obj: pad, "Activating in Pull mode");
// gst::debug!(CAT, obj = pad, "Activating in Pull mode");
// gst::PadMode::Pull
// } else {
gst::debug!(CAT, obj: pad, "Activating in Push mode");
gst::debug!(CAT, obj = pad, "Activating in Push mode");
gst::PadMode::Push
// }
};
@ -366,7 +366,7 @@ impl FlvDemux {
fn sink_event(&self, pad: &gst::Pad, event: gst::Event) -> bool {
use gst::EventView;
gst::log!(CAT, obj: pad, "Handling event {:?}", event);
gst::log!(CAT, obj = pad, "Handling event {:?}", event);
match event.view() {
EventView::Eos(..) => {
// TODO implement
@ -453,7 +453,7 @@ impl FlvDemux {
pad: &gst::Pad,
buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::log!(CAT, obj: pad, "Handling buffer {:?}", buffer);
gst::log!(CAT, obj = pad, "Handling buffer {:?}", buffer);
let mut adapter = self.adapter.lock().unwrap();
adapter.push(buffer);
@ -466,7 +466,7 @@ impl FlvDemux {
let header = match self.find_header(&mut adapter) {
Ok(header) => header,
Err(_) => {
gst::trace!(CAT, imp: self, "Need more data");
gst::trace!(CAT, imp = self, "Need more data");
return Ok(gst::FlowSuccess::Ok);
}
};
@ -495,7 +495,7 @@ impl FlvDemux {
} => {
let avail = adapter.available();
if avail == 0 {
gst::trace!(CAT, imp: self, "Need more data");
gst::trace!(CAT, imp = self, "Need more data");
return Ok(gst::FlowSuccess::Ok);
}
let skip = cmp::min(avail, *skip_left as usize);
@ -507,7 +507,7 @@ impl FlvDemux {
match res {
Ok(None) => {
gst::trace!(CAT, imp: self, "Need more data");
gst::trace!(CAT, imp = self, "Need more data");
return Ok(gst::FlowSuccess::Ok);
}
Ok(Some(events)) => {
@ -534,7 +534,7 @@ impl FlvDemux {
let data = adapter.map(9).unwrap();
if let Ok((_, header)) = flavors::header(&data) {
gst::debug!(CAT, imp: self, "Found FLV header: {:?}", header);
gst::debug!(CAT, imp = self, "Found FLV header: {:?}", header);
drop(data);
adapter.flush(9);
@ -597,7 +597,7 @@ impl FlvDemux {
let res = pad.push(buffer);
gst::trace!(
CAT,
imp: self,
imp = self,
"Pushing buffer for stream {:?} returned {:?}",
stream,
res
@ -687,7 +687,7 @@ impl StreamingState {
match be_u32::<_, (_, nom::error::ErrorKind)>(&data[0..4]) {
Err(_) => unreachable!(),
Ok((_, previous_size)) => {
gst::trace!(CAT, imp: imp, "Previous tag size {}", previous_size);
gst::trace!(CAT, imp = imp, "Previous tag size {}", previous_size);
// Nothing to do here, we just consume it for now
}
}
@ -703,7 +703,7 @@ impl StreamingState {
Ok((_, tag_header)) => tag_header,
};
gst::trace!(CAT, imp: imp, "Parsed tag header {:?}", tag_header);
gst::trace!(CAT, imp = imp, "Parsed tag header {:?}", tag_header);
drop(data);
@ -715,17 +715,17 @@ impl StreamingState {
match tag_header.tag_type {
flavors::TagType::Script => {
gst::trace!(CAT, imp: imp, "Found script tag");
gst::trace!(CAT, imp = imp, "Found script tag");
Ok(self.handle_script_tag(imp, &tag_header, adapter))
}
flavors::TagType::Audio => {
gst::trace!(CAT, imp: imp, "Found audio tag");
gst::trace!(CAT, imp = imp, "Found audio tag");
self.handle_audio_tag(imp, &tag_header, adapter)
}
flavors::TagType::Video => {
gst::trace!(CAT, imp: imp, "Found video tag");
gst::trace!(CAT, imp = imp, "Found video tag");
self.handle_video_tag(imp, &tag_header, adapter)
}
@ -747,10 +747,10 @@ impl StreamingState {
match flavors::script_data(&data) {
Ok((_, ref script_data)) if script_data.name == "onMetaData" => {
gst::trace!(CAT, imp: imp, "Got script tag: {:?}", script_data);
gst::trace!(CAT, imp = imp, "Got script tag: {:?}", script_data);
let metadata = Metadata::new(script_data);
gst::debug!(CAT, imp: imp, "Got metadata: {:?}", metadata);
gst::debug!(CAT, imp = imp, "Got metadata: {:?}", metadata);
let audio_changed = self
.audio
@ -778,10 +778,10 @@ impl StreamingState {
}
}
Ok((_, ref script_data)) => {
gst::trace!(CAT, imp: imp, "Got script tag: {:?}", script_data);
gst::trace!(CAT, imp = imp, "Got script tag: {:?}", script_data);
}
Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
gst::error!(CAT, imp: imp, "Error parsing script tag: {:?}", err);
gst::error!(CAT, imp = imp, "Error parsing script tag: {:?}", err);
}
Err(nom::Err::Incomplete(_)) => {
// ignore
@ -801,7 +801,7 @@ impl StreamingState {
) -> SmallVec<[Event; 4]> {
let mut events = SmallVec::new();
gst::trace!(CAT, imp: imp, "Got audio data header: {:?}", data_header);
gst::trace!(CAT, imp = imp, "Got audio data header: {:?}", data_header);
let new_audio_format =
AudioFormat::new(data_header, &self.metadata, &self.aac_sequence_header);
@ -809,7 +809,7 @@ impl StreamingState {
if self.audio.as_ref() != Some(&new_audio_format) {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Got new audio format: {:?}",
new_audio_format
);
@ -827,7 +827,7 @@ impl StreamingState {
&& self.audio.is_some()
&& !self.got_all_streams
{
gst::debug!(CAT, imp: imp, "Have all expected streams now");
gst::debug!(CAT, imp = imp, "Have all expected streams now");
self.got_all_streams = true;
events.push(Event::HaveAllStreams);
}
@ -846,7 +846,7 @@ impl StreamingState {
adapter.flush((tag_header.data_size - 1) as usize);
gst::warning!(
CAT,
imp: imp,
imp = imp,
"Too small packet for AAC packet header {}",
tag_header.data_size
);
@ -857,14 +857,14 @@ impl StreamingState {
match flavors::aac_audio_packet_header(&data) {
Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
gst::error!(CAT, imp: imp, "Invalid AAC audio packet header: {:?}", err);
gst::error!(CAT, imp = imp, "Invalid AAC audio packet header: {:?}", err);
drop(data);
adapter.flush((tag_header.data_size - 1) as usize);
Ok(true)
}
Err(nom::Err::Incomplete(_)) => unreachable!(),
Ok((_, header)) => {
gst::trace!(CAT, imp: imp, "Got AAC packet header {:?}", header);
gst::trace!(CAT, imp = imp, "Got AAC packet header {:?}", header);
match header.packet_type {
flavors::AACPacketType::SequenceHeader => {
drop(data);
@ -872,7 +872,7 @@ impl StreamingState {
let buffer = adapter
.take_buffer((tag_header.data_size - 1 - 1) as usize)
.unwrap();
gst::debug!(CAT, imp: imp, "Got AAC sequence header {:?}", buffer,);
gst::debug!(CAT, imp = imp, "Got AAC sequence header {:?}", buffer,);
self.aac_sequence_header = Some(buffer);
Ok(true)
@ -898,7 +898,7 @@ impl StreamingState {
let data = adapter.map(1).unwrap();
let data_header = match flavors::audio_data_header(&data) {
Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
gst::error!(CAT, imp: imp, "Invalid audio data header: {:?}", err);
gst::error!(CAT, imp = imp, "Invalid audio data header: {:?}", err);
drop(data);
adapter.flush(tag_header.data_size as usize);
return Ok(SmallVec::new());
@ -943,7 +943,7 @@ impl StreamingState {
gst::trace!(
CAT,
imp: imp,
imp = imp,
"Outputting audio buffer {:?} for tag {:?}",
buffer,
tag_header,
@ -963,7 +963,7 @@ impl StreamingState {
) -> SmallVec<[Event; 4]> {
let mut events = SmallVec::new();
gst::trace!(CAT, imp: imp, "Got video data header: {:?}", data_header);
gst::trace!(CAT, imp = imp, "Got video data header: {:?}", data_header);
let new_video_format =
VideoFormat::new(data_header, &self.metadata, &self.avc_sequence_header);
@ -971,7 +971,7 @@ impl StreamingState {
if self.video.as_ref() != Some(&new_video_format) {
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Got new video format: {:?}",
new_video_format
);
@ -989,7 +989,7 @@ impl StreamingState {
&& self.video.is_some()
&& !self.got_all_streams
{
gst::debug!(CAT, imp: imp, "Have all expected streams now");
gst::debug!(CAT, imp = imp, "Have all expected streams now");
self.got_all_streams = true;
events.push(Event::HaveAllStreams);
}
@ -1008,7 +1008,7 @@ impl StreamingState {
adapter.flush((tag_header.data_size - 1) as usize);
gst::warning!(
CAT,
imp: imp,
imp = imp,
"Too small packet for AVC packet header {}",
tag_header.data_size
);
@ -1018,14 +1018,14 @@ impl StreamingState {
let data = adapter.map(4).unwrap();
match flavors::avc_video_packet_header(&data) {
Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
gst::error!(CAT, imp: imp, "Invalid AVC video packet header: {:?}", err);
gst::error!(CAT, imp = imp, "Invalid AVC video packet header: {:?}", err);
drop(data);
adapter.flush((tag_header.data_size - 1) as usize);
Ok(None)
}
Err(nom::Err::Incomplete(_)) => unreachable!(),
Ok((_, header)) => {
gst::trace!(CAT, imp: imp, "Got AVC packet header {:?}", header);
gst::trace!(CAT, imp = imp, "Got AVC packet header {:?}", header);
match header.packet_type {
flavors::AVCPacketType::SequenceHeader => {
drop(data);
@ -1035,7 +1035,7 @@ impl StreamingState {
.unwrap();
gst::debug!(
CAT,
imp: imp,
imp = imp,
"Got AVC sequence header {:?} of size {}",
buffer,
tag_header.data_size - 1 - 4
@ -1071,7 +1071,7 @@ impl StreamingState {
let data = adapter.map(1).unwrap();
let data_header = match flavors::video_data_header(&data) {
Err(nom::Err::Error(err)) | Err(nom::Err::Failure(err)) => {
gst::error!(CAT, imp: imp, "Invalid video data header: {:?}", err);
gst::error!(CAT, imp = imp, "Invalid video data header: {:?}", err);
drop(data);
adapter.flush(tag_header.data_size as usize);
return Ok(SmallVec::new());
@ -1147,7 +1147,7 @@ impl StreamingState {
gst::trace!(
CAT,
imp: imp,
imp = imp,
"Outputting video buffer {:?} for tag {:?}, keyframe: {}",
buffer,
tag_header,
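
The flvdemux hunks above are part of a mechanical migration of the gstreamer-rs logging macros from the older `obj:`/`imp:` field syntax to the `obj =`/`imp =` form. A minimal sketch of the new form, using a hypothetical debug category that is not taken from the patch (it assumes the `gstreamer` crate is imported as `gst`, as in the plugins above):

```rust
use once_cell::sync::Lazy;

// Hypothetical category, for illustration only.
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
    gst::DebugCategory::new(
        "exampledemux",
        gst::DebugColorFlags::empty(),
        Some("Example demuxer"),
    )
});

fn handle_event(pad: &gst::Pad, event: &gst::Event) {
    // Structured `key = value` field; this is the spelling the patch
    // switches to from the previous `obj: pad` form.
    gst::log!(CAT, obj = pad, "Handling event {:?}", event);
}
```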


@ -14,8 +14,9 @@ gst = { workspace = true, features = ["v1_18"] }
gst-base = { workspace = true, features = ["v1_18"] }
gst-audio = { workspace = true, features = ["v1_18"] }
gst-video = { workspace = true, features = ["v1_18"] }
gst-pbutils = { workspace = true, features = ["v1_18"] }
gst-pbutils = { workspace = true, features = ["v1_20"] }
once_cell.workspace = true
bitstream-io = "2.3"
[lib]
name = "gstfmp4"
@ -25,9 +26,10 @@ path = "src/lib.rs"
[dev-dependencies]
gst-app = { workspace = true, features = ["v1_18"] }
gst-check = { workspace = true, features = ["v1_18"] }
gst-pbutils = { workspace = true, features = ["v1_20"] }
m3u8-rs = "5.0"
chrono = "0.4"
dash-mpd = { version = "0.14", default-features = false }
chrono = "0.4.35"
dash-mpd = { version = "0.17", default-features = false }
quick-xml = { version = "0.31", features = ["serialize"] }
serde = "1"


@ -86,7 +86,7 @@ fn main() -> Result<(), Error> {
drop(map);
// Remove the header from the buffer list
buffer_list.make_mut().remove(0, 1);
buffer_list.make_mut().remove(0..1);
// If the list is now empty then it only contained the media header and nothing
// else.
@ -179,19 +179,18 @@ fn main() -> Result<(), Error> {
// Write the whole segment timeline out here, compressing multiple segments with
// the same duration to a repeated segment.
let mut segments = vec![];
let mut write_segment =
|start: gst::ClockTime, duration: gst::ClockTime, repeat: usize| {
let mut s = dash_mpd::S {
t: Some(start.mseconds() as i64),
d: duration.mseconds() as i64,
..Default::default()
};
if repeat > 0 {
s.r = Some(repeat as i64);
}
segments.push(s);
let mut write_segment = |start: gst::ClockTime, duration: u64, repeat: usize| {
let mut s = dash_mpd::S {
t: Some(start.mseconds()),
d: duration,
..Default::default()
};
if repeat > 0 {
s.r = Some(repeat as i64);
}
segments.push(s);
};
let mut start = None;
let mut num_segments = 0;
@ -201,15 +200,15 @@ fn main() -> Result<(), Error> {
start = Some(segment.start_time);
}
if last_duration.is_none() {
last_duration = Some(segment.duration);
last_duration = Some(segment.duration.mseconds());
}
// If the duration of this segment is different from the previous one then we
// have to write out the segment now.
if last_duration != Some(segment.duration) {
if last_duration != Some(segment.duration.mseconds()) {
write_segment(start.unwrap(), last_duration.unwrap(), num_segments - 1);
start = Some(segment.start_time);
last_duration = Some(segment.duration);
last_duration = Some(segment.duration.mseconds());
num_segments = 1;
} else {
num_segments += 1;
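
For context on the repeat logic above: a DASH SegmentTimeline collapses a run of equal-duration segments into one S element whose r attribute holds the number of additional repetitions. A standalone sketch of the same compression, assuming the dash-mpd 0.17 field types used in the example (t: Option<u64>, d: u64, r: Option<i64>):

```rust
use dash_mpd::S;

// Collapse consecutive equal-duration segments into SegmentTimeline entries.
// Times and durations are in milliseconds, as in the manifest example above.
fn compress(segments: &[(u64, u64)]) -> Vec<S> {
    let mut out: Vec<S> = Vec::new();
    for &(start_ms, duration_ms) in segments {
        match out.last_mut() {
            // Same duration as the previous entry: bump its repeat count.
            Some(prev) if prev.d == duration_ms => {
                prev.r = Some(prev.r.unwrap_or(0) + 1);
            }
            _ => out.push(S {
                t: Some(start_ms),
                d: duration_ms,
                ..Default::default()
            }),
        }
    }
    out
}

// Four 2 s segments followed by one 3 s segment become two entries:
// S { t: Some(0), d: 2000, r: Some(3), .. } and S { t: Some(8000), d: 3000, .. }.
```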


@ -153,7 +153,7 @@ fn trim_segments(state: &mut StreamState) {
// safe side
removal_time: segment
.date_time
.checked_add_signed(Duration::seconds(20))
.checked_add_signed(Duration::try_seconds(20).unwrap())
.unwrap(),
path: segment.path.clone(),
});
@ -267,7 +267,7 @@ fn setup_appsink(appsink: &gst_app::AppSink, name: &str, path: &Path, is_video:
drop(map);
// Remove the header from the buffer list
buffer_list.make_mut().remove(0, 1);
buffer_list.make_mut().remove(0..1);
// If the list is now empty then it only contained the media header and nothing
// else.


@ -170,7 +170,7 @@ fn setup_appsink(appsink: &gst_app::AppSink, name: &str, path: &Path, is_video:
drop(map);
// Remove the header from the buffer list
buffer_list.make_mut().remove(0, 1);
buffer_list.make_mut().remove(0..1);
// If the list is now empty then it only contained the media header and nothing
// else.
@ -360,6 +360,10 @@ impl AudioStream {
.property("samplesperbuffer", 4410)
.property_from_str("wave", &self.wave)
.build()?;
let taginject = gst::ElementFactory::make("taginject")
.property_from_str("tags", &format!("language-code={}", self.lang))
.property_from_str("scope", "stream")
.build()?;
let raw_capsfilter = gst::ElementFactory::make("capsfilter")
.property(
"caps",
@ -374,9 +378,23 @@ impl AudioStream {
.build()?;
let appsink = gst_app::AppSink::builder().buffer_list(true).build();
pipeline.add_many([&src, &raw_capsfilter, &enc, &mux, appsink.upcast_ref()])?;
pipeline.add_many([
&src,
&taginject,
&raw_capsfilter,
&enc,
&mux,
appsink.upcast_ref(),
])?;
gst::Element::link_many([&src, &raw_capsfilter, &enc, &mux, appsink.upcast_ref()])?;
gst::Element::link_many([
&src,
&taginject,
&raw_capsfilter,
&enc,
&mux,
appsink.upcast_ref(),
])?;
probe_encoder(state, enc);
@ -416,7 +434,7 @@ fn main() -> Result<(), Error> {
},
AudioStream {
name: "audio_1".to_string(),
lang: "fre".to_string(),
lang: "fra".to_string(),
default: false,
wave: "white-noise".to_string(),
},


@ -9,8 +9,9 @@
use gst::prelude::*;
use anyhow::{anyhow, bail, Context, Error};
use std::convert::TryFrom;
use super::Buffer;
use super::{Buffer, ImageOrientation, IDENTITY_MATRIX};
fn write_box<T, F: FnOnce(&mut Vec<u8>) -> Result<T, Error>>(
vec: &mut Vec<u8>,
@ -160,6 +161,13 @@ fn cmaf_brands_from_caps(caps: &gst::CapsRef, compatible_brands: &mut Vec<&'stat
"audio/mpeg" => {
compatible_brands.push(b"caac");
}
"audio/x-opus" => {
compatible_brands.push(b"opus");
}
"video/x-av1" => {
compatible_brands.push(b"av01");
compatible_brands.push(b"cmf2");
}
"video/x-h265" => {
let width = s.get::<i32>("width").ok();
let height = s.get::<i32>("height").ok();
@ -577,7 +585,7 @@ fn write_trak(
fn write_tkhd(
v: &mut Vec<u8>,
_cfg: &super::HeaderConfiguration,
cfg: &super::HeaderConfiguration,
idx: usize,
stream: &super::HeaderStream,
creation_time: u64,
@ -604,9 +612,8 @@ fn write_tkhd(
// Volume
let s = stream.caps.structure(0).unwrap();
match s.name().as_str() {
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
v.extend((1u16 << 8).to_be_bytes())
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => v.extend((1u16 << 8).to_be_bytes()),
_ => v.extend(0u16.to_be_bytes()),
}
@ -614,21 +621,15 @@ fn write_tkhd(
v.extend([0u8; 2]);
// Matrix
v.extend(
[
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(16384u32 << 16).to_be_bytes(),
]
.into_iter()
.flatten(),
);
let matrix = match s.name().as_str() {
x if x.starts_with("video/") || x.starts_with("image/") => cfg
.orientation
.unwrap_or(ImageOrientation::Rotate0)
.transform_matrix(),
_ => &IDENTITY_MATRIX,
};
v.extend(matrix.iter().flatten());
// Width/height
match s.name().as_str() {
@ -700,7 +701,6 @@ fn write_tref(
fn language_code(lang: impl std::borrow::Borrow<[u8; 3]>) -> u16 {
let lang = lang.borrow();
// TODO: Need to relax this once we get the language code from tags
assert!(lang.iter().all(u8::is_ascii_lowercase));
(((lang[0] as u16 - 0x60) & 0x1F) << 10)
@ -710,7 +710,7 @@ fn language_code(lang: impl std::borrow::Borrow<[u8; 3]>) -> u16 {
fn write_mdhd(
v: &mut Vec<u8>,
_cfg: &super::HeaderConfiguration,
cfg: &super::HeaderConfiguration,
stream: &super::HeaderStream,
creation_time: u64,
) -> Result<(), Error> {
@ -724,8 +724,11 @@ fn write_mdhd(
v.extend(0u64.to_be_bytes());
// Language as ISO-639-2/T
// TODO: get actual language from the tags
v.extend(language_code(b"und").to_be_bytes());
if let Some(lang) = cfg.language_code {
v.extend(language_code(lang).to_be_bytes());
} else {
v.extend(language_code(b"und").to_be_bytes());
}
// Pre-defined
v.extend([0u8; 2]);
@ -745,9 +748,8 @@ fn write_hdlr(
let (handler_type, name) = match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => (b"vide", b"VideoHandler\0".as_slice()),
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
(b"soun", b"SoundHandler\0".as_slice())
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => (b"soun", b"SoundHandler\0".as_slice()),
"application/x-onvif-metadata" => (b"meta", b"MetadataHandler\0".as_slice()),
_ => unreachable!(),
};
@ -777,7 +779,8 @@ fn write_minf(
// Flags are always 1 for unspecified reasons
write_full_box(v, b"vmhd", FULL_BOX_VERSION_0, 1, |v| write_vmhd(v, cfg))?
}
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => {
write_full_box(v, b"smhd", FULL_BOX_VERSION_0, FULL_BOX_FLAGS_NONE, |v| {
write_smhd(v, cfg)
})?
@ -886,9 +889,8 @@ fn write_stsd(
match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => write_visual_sample_entry(v, cfg, stream)?,
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
write_audio_sample_entry(v, cfg, stream)?
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => write_audio_sample_entry(v, cfg, stream)?,
"application/x-onvif-metadata" => write_xml_meta_data_sample_entry(v, cfg, stream)?,
_ => unreachable!(),
}
@ -1098,9 +1100,9 @@ fn write_visual_sample_entry(
"professional" => 2,
_ => unreachable!(),
};
let level = 1; // FIXME
let tier = 0; // FIXME
// TODO: Use `gst_codec_utils_av1_get_seq_level_idx` when exposed in bindings
let level = av1_seq_level_idx(s.get::<&str>("level").ok());
let tier = av1_tier(s.get::<&str>("tier").ok());
let (high_bitdepth, twelve_bit) =
match s.get::<u32>("bit-depth-luma").unwrap() {
8 => (false, false),
@ -1145,6 +1147,10 @@ fn write_visual_sample_entry(
v.extend_from_slice(&codec_data);
}
if let Some(extra_data) = &stream.extra_header_data {
// configOBUs
v.extend_from_slice(extra_data.as_slice());
}
Ok(())
})?;
}
@ -1253,6 +1259,44 @@ fn write_visual_sample_entry(
Ok(())
}
fn av1_seq_level_idx(level: Option<&str>) -> u8 {
match level {
Some("2.0") => 0,
Some("2.1") => 1,
Some("2.2") => 2,
Some("2.3") => 3,
Some("3.0") => 4,
Some("3.1") => 5,
Some("3.2") => 6,
Some("3.3") => 7,
Some("4.0") => 8,
Some("4.1") => 9,
Some("4.2") => 10,
Some("4.3") => 11,
Some("5.0") => 12,
Some("5.1") => 13,
Some("5.2") => 14,
Some("5.3") => 15,
Some("6.0") => 16,
Some("6.1") => 17,
Some("6.2") => 18,
Some("6.3") => 19,
Some("7.0") => 20,
Some("7.1") => 21,
Some("7.2") => 22,
Some("7.3") => 23,
_ => 1,
}
}
fn av1_tier(tier: Option<&str>) -> u8 {
match tier {
Some("main") => 0,
Some("high") => 1,
_ => 0,
}
}
fn write_audio_sample_entry(
v: &mut Vec<u8>,
_cfg: &super::HeaderConfiguration,
@ -1262,6 +1306,7 @@ fn write_audio_sample_entry(
let fourcc = match s.name().as_str() {
"audio/mpeg" => b"mp4a",
"audio/x-opus" => b"Opus",
"audio/x-flac" => b"fLaC",
"audio/x-alaw" => b"alaw",
"audio/x-mulaw" => b"ulaw",
"audio/x-adpcm" => {
@ -1280,6 +1325,10 @@ fn write_audio_sample_entry(
let bitrate = s.get::<i32>("bitrate").context("no ADPCM bitrate field")?;
(bitrate / 8000) as u16
}
"audio/x-flac" => with_flac_metadata(&stream.caps, |streaminfo, _| {
1 + (u16::from_be_bytes([streaminfo[16], streaminfo[17]]) >> 4 & 0b11111)
})
.context("FLAC metadata error")?,
_ => 16u16,
};
@ -1322,6 +1371,9 @@ fn write_audio_sample_entry(
"audio/x-opus" => {
write_dops(v, &stream.caps)?;
}
"audio/x-flac" => {
write_dfla(v, &stream.caps)?;
}
"audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
// Nothing to do here
}
@ -1516,6 +1568,35 @@ fn write_dops(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
})
}
fn with_flac_metadata<R>(
caps: &gst::Caps,
cb: impl FnOnce(&[u8], &[gst::glib::SendValue]) -> R,
) -> Result<R, Error> {
let caps = caps.structure(0).unwrap();
let header = caps.get::<gst::ArrayRef>("streamheader").unwrap();
let (streaminfo, remainder) = header.as_ref().split_first().unwrap();
let streaminfo = streaminfo.get::<&gst::BufferRef>().unwrap();
let streaminfo = streaminfo.map_readable().unwrap();
// 13 bytes for the Ogg/FLAC prefix and 38 for the streaminfo itself.
match <&[_; 13 + 38]>::try_from(streaminfo.as_slice()) {
Ok(i) if i.starts_with(b"\x7FFLAC\x01\x00") => Ok(cb(&i[13..], remainder)),
Ok(_) | Err(_) => bail!("Unknown streamheader format"),
}
}
fn write_dfla(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
write_full_box(v, b"dfLa", 0, 0, move |v| {
with_flac_metadata(caps, |streaminfo, remainder| {
v.extend(streaminfo);
for metadata in remainder {
let metadata = metadata.get::<&gst::BufferRef>().unwrap();
let metadata = metadata.map_readable().unwrap();
v.extend(&metadata[..]);
}
})
})
}
fn write_xml_meta_data_sample_entry(
v: &mut Vec<u8>,
_cfg: &super::HeaderConfiguration,
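
One of the changes in this file is that write_mdhd now derives the mdhd language field from the configured language code rather than always writing "und". The field packs a lowercase ISO-639-2/T code into 15 bits, three 5-bit letters offset by 0x60, which is easy to verify by hand. A standalone restatement of the language_code() packing above:

```rust
// Pack a lowercase ISO-639-2/T code the way language_code() above does.
// Input is assumed to be three ASCII lowercase letters.
fn pack_iso639_2(lang: [u8; 3]) -> u16 {
    (((lang[0] as u16 - 0x60) & 0x1F) << 10)
        | (((lang[1] as u16 - 0x60) & 0x1F) << 5)
        | ((lang[2] as u16 - 0x60) & 0x1F)
}

fn main() {
    // "und" (undetermined) packs to 0x55C4, the value commonly seen in muxers.
    assert_eq!(pack_iso639_2(*b"und"), 0x55C4);
    // "fra", as used by the updated HLS example above.
    assert_eq!(pack_iso639_2(*b"fra"), 0x1A41);
}
```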

File diff suppressed because it is too large.


@ -12,6 +12,8 @@ use gst::prelude::*;
mod boxes;
mod imp;
mod obu;
glib::wrapper! {
pub(crate) struct FMP4MuxPad(ObjectSubclass<imp::FMP4MuxPad>) @extends gst_base::AggregatorPad, gst::Pad, gst::Object;
}
@ -71,6 +73,80 @@ pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
Ok(())
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum ImageOrientation {
Rotate0,
Rotate90,
Rotate180,
Rotate270,
// TODO:
// FlipRotate0,
// FlipRotate90,
// FlipRotate180,
// FlipRotate270,
}
type TransformMatrix = [[u8; 4]; 9];
const IDENTITY_MATRIX: TransformMatrix = [
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
const ROTATE_90_MATRIX: TransformMatrix = [
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
const ROTATE_180_MATRIX: TransformMatrix = [
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
const ROTATE_270_MATRIX: TransformMatrix = [
0u32.to_be_bytes(),
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
impl ImageOrientation {
pub(crate) fn transform_matrix(&self) -> &'static TransformMatrix {
match self {
ImageOrientation::Rotate0 => &IDENTITY_MATRIX,
ImageOrientation::Rotate90 => &ROTATE_90_MATRIX,
ImageOrientation::Rotate180 => &ROTATE_180_MATRIX,
ImageOrientation::Rotate270 => &ROTATE_270_MATRIX,
}
}
}
#[derive(Debug)]
pub(crate) struct HeaderConfiguration {
variant: Variant,
@ -85,6 +161,8 @@ pub(crate) struct HeaderConfiguration {
write_mehd: bool,
duration: Option<gst::ClockTime>,
language_code: Option<[u8; 3]>,
orientation: Option<ImageOrientation>,
/// Start UTC time in ONVIF mode.
/// Since Jan 1 1601 in 100ns units.
@ -101,6 +179,9 @@ pub(crate) struct HeaderStream {
/// Pre-defined trak timescale if not 0.
trak_timescale: u32,
// More data to be included in the fragmented stream header
extra_header_data: Option<Vec<u8>>,
}
#[derive(Debug)]
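
The ImageOrientation matrices added above follow the ISO BMFF tkhd transformation-matrix convention: the a, b, c, d, x and y entries are 16.16 fixed point and the final w entry is 2.30, so 1.0 is stored as 1 << 16 and 1 << 30 respectively. A small standalone check (not part of the patch) that the encoded entries decode back to the expected values:

```rust
// Decode the big-endian fixed-point entries used by the matrices above.
fn from_16_16(raw: [u8; 4]) -> f64 {
    i32::from_be_bytes(raw) as f64 / (1u32 << 16) as f64
}

fn from_2_30(raw: [u8; 4]) -> f64 {
    i32::from_be_bytes(raw) as f64 / (1u32 << 30) as f64
}

fn main() {
    assert_eq!(from_16_16((1u32 << 16).to_be_bytes()), 1.0);
    assert_eq!(from_16_16((-1i32 << 16).to_be_bytes()), -1.0);
    assert_eq!(from_2_30((1u32 << 30).to_be_bytes()), 1.0);
}
```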

mux/fmp4/src/fmp4mux/obu.rs (new file, 303 lines)

@ -0,0 +1,303 @@
//
// Copyright (C) 2022 Vivienne Watermeier <vwatermeier@igalia.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(non_camel_case_types)]
use bitstream_io::{BigEndian, BitRead, BitReader, Endianness};
use std::io::{self, Cursor, Read, Seek, SeekFrom};
pub fn parse_leb128<R, E>(reader: &mut BitReader<R, E>) -> io::Result<(u32, u32)>
where
R: Read + Seek,
E: Endianness,
{
let mut value = 0;
let mut num_bytes = 0;
for i in 0..8 {
let byte = reader.read::<u32>(8)?;
value |= (byte & 0x7f) << (i * 7);
num_bytes += 1;
if byte & 0x80 == 0 {
break;
}
}
reader.byte_align();
Ok((value, num_bytes))
}
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct SizedObu {
pub obu_type: ObuType,
pub has_extension: bool,
/// If the OBU header is followed by a leb128 size field.
pub has_size_field: bool,
pub temporal_id: u8,
pub spatial_id: u8,
/// size of the OBU payload in bytes.
/// This may refer to different sizes in different contexts, not always
/// to the entire OBU payload as it is in the AV1 bitstream.
pub size: u32,
/// the number of bytes the leb128 size field will take up
/// when written with write_leb128().
/// This does not imply `has_size_field`, and does not necessarily match with
/// the length of the internal size field if present.
pub leb_size: u32,
pub header_len: u32,
/// indicates that only part of this OBU has been processed so far
pub is_fragment: bool,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ObuType {
Reserved,
SequenceHeader,
TemporalDelimiter,
FrameHeader,
TileGroup,
Metadata,
Frame,
RedundantFrameHeader,
TileList,
Padding,
}
impl Default for ObuType {
fn default() -> Self {
Self::Reserved
}
}
impl SizedObu {
/// Parse an OBU header and size field. If the OBU is not expected to contain
/// a size field, but the size is known from external information,
/// parse as an `UnsizedObu` and use `to_sized`.
pub fn parse<R, E>(reader: &mut BitReader<R, E>) -> io::Result<Self>
where
R: Read + Seek,
E: Endianness,
{
// check the forbidden bit
if reader.read_bit()? {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"forbidden bit in OBU header is set",
));
}
let obu_type = reader.read::<u8>(4)?.into();
let has_extension = reader.read_bit()?;
// require a size field
if !reader.read_bit()? {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"expected a size field",
));
}
// ignore the reserved bit
let _ = reader.read_bit()?;
let (temporal_id, spatial_id) = if has_extension {
(reader.read::<u8>(3)?, reader.read::<u8>(2)?)
} else {
(0, 0)
};
reader.byte_align();
let (size, leb_size) = parse_leb128(reader)?;
Ok(Self {
obu_type,
has_extension,
has_size_field: true,
temporal_id,
spatial_id,
size,
leb_size,
header_len: has_extension as u32 + 1,
is_fragment: false,
})
}
/// The amount of bytes this OBU will take up, including the space needed for
/// its leb128 size field.
pub fn full_size(&self) -> u32 {
self.size + self.leb_size + self.header_len
}
}
pub fn read_seq_header_obu_bytes(data: &[u8]) -> io::Result<Option<Vec<u8>>> {
let mut cursor = Cursor::new(data);
while cursor.position() < data.len() as u64 {
let obu_start = cursor.position();
let Ok(obu) = SizedObu::parse(&mut BitReader::endian(&mut cursor, BigEndian)) else {
break;
};
// set reader to the beginning of the OBU
cursor.seek(SeekFrom::Start(obu_start))?;
if obu.obu_type != ObuType::SequenceHeader {
// Skip the full OBU
cursor.seek(SeekFrom::Current(obu.full_size() as i64))?;
continue;
};
// read the full OBU
let mut bytes = vec![0; obu.full_size() as usize];
cursor.read_exact(&mut bytes)?;
return Ok(Some(bytes));
}
Ok(None)
}
impl From<u8> for ObuType {
fn from(n: u8) -> Self {
assert!(n < 16);
match n {
1 => Self::SequenceHeader,
2 => Self::TemporalDelimiter,
3 => Self::FrameHeader,
4 => Self::TileGroup,
5 => Self::Metadata,
6 => Self::Frame,
7 => Self::RedundantFrameHeader,
8 => Self::TileList,
15 => Self::Padding,
_ => Self::Reserved,
}
}
}
impl From<ObuType> for u8 {
fn from(ty: ObuType) -> Self {
match ty {
ObuType::Reserved => 0,
ObuType::SequenceHeader => 1,
ObuType::TemporalDelimiter => 2,
ObuType::FrameHeader => 3,
ObuType::TileGroup => 4,
ObuType::Metadata => 5,
ObuType::Frame => 6,
ObuType::RedundantFrameHeader => 7,
ObuType::TileList => 8,
ObuType::Padding => 15,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use bitstream_io::{BigEndian, BitReader};
use once_cell::sync::Lazy;
use std::io::Cursor;
#[allow(clippy::type_complexity)]
static OBUS: Lazy<Vec<(SizedObu, Vec<u8>)>> = Lazy::new(|| {
vec![
(
SizedObu {
obu_type: ObuType::TemporalDelimiter,
has_extension: false,
has_size_field: true,
temporal_id: 0,
spatial_id: 0,
size: 0,
leb_size: 1,
header_len: 1,
is_fragment: false,
},
vec![0b0001_0010, 0b0000_0000],
),
(
SizedObu {
obu_type: ObuType::Padding,
has_extension: false,
has_size_field: true,
temporal_id: 0,
spatial_id: 0,
size: 10,
leb_size: 1,
header_len: 1,
is_fragment: false,
},
vec![0b0111_1010, 0b0000_1010, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
),
(
SizedObu {
obu_type: ObuType::SequenceHeader,
has_extension: true,
has_size_field: true,
temporal_id: 4,
spatial_id: 3,
size: 5,
leb_size: 1,
header_len: 2,
is_fragment: false,
},
vec![0b0000_1110, 0b1001_1000, 0b0000_0101, 1, 2, 3, 4, 5],
),
(
SizedObu {
obu_type: ObuType::Frame,
has_extension: true,
has_size_field: true,
temporal_id: 4,
spatial_id: 3,
size: 5,
leb_size: 1,
header_len: 2,
is_fragment: false,
},
vec![0b0011_0110, 0b1001_1000, 0b0000_0101, 1, 2, 3, 4, 5],
),
]
});
#[test]
fn test_parse_rtp_obu() {
for (idx, (sized_obu, raw_bytes)) in (*OBUS).iter().enumerate() {
println!("running test {idx}...");
let mut reader = BitReader::endian(Cursor::new(&raw_bytes), BigEndian);
let obu_parsed = SizedObu::parse(&mut reader).unwrap();
assert_eq!(&obu_parsed, sized_obu);
if let Some(seq_header_obu_bytes) = read_seq_header_obu_bytes(raw_bytes).unwrap() {
println!("validation of sequence header obu read/write...");
assert_eq!(&seq_header_obu_bytes, raw_bytes);
}
}
}
#[test]
fn test_read_seq_header_from_bitstream() {
let mut bitstream = Vec::new();
let mut seq_header_bytes_raw = None;
for (obu, raw_bytes) in (*OBUS).iter() {
bitstream.extend(raw_bytes);
if obu.obu_type == ObuType::SequenceHeader {
seq_header_bytes_raw = Some(raw_bytes.clone());
}
}
let seq_header_obu_bytes = read_seq_header_obu_bytes(&bitstream).unwrap().unwrap();
assert_eq!(seq_header_obu_bytes, seq_header_bytes_raw.unwrap());
}
}
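
parse_leb128 above reads the variable-length size field that prefixes sized OBUs: seven value bits per byte, least-significant group first, with the top bit of each byte acting as a continuation flag. A simplified standalone decoder following the same scheme, useful for checking encodings by hand:

```rust
// Decode an AV1-style leb128 value; returns (value, bytes consumed).
// Uses a u64 accumulator so intermediate shifts cannot overflow.
fn leb128(bytes: &[u8]) -> Option<(u64, usize)> {
    let mut value: u64 = 0;
    for (i, &byte) in bytes.iter().take(8).enumerate() {
        value |= u64::from(byte & 0x7f) << (i * 7);
        if byte & 0x80 == 0 {
            return Some((value, i + 1));
        }
    }
    None
}

fn main() {
    // 300 = 0b1_0010_1100: low 7 bits (0x2C) with the continuation bit set,
    // then 0x02 contributing 2 << 7 = 256.
    assert_eq!(leb128(&[0xAC, 0x02]), Some((300, 2)));
    // A single byte below 0x80 encodes its own value.
    assert_eq!(leb128(&[0x2A]), Some((42, 1)));
}
```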


@ -19,6 +19,33 @@ fn init() {
});
}
fn to_completion(pipeline: &gst::Pipeline) {
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in pipeline.bus().unwrap().iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
panic!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
fn test_buffer_flags_single_stream(cmaf: bool, set_dts: bool, caps: gst::Caps) {
let mut h = if cmaf {
gst_check::Harness::new("cmafmux")
@ -209,6 +236,26 @@ fn test_buffer_flags_single_vp9_stream_iso() {
test_buffer_flags_single_stream(false, false, caps);
}
#[test]
fn test_buffer_flags_single_av1_stream_cmaf() {
init();
let caps = gst::Caps::builder("video/x-av1")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("profile", "main")
.field("tier", "main")
.field("level", "4.1")
.field("chroma-format", "4:2:0")
.field("bit-depth-luma", 8u32)
.field("bit-depth-chroma", 8u32)
.field("colorimetry", "bt709")
.build();
test_buffer_flags_single_stream(true, false, caps);
}
#[test]
fn test_buffer_flags_multi_stream() {
init();
@ -1287,6 +1334,328 @@ fn test_buffer_multi_stream_short_gops() {
assert_eq!(ev.type_(), gst::EventType::Eos);
}
#[test]
fn test_single_stream_manual_fragment() {
init();
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::with_size(1).unwrap())
.build();
let mut h = gst_check::Harness::new("cmafmux");
// fragment duration long enough to be ignored; fragments are only created via the signal
h.element()
.unwrap()
.set_property("fragment-duration", 1.hours());
h.set_src_caps(caps);
h.play();
// request fragment at 4 seconds, should be created at 11th buffer
h.element()
.unwrap()
.emit_by_name::<()>("split-at-running-time", &[&4.seconds()]);
// Push 20 buffers of 0.5s each, 1st, 11th and 16th buffer without DELTA_UNIT flag
for i in 0..20 {
let mut buffer = gst::Buffer::with_size(1).unwrap();
{
let buffer = buffer.get_mut().unwrap();
buffer.set_pts(i * 500.mseconds());
buffer.set_dts(i * 500.mseconds());
buffer.set_duration(500.mseconds());
if i != 0 && i != 10 && i != 15 {
buffer.set_flags(gst::BufferFlags::DELTA_UNIT);
}
}
assert_eq!(h.push(buffer), Ok(gst::FlowSuccess::Ok));
if i == 2 {
let ev = loop {
let ev = h.pull_upstream_event().unwrap();
if ev.type_() != gst::EventType::Reconfigure
&& ev.type_() != gst::EventType::Latency
{
break ev;
}
};
assert_eq!(ev.type_(), gst::EventType::CustomUpstream);
assert_eq!(
gst_video::UpstreamForceKeyUnitEvent::parse(&ev).unwrap(),
gst_video::UpstreamForceKeyUnitEvent {
running_time: Some(4.seconds()),
all_headers: true,
count: 0
}
);
}
}
// Crank the clock: this should bring us to the end of the first fragment
h.crank_single_clock_wait().unwrap();
let header = h.pull().unwrap();
assert_eq!(
header.flags(),
gst::BufferFlags::HEADER | gst::BufferFlags::DISCONT
);
assert_eq!(header.pts(), Some(gst::ClockTime::ZERO));
assert_eq!(header.dts(), Some(gst::ClockTime::ZERO));
// first fragment
let fragment_header = h.pull().unwrap();
assert_eq!(fragment_header.flags(), gst::BufferFlags::HEADER);
assert_eq!(fragment_header.pts(), Some(gst::ClockTime::ZERO));
assert_eq!(fragment_header.dts(), Some(gst::ClockTime::ZERO));
assert_eq!(fragment_header.duration(), Some(5.seconds()));
for buffer_idx in 0..10 {
let buffer = h.pull().unwrap();
if buffer_idx == 9 {
assert_eq!(
buffer.flags(),
gst::BufferFlags::DELTA_UNIT | gst::BufferFlags::MARKER
);
} else {
assert_eq!(buffer.flags(), gst::BufferFlags::DELTA_UNIT);
}
assert_eq!(buffer.pts(), Some(buffer_idx * 500.mseconds()));
assert_eq!(buffer.dts(), Some(buffer_idx * 500.mseconds()));
assert_eq!(buffer.duration(), Some(500.mseconds()));
}
// second manual fragment
let fragment_header = h.pull().unwrap();
assert_eq!(fragment_header.flags(), gst::BufferFlags::HEADER);
assert_eq!(fragment_header.pts(), Some(5.seconds()));
assert_eq!(fragment_header.dts(), Some(5.seconds()));
assert_eq!(fragment_header.duration(), Some(2500.mseconds()));
for buffer_idx in 0..5 {
let buffer = h.pull().unwrap();
if buffer_idx == 4 {
assert_eq!(
buffer.flags(),
gst::BufferFlags::DELTA_UNIT | gst::BufferFlags::MARKER
);
} else {
assert_eq!(buffer.flags(), gst::BufferFlags::DELTA_UNIT);
}
assert_eq!(
buffer.pts(),
Some(5.seconds() + buffer_idx * 500.mseconds())
);
assert_eq!(
buffer.dts(),
Some(5.seconds() + buffer_idx * 500.mseconds())
);
assert_eq!(buffer.duration(), Some(500.mseconds()));
}
h.push_event(gst::event::Eos::new());
// There should be the second fragment now
let fragment_header = h.pull().unwrap();
assert_eq!(fragment_header.flags(), gst::BufferFlags::HEADER);
assert_eq!(fragment_header.pts(), Some(7500.mseconds()));
assert_eq!(fragment_header.dts(), Some(7500.mseconds()));
assert_eq!(fragment_header.duration(), Some(2500.mseconds()));
for buffer_idx in 0..5 {
let buffer = h.pull().unwrap();
if buffer_idx == 4 {
assert_eq!(
buffer.flags(),
gst::BufferFlags::DELTA_UNIT | gst::BufferFlags::MARKER
);
} else {
assert_eq!(buffer.flags(), gst::BufferFlags::DELTA_UNIT);
}
assert_eq!(
buffer.pts(),
Some(7500.mseconds() + buffer_idx * 500.mseconds())
);
assert_eq!(
buffer.dts(),
Some(7500.mseconds() + buffer_idx * 500.mseconds())
);
assert_eq!(buffer.duration(), Some(500.mseconds()));
}
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::StreamStart);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Caps);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Segment);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Eos);
}
#[test]
fn test_chunking_single_stream_manual_fragment() {
init();
let caps = gst::Caps::builder("video/x-h264")
.field("width", 1920i32)
.field("height", 1080i32)
.field("framerate", gst::Fraction::new(30, 1))
.field("stream-format", "avc")
.field("alignment", "au")
.field("codec_data", gst::Buffer::with_size(1).unwrap())
.build();
let mut h = gst_check::Harness::new("cmafmux");
// fragment duration long enough to be ignored, 1s chunk duration
h.element()
.unwrap()
.set_property("fragment-duration", 1.hours());
h.element()
.unwrap()
.set_property("chunk-duration", 1.seconds());
h.set_src_caps(caps);
h.play();
// request fragment at 4 seconds, should be created at 11th buffer
h.element()
.unwrap()
.emit_by_name::<()>("split-at-running-time", &[&4.seconds()]);
// Push 15 buffers of 0.5s each, 1st and 11th buffer without DELTA_UNIT flag
for i in 0..15 {
let mut buffer = gst::Buffer::with_size(1).unwrap();
{
let buffer = buffer.get_mut().unwrap();
buffer.set_pts(i * 500.mseconds());
buffer.set_dts(i * 500.mseconds());
buffer.set_duration(500.mseconds());
if i != 0 && i != 10 {
buffer.set_flags(gst::BufferFlags::DELTA_UNIT);
}
}
assert_eq!(h.push(buffer), Ok(gst::FlowSuccess::Ok));
if i == 2 {
let ev = loop {
let ev = h.pull_upstream_event().unwrap();
if ev.type_() != gst::EventType::Reconfigure
&& ev.type_() != gst::EventType::Latency
{
break ev;
}
};
assert_eq!(ev.type_(), gst::EventType::CustomUpstream);
assert_eq!(
gst_video::UpstreamForceKeyUnitEvent::parse(&ev).unwrap(),
gst_video::UpstreamForceKeyUnitEvent {
running_time: Some(4.seconds()),
all_headers: true,
count: 0
}
);
}
}
// Crank the clock: this should bring us to the end of the first fragment
h.crank_single_clock_wait().unwrap();
let header = h.pull().unwrap();
assert_eq!(
header.flags(),
gst::BufferFlags::HEADER | gst::BufferFlags::DISCONT
);
assert_eq!(header.pts(), Some(gst::ClockTime::ZERO));
assert_eq!(header.dts(), Some(gst::ClockTime::ZERO));
// There should be 7 chunks now, and the 1st and 6th are starting a fragment.
// Each chunk should have two buffers.
for chunk in 0..7 {
let chunk_header = h.pull().unwrap();
if chunk == 0 || chunk == 5 {
assert_eq!(chunk_header.flags(), gst::BufferFlags::HEADER);
} else {
assert_eq!(
chunk_header.flags(),
gst::BufferFlags::HEADER | gst::BufferFlags::DELTA_UNIT
);
}
assert_eq!(chunk_header.pts(), Some(chunk * 1.seconds()));
assert_eq!(chunk_header.dts(), Some(chunk * 1.seconds()));
assert_eq!(chunk_header.duration(), Some(1.seconds()));
for buffer_idx in 0..2 {
let buffer = h.pull().unwrap();
if buffer_idx == 1 {
assert_eq!(
buffer.flags(),
gst::BufferFlags::DELTA_UNIT | gst::BufferFlags::MARKER
);
} else {
assert_eq!(buffer.flags(), gst::BufferFlags::DELTA_UNIT);
}
assert_eq!(
buffer.pts(),
Some((chunk * 2 + buffer_idx) * 500.mseconds())
);
assert_eq!(
buffer.dts(),
Some((chunk * 2 + buffer_idx) * 500.mseconds())
);
assert_eq!(buffer.duration(), Some(500.mseconds()));
}
}
h.push_event(gst::event::Eos::new());
// There should be the remaining chunk now, containing one 500ms buffer.
for chunk in 7..8 {
let chunk_header = h.pull().unwrap();
assert_eq!(
chunk_header.flags(),
gst::BufferFlags::HEADER | gst::BufferFlags::DELTA_UNIT
);
assert_eq!(chunk_header.pts(), Some(chunk * 1.seconds()));
assert_eq!(chunk_header.dts(), Some(chunk * 1.seconds()));
assert_eq!(chunk_header.duration(), Some(500.mseconds()));
for buffer_idx in 0..1 {
let buffer = h.pull().unwrap();
assert_eq!(
buffer.flags(),
gst::BufferFlags::DELTA_UNIT | gst::BufferFlags::MARKER
);
assert_eq!(
buffer.pts(),
Some((chunk * 2 + buffer_idx) * 500.mseconds())
);
assert_eq!(
buffer.dts(),
Some((chunk * 2 + buffer_idx) * 500.mseconds())
);
assert_eq!(buffer.duration(), Some(500.mseconds()));
}
}
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::StreamStart);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Caps);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Segment);
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Eos);
}
#[test]
fn test_chunking_single_stream() {
init();
@ -1993,3 +2362,21 @@ fn test_chunking_single_stream_gops_after_fragment_end_after_next_chunk_end() {
let ev = h.pull_event().unwrap();
assert_eq!(ev.type_(), gst::EventType::Eos);
}
#[test]
fn test_roundtrip_vp9_flac() {
init();
let pipeline = gst::parse::launch(
r#"
videotestsrc num-buffers=99 ! vp9enc ! vp9parse ! mux.
audiotestsrc num-buffers=149 ! flacenc ! flacparse ! mux.
isofmp4mux name=mux ! qtdemux name=demux
demux.audio_0 ! queue ! flacdec ! fakesink
demux.video_0 ! queue ! vp9dec ! fakesink
"#,
)
.unwrap();
let pipeline = pipeline.downcast().unwrap();
to_completion(&pipeline);
}


@ -16,6 +16,7 @@ gst-audio = { workspace = true, features = ["v1_18"] }
gst-video = { workspace = true, features = ["v1_18"] }
gst-pbutils = { workspace = true, features = ["v1_18"] }
once_cell.workspace = true
bitstream-io = "2.3"
[lib]
name = "gstmp4"


@ -9,9 +9,11 @@
use gst::prelude::*;
use anyhow::{anyhow, bail, Context, Error};
use std::convert::TryFrom;
use std::str::FromStr;
use super::{ImageOrientation, IDENTITY_MATRIX};
fn write_box<T, F: FnOnce(&mut Vec<u8>) -> Result<T, Error>>(
vec: &mut Vec<u8>,
fourcc: impl std::borrow::Borrow<[u8; 4]>,
@ -56,18 +58,31 @@ fn write_full_box<T, F: FnOnce(&mut Vec<u8>) -> Result<T, Error>>(
}
/// Creates `ftyp` box
pub(super) fn create_ftyp(variant: super::Variant) -> Result<gst::Buffer, Error> {
pub(super) fn create_ftyp(
variant: super::Variant,
content_caps: &[&gst::CapsRef],
) -> Result<gst::Buffer, Error> {
let mut v = vec![];
let mut minor_version = 0u32;
let (brand, compatible_brands) = match variant {
let (brand, mut compatible_brands) = match variant {
super::Variant::ISO | super::Variant::ONVIF => (b"iso4", vec![b"mp41", b"mp42", b"isom"]),
};
for caps in content_caps {
let s = caps.structure(0).unwrap();
if let (super::Variant::ISO, "video/x-av1") = (variant, s.name().as_str()) {
minor_version = 1;
compatible_brands = vec![b"iso4", b"av01"];
break;
}
}
write_box(&mut v, b"ftyp", |v| {
// major brand
v.extend(brand);
// minor version
v.extend(0u32.to_be_bytes());
v.extend(minor_version.to_be_bytes());
// compatible brands
v.extend(compatible_brands.into_iter().flatten());
@ -382,9 +397,8 @@ fn write_tkhd(
// Volume
let s = stream.caps.structure(0).unwrap();
match s.name().as_str() {
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
v.extend((1u16 << 8).to_be_bytes())
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => v.extend((1u16 << 8).to_be_bytes()),
_ => v.extend(0u16.to_be_bytes()),
}
@ -392,21 +406,14 @@ fn write_tkhd(
v.extend([0u8; 2]);
// Matrix
v.extend(
[
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(16384u32 << 16).to_be_bytes(),
]
.into_iter()
.flatten(),
);
let matrix = match s.name().as_str() {
x if x.starts_with("video/") || x.starts_with("image/") => stream
.orientation
.unwrap_or(ImageOrientation::Rotate0)
.transform_matrix(),
_ => &IDENTITY_MATRIX,
};
v.extend(matrix.iter().flatten());
// Width/height
match s.name().as_str() {
@ -460,7 +467,6 @@ fn write_mdia(
fn language_code(lang: impl std::borrow::Borrow<[u8; 3]>) -> u16 {
let lang = lang.borrow();
// TODO: Need to relax this once we get the language code from tags
assert!(lang.iter().all(u8::is_ascii_lowercase));
(((lang[0] as u16 - 0x60) & 0x1F) << 10)
@ -470,7 +476,7 @@ fn language_code(lang: impl std::borrow::Borrow<[u8; 3]>) -> u16 {
fn write_mdhd(
v: &mut Vec<u8>,
_header: &super::Header,
header: &super::Header,
stream: &super::Stream,
creation_time: u64,
) -> Result<(), Error> {
@ -493,8 +499,11 @@ fn write_mdhd(
v.extend(duration.to_be_bytes());
// Language as ISO-639-2/T
// TODO: get actual language from the tags
v.extend(language_code(b"und").to_be_bytes());
if let Some(lang) = header.language_code {
v.extend(language_code(lang).to_be_bytes());
} else {
v.extend(language_code(b"und").to_be_bytes());
}
// Pre-defined
v.extend([0u8; 2]);
@ -514,9 +523,8 @@ fn write_hdlr(
let (handler_type, name) = match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => (b"vide", b"VideoHandler\0".as_slice()),
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
(b"soun", b"SoundHandler\0".as_slice())
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => (b"soun", b"SoundHandler\0".as_slice()),
"application/x-onvif-metadata" => (b"meta", b"MetadataHandler\0".as_slice()),
_ => unreachable!(),
};
@ -546,7 +554,8 @@ fn write_minf(
// Flags are always 1 for unspecified reasons
write_full_box(v, b"vmhd", FULL_BOX_VERSION_0, 1, |v| write_vmhd(v, header))?
}
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => {
write_full_box(v, b"smhd", FULL_BOX_VERSION_0, FULL_BOX_FLAGS_NONE, |v| {
write_smhd(v, header)
})?
@ -703,9 +712,8 @@ fn write_stsd(
match s.name().as_str() {
"video/x-h264" | "video/x-h265" | "video/x-vp8" | "video/x-vp9" | "video/x-av1"
| "image/jpeg" => write_visual_sample_entry(v, header, stream)?,
"audio/mpeg" | "audio/x-opus" | "audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
write_audio_sample_entry(v, header, stream)?
}
"audio/mpeg" | "audio/x-opus" | "audio/x-flac" | "audio/x-alaw" | "audio/x-mulaw"
| "audio/x-adpcm" => write_audio_sample_entry(v, header, stream)?,
"application/x-onvif-metadata" => write_xml_meta_data_sample_entry(v, header, stream)?,
_ => unreachable!(),
}
@ -916,8 +924,9 @@ fn write_visual_sample_entry(
_ => unreachable!(),
};
let level = 1; // FIXME
let tier = 0; // FIXME
// TODO: Use `gst_codec_utils_av1_get_seq_level_idx` when exposed in bindings
let level = av1_seq_level_idx(s.get::<&str>("level").ok());
let tier = av1_tier(s.get::<&str>("tier").ok());
let (high_bitdepth, twelve_bit) =
match s.get::<u32>("bit-depth-luma").unwrap() {
8 => (false, false),
@ -962,6 +971,10 @@ fn write_visual_sample_entry(
v.extend_from_slice(&codec_data);
}
if let Some(extra_data) = &stream.extra_header_data {
// unsigned int(8) configOBUs[];
v.extend_from_slice(extra_data.as_slice());
}
Ok(())
})?;
}
@ -1070,6 +1083,44 @@ fn write_visual_sample_entry(
Ok(())
}
fn av1_seq_level_idx(level: Option<&str>) -> u8 {
match level {
Some("2.0") => 0,
Some("2.1") => 1,
Some("2.2") => 2,
Some("2.3") => 3,
Some("3.0") => 4,
Some("3.1") => 5,
Some("3.2") => 6,
Some("3.3") => 7,
Some("4.0") => 8,
Some("4.1") => 9,
Some("4.2") => 10,
Some("4.3") => 11,
Some("5.0") => 12,
Some("5.1") => 13,
Some("5.2") => 14,
Some("5.3") => 15,
Some("6.0") => 16,
Some("6.1") => 17,
Some("6.2") => 18,
Some("6.3") => 19,
Some("7.0") => 20,
Some("7.1") => 21,
Some("7.2") => 22,
Some("7.3") => 23,
_ => 1,
}
}
fn av1_tier(tier: Option<&str>) -> u8 {
match tier {
Some("main") => 0,
Some("high") => 1,
_ => 0,
}
}
fn write_audio_sample_entry(
v: &mut Vec<u8>,
_header: &super::Header,
@ -1079,6 +1130,7 @@ fn write_audio_sample_entry(
let fourcc = match s.name().as_str() {
"audio/mpeg" => b"mp4a",
"audio/x-opus" => b"Opus",
"audio/x-flac" => b"fLaC",
"audio/x-alaw" => b"alaw",
"audio/x-mulaw" => b"ulaw",
"audio/x-adpcm" => {
@ -1097,6 +1149,10 @@ fn write_audio_sample_entry(
let bitrate = s.get::<i32>("bitrate").context("no ADPCM bitrate field")?;
(bitrate / 8000) as u16
}
"audio/x-flac" => with_flac_metadata(&stream.caps, |streaminfo, _| {
1 + (u16::from_be_bytes([streaminfo[16], streaminfo[17]]) >> 4 & 0b11111)
})
.context("FLAC metadata error")?,
_ => 16u16,
};
@ -1139,6 +1195,9 @@ fn write_audio_sample_entry(
"audio/x-opus" => {
write_dops(v, &stream.caps)?;
}
"audio/x-flac" => {
write_dfla(v, &stream.caps)?;
}
"audio/x-alaw" | "audio/x-mulaw" | "audio/x-adpcm" => {
// Nothing to do here
}
@ -1333,6 +1392,35 @@ fn write_dops(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
})
}
fn with_flac_metadata<R>(
caps: &gst::Caps,
cb: impl FnOnce(&[u8], &[gst::glib::SendValue]) -> R,
) -> Result<R, Error> {
let caps = caps.structure(0).unwrap();
let header = caps.get::<gst::ArrayRef>("streamheader").unwrap();
let (streaminfo, remainder) = header.as_ref().split_first().unwrap();
let streaminfo = streaminfo.get::<&gst::BufferRef>().unwrap();
let streaminfo = streaminfo.map_readable().unwrap();
// 13 bytes for the Ogg/FLAC prefix and 38 for the streaminfo itself.
match <&[_; 13 + 38]>::try_from(streaminfo.as_slice()) {
Ok(i) if i.starts_with(b"\x7FFLAC\x01\x00") => Ok(cb(&i[13..], remainder)),
Ok(_) | Err(_) => bail!("Unknown streamheader format"),
}
}
fn write_dfla(v: &mut Vec<u8>, caps: &gst::Caps) -> Result<(), Error> {
write_full_box(v, b"dfLa", 0, 0, move |v| {
with_flac_metadata(caps, |streaminfo, remainder| {
v.extend(streaminfo);
for metadata in remainder {
let metadata = metadata.get::<&gst::BufferRef>().unwrap();
let metadata = metadata.map_readable().unwrap();
v.extend(&metadata[..]);
}
})
})
}
fn write_xml_meta_data_sample_entry(
v: &mut Vec<u8>,
_header: &super::Header,


@ -15,9 +15,10 @@ use gst_base::subclass::prelude::*;
use std::collections::VecDeque;
use std::sync::Mutex;
use crate::mp4mux::obu::read_seq_header_obu_bytes;
use once_cell::sync::Lazy;
use super::boxes;
use super::{boxes, ImageOrientation};
/// Offset between NTP and UNIX epoch in seconds.
/// NTP = UNIX + NTP_UNIX_OFFSET.
@ -108,6 +109,8 @@ struct Stream {
caps: gst::Caps,
/// Whether this stream is intra-only and has frame reordering.
delta_frames: super::DeltaFrames,
/// Whether this stream might have header frames without timestamps that should be ignored.
discard_header_buffers: bool,
/// Already written out chunks with their samples for this stream
chunks: Vec<super::Chunk>,
@ -133,6 +136,11 @@ struct Stream {
/// In ONVIF mode, the mapping between running time and UTC time (UNIX)
running_time_utc_time_mapping: Option<(gst::Signed<gst::ClockTime>, gst::ClockTime)>,
extra_header_data: Option<Vec<u8>>,
/// Orientation from tags
orientation: Option<ImageOrientation>,
}
#[derive(Default)]
@ -151,6 +159,9 @@ struct State {
/// Size of the `mdat` as written so far.
mdat_size: u64,
/// Language code from tags
language_code: Option<[u8; 3]>,
}
#[derive(Default)]
@ -165,19 +176,24 @@ impl MP4Mux {
buffer: &gst::BufferRef,
sinkpad: &super::MP4MuxPad,
delta_frames: super::DeltaFrames,
discard_headers: bool,
) -> Result<(), gst::FlowError> {
if discard_headers && buffer.flags().contains(gst::BufferFlags::HEADER) {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
if delta_frames.requires_dts() && buffer.dts().is_none() {
gst::error!(CAT, obj: sinkpad, "Require DTS for video streams");
gst::error!(CAT, obj = sinkpad, "Require DTS for video streams");
return Err(gst::FlowError::Error);
}
if buffer.pts().is_none() {
gst::error!(CAT, obj: sinkpad, "Require timestamped buffers");
gst::error!(CAT, obj = sinkpad, "Require timestamped buffers");
return Err(gst::FlowError::Error);
}
if delta_frames.intra_only() && buffer.flags().contains(gst::BufferFlags::DELTA_UNIT) {
gst::error!(CAT, obj: sinkpad, "Intra-only stream with delta units");
gst::error!(CAT, obj = sinkpad, "Intra-only stream with delta units");
return Err(gst::FlowError::Error);
}
@ -188,6 +204,7 @@ impl MP4Mux {
&self,
sinkpad: &super::MP4MuxPad,
delta_frames: super::DeltaFrames,
discard_headers: bool,
pre_queue: &mut VecDeque<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>,
running_time_utc_time_mapping: &Option<(gst::Signed<gst::ClockTime>, gst::ClockTime)>,
) -> Result<Option<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>, gst::FlowError> {
@ -195,17 +212,14 @@ impl MP4Mux {
return Ok(Some((segment.clone(), buffer.clone())));
}
let mut buffer = match sinkpad.peek_buffer() {
None => return Ok(None),
Some(buffer) => buffer,
let Some(mut buffer) = sinkpad.peek_buffer() else {
return Ok(None);
};
Self::check_buffer(&buffer, sinkpad, delta_frames)?;
Self::check_buffer(&buffer, sinkpad, delta_frames, discard_headers)?;
let mut segment = match sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
None => {
gst::error!(CAT, obj: sinkpad, "Got buffer before segment");
gst::error!(CAT, obj = sinkpad, "Got buffer before segment");
return Err(gst::FlowError::Error);
}
};
@ -231,7 +245,7 @@ impl MP4Mux {
// Calculate from the mapping
running_time_to_utc_time(pts, running_time_utc_time_mapping).ok_or_else(
|| {
gst::error!(CAT, obj: sinkpad, "Stream has negative PTS UTC time");
gst::error!(CAT, obj = sinkpad, "Stream has negative PTS UTC time");
gst::FlowError::Error
},
)?
@ -241,7 +255,7 @@ impl MP4Mux {
gst::trace!(
CAT,
obj: sinkpad,
obj = sinkpad,
"Mapped PTS running time {pts} to UTC time {utc_time}"
);
@ -252,12 +266,12 @@ impl MP4Mux {
if let Some(dts) = dts {
let dts_utc_time =
running_time_to_utc_time(dts, (pts, utc_time)).ok_or_else(|| {
gst::error!(CAT, obj: sinkpad, "Stream has negative DTS UTC time");
gst::error!(CAT, obj = sinkpad, "Stream has negative DTS UTC time");
gst::FlowError::Error
})?;
gst::trace!(
CAT,
obj: sinkpad,
obj = sinkpad,
"Mapped DTS running time {dts} to UTC time {dts_utc_time}"
);
buffer.set_dts(dts_utc_time);
@ -276,19 +290,20 @@ impl MP4Mux {
fn pop_buffer(
&self,
sinkpad: &super::MP4MuxPad,
delta_frames: super::DeltaFrames,
pre_queue: &mut VecDeque<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>,
running_time_utc_time_mapping: &mut Option<(gst::Signed<gst::ClockTime>, gst::ClockTime)>,
stream: &mut Stream,
) -> Result<Option<(gst::FormattedSegment<gst::ClockTime>, gst::Buffer)>, gst::FlowError> {
let Stream {
sinkpad, pre_queue, ..
} = stream;
// In ONVIF mode we need to get UTC times for each buffer and synchronize based on that.
// Queue up to 6s of data to get the first UTC time and then backdate.
if self.obj().class().as_ref().variant == super::Variant::ONVIF
&& running_time_utc_time_mapping.is_none()
&& stream.running_time_utc_time_mapping.is_none()
{
if let Some((last, first)) = Option::zip(pre_queue.back(), pre_queue.front()) {
// Existence of PTS/DTS checked below
let (last, first) = if delta_frames.requires_dts() {
let (last, first) = if stream.delta_frames.requires_dts() {
(
last.0.to_running_time_full(last.1.dts()).unwrap(),
first.0.to_running_time_full(first.1.dts()).unwrap(),
@ -305,31 +320,32 @@ impl MP4Mux {
{
gst::error!(
CAT,
obj: sinkpad,
obj = sinkpad,
"Got no UTC time in the first 6s of the stream"
);
return Err(gst::FlowError::Error);
}
}
let buffer = match sinkpad.pop_buffer() {
None => {
if sinkpad.is_eos() {
gst::error!(CAT, obj: sinkpad, "Got no UTC time before EOS");
return Err(gst::FlowError::Error);
} else {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
let Some(buffer) = sinkpad.pop_buffer() else {
if sinkpad.is_eos() {
gst::error!(CAT, obj = sinkpad, "Got no UTC time before EOS");
return Err(gst::FlowError::Error);
} else {
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
Some(buffer) => buffer,
};
Self::check_buffer(&buffer, sinkpad, delta_frames)?;
Self::check_buffer(
&buffer,
sinkpad,
stream.delta_frames,
stream.discard_header_buffers,
)?;
let segment = match sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
None => {
gst::error!(CAT, obj: sinkpad, "Got buffer before segment");
gst::error!(CAT, obj = sinkpad, "Got buffer before segment");
return Err(gst::FlowError::Error);
}
};
@ -345,12 +361,12 @@ impl MP4Mux {
let running_time = segment.to_running_time_full(buffer.pts().unwrap()).unwrap();
gst::info!(
CAT,
obj: sinkpad,
obj = sinkpad,
"Got initial UTC time {utc_time} at PTS running time {running_time}",
);
let mapping = (running_time, utc_time);
*running_time_utc_time_mapping = Some(mapping);
stream.running_time_utc_time_mapping = Some(mapping);
// Push the buffer onto the pre-queue and re-timestamp it and all other buffers
// based on the mapping above.
@ -361,12 +377,12 @@ impl MP4Mux {
let pts = segment.to_running_time_full(buffer.pts().unwrap()).unwrap();
let pts_utc_time = running_time_to_utc_time(pts, mapping).ok_or_else(|| {
gst::error!(CAT, obj: sinkpad, "Stream has negative PTS UTC time");
gst::error!(CAT, obj = sinkpad, "Stream has negative PTS UTC time");
gst::FlowError::Error
})?;
gst::trace!(
CAT,
obj: sinkpad,
obj = sinkpad,
"Mapped PTS running time {pts} to UTC time {pts_utc_time}"
);
buffer.set_pts(pts_utc_time);
@ -374,12 +390,12 @@ impl MP4Mux {
if let Some(dts) = buffer.dts() {
let dts = segment.to_running_time_full(dts).unwrap();
let dts_utc_time = running_time_to_utc_time(dts, mapping).ok_or_else(|| {
gst::error!(CAT, obj: sinkpad, "Stream has negative DTS UTC time");
gst::error!(CAT, obj = sinkpad, "Stream has negative DTS UTC time");
gst::FlowError::Error
})?;
gst::trace!(
CAT,
obj: sinkpad,
obj = sinkpad,
"Mapped DTS running time {dts} to UTC time {dts_utc_time}"
);
buffer.set_dts(dts_utc_time);
@ -391,7 +407,7 @@ impl MP4Mux {
// Fall through below and pop the first buffer finally
}
if let Some((segment, buffer)) = pre_queue.pop_front() {
if let Some((segment, buffer)) = stream.pre_queue.pop_front() {
return Ok(Some((segment, buffer)));
}
@ -400,23 +416,26 @@ impl MP4Mux {
// for calculating the duration to the previous buffer, and then put into the pre-queue
// - or this is the very first buffer and we just put it into the queue ourselves above
if self.obj().class().as_ref().variant == super::Variant::ONVIF {
if sinkpad.is_eos() {
if stream.sinkpad.is_eos() {
return Ok(None);
}
unreachable!();
}
let buffer = match sinkpad.pop_buffer() {
None => return Ok(None),
Some(buffer) => buffer,
let Some(buffer) = stream.sinkpad.pop_buffer() else {
return Ok(None);
};
Self::check_buffer(
&buffer,
&stream.sinkpad,
stream.delta_frames,
stream.discard_header_buffers,
)?;
Self::check_buffer(&buffer, sinkpad, delta_frames)?;
let segment = match sinkpad.segment().downcast::<gst::ClockTime>().ok() {
let segment = match stream.sinkpad.segment().downcast::<gst::ClockTime>().ok() {
Some(segment) => segment,
None => {
gst::error!(CAT, obj: sinkpad, "Got buffer before segment");
gst::error!(CAT, obj = stream.sinkpad, "Got buffer before segment");
return Err(gst::FlowError::Error);
}
};
@ -442,6 +461,12 @@ impl MP4Mux {
Some(PendingBuffer {
duration: Some(_), ..
}) => return Ok(()),
Some(PendingBuffer { ref buffer, .. })
if stream.discard_header_buffers
&& buffer.flags().contains(gst::BufferFlags::HEADER) =>
{
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
Some(PendingBuffer {
timestamp,
pts,
@ -449,26 +474,28 @@ impl MP4Mux {
ref mut duration,
..
}) => {
// Already have a pending buffer but no duration, so try to get that now
let (segment, buffer) = match self.peek_buffer(
let peek_outcome = self.peek_buffer(
&stream.sinkpad,
stream.delta_frames,
stream.discard_header_buffers,
&mut stream.pre_queue,
&stream.running_time_utc_time_mapping,
)? {
)?;
// Already have a pending buffer but no duration, so try to get that now
let (segment, buffer) = match peek_outcome {
Some(res) => res,
None => {
if stream.sinkpad.is_eos() {
let dur = buffer.duration().unwrap_or(gst::ClockTime::ZERO);
gst::trace!(
CAT,
obj: stream.sinkpad,
obj = stream.sinkpad,
"Stream is EOS, using {dur} as duration for queued buffer",
);
let pts = pts + dur;
if stream.end_pts.map_or(true, |end_pts| end_pts < pts) {
gst::trace!(CAT, obj: stream.sinkpad, "Stream end PTS {pts}");
gst::trace!(CAT, obj = stream.sinkpad, "Stream end PTS {pts}");
stream.end_pts = Some(pts);
}
@ -476,7 +503,11 @@ impl MP4Mux {
return Ok(());
} else {
gst::trace!(CAT, obj: stream.sinkpad, "Stream has no buffer queued");
gst::trace!(
CAT,
obj = stream.sinkpad,
"Stream has no buffer queued"
);
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
}
@ -497,7 +528,7 @@ impl MP4Mux {
gst::trace!(
CAT,
obj: stream.sinkpad,
obj = stream.sinkpad,
"Stream has buffer with timestamp {next_timestamp} queued",
);
@ -507,7 +538,7 @@ impl MP4Mux {
.unwrap_or_else(|| {
gst::warning!(
CAT,
obj: stream.sinkpad,
obj = stream.sinkpad,
"Stream timestamps going backwards {next_timestamp} < {timestamp}",
);
gst::ClockTime::ZERO
@ -515,41 +546,57 @@ impl MP4Mux {
gst::trace!(
CAT,
obj: stream.sinkpad,
obj = stream.sinkpad,
"Using {dur} as duration for queued buffer",
);
let pts = pts + dur;
if stream.end_pts.map_or(true, |end_pts| end_pts < pts) {
gst::trace!(CAT, obj: stream.sinkpad, "Stream end PTS {pts}");
gst::trace!(CAT, obj = stream.sinkpad, "Stream end PTS {pts}");
stream.end_pts = Some(pts);
}
*duration = Some(dur);
// If the stream is AV1, we need to parse the SequenceHeader OBU to include in the
// extra data of the 'av1C' box. It makes the stream playable in some browsers.
let s = stream.caps.structure(0).unwrap();
if !buffer.flags().contains(gst::BufferFlags::DELTA_UNIT)
&& s.name().as_str() == "video/x-av1"
{
let buf_map = buffer.map_readable().map_err(|_| {
gst::error!(CAT, obj = stream.sinkpad, "Failed to map buffer");
gst::FlowError::Error
})?;
stream.extra_header_data = read_seq_header_obu_bytes(buf_map.as_slice())
.map_err(|_| {
gst::error!(
CAT,
obj = stream.sinkpad,
"Failed to parse AV1 SequenceHeader OBU"
);
gst::FlowError::Error
})?;
}
return Ok(());
}
None => {
// Have no buffer queued at all yet
let (segment, buffer) = match self.pop_buffer(
&stream.sinkpad,
stream.delta_frames,
&mut stream.pre_queue,
&mut stream.running_time_utc_time_mapping,
)? {
let (segment, buffer) = match self.pop_buffer(stream)? {
Some(res) => res,
None => {
if stream.sinkpad.is_eos() {
gst::trace!(
CAT,
obj: stream.sinkpad,
"Stream is EOS",
);
gst::trace!(CAT, obj = stream.sinkpad, "Stream is EOS",);
return Err(gst::FlowError::Eos);
} else {
gst::trace!(CAT, obj: stream.sinkpad, "Stream has no buffer queued");
gst::trace!(
CAT,
obj = stream.sinkpad,
"Stream has no buffer queued"
);
return Err(gst_base::AGGREGATOR_FLOW_NEED_DATA);
}
}
@ -559,9 +606,16 @@ impl MP4Mux {
let pts_position = buffer.pts().unwrap();
let dts_position = buffer.dts();
let pts = segment.to_running_time_full(pts_position).unwrap()
.positive().unwrap_or_else(|| {
gst::error!(CAT, obj: stream.sinkpad, "Stream has negative PTS running time");
let pts = segment
.to_running_time_full(pts_position)
.unwrap()
.positive()
.unwrap_or_else(|| {
gst::error!(
CAT,
obj = stream.sinkpad,
"Stream has negative PTS running time"
);
gst::ClockTime::ZERO
});
@ -573,7 +627,7 @@ impl MP4Mux {
let dts = dts.unwrap();
if stream.start_dts.is_none() {
gst::debug!(CAT, obj: stream.sinkpad, "Stream start DTS {dts}");
gst::debug!(CAT, obj = stream.sinkpad, "Stream start DTS {dts}");
stream.start_dts = Some(dts);
}
@ -586,7 +640,7 @@ impl MP4Mux {
.earliest_pts
.map_or(true, |earliest_pts| earliest_pts > pts)
{
gst::debug!(CAT, obj: stream.sinkpad, "Stream earliest PTS {pts}");
gst::debug!(CAT, obj = stream.sinkpad, "Stream earliest PTS {pts}");
stream.earliest_pts = Some(pts);
}
@ -595,7 +649,7 @@ impl MP4Mux {
let dts = dts.unwrap(); // set above
Some(i64::try_from((pts - dts).nseconds()).map_err(|_| {
gst::error!(CAT, obj: stream.sinkpad, "Too big PTS/DTS difference");
gst::error!(CAT, obj = stream.sinkpad, "Too big PTS/DTS difference");
gst::FlowError::Error
})?)
} else {
@ -604,7 +658,7 @@ impl MP4Mux {
gst::trace!(
CAT,
obj: stream.sinkpad,
obj = stream.sinkpad,
"Stream has buffer of size {} with timestamp {timestamp} pending",
buffer.size(),
);
@ -651,7 +705,7 @@ impl MP4Mux {
}))
{
gst::trace!(CAT,
obj: stream.sinkpad,
obj = stream.sinkpad,
"Continuing current chunk: single stream {single_stream}, or {} >= {} and {} >= {}",
gst::format::Bytes::from_u64(stream.queued_chunk_bytes),
settings.interleave_bytes.map(gst::format::Bytes::from_u64).display(),
@ -661,16 +715,25 @@ impl MP4Mux {
}
state.current_stream_idx = None;
gst::debug!(CAT,
obj: stream.sinkpad,
gst::debug!(
CAT,
obj = stream.sinkpad,
"Switching to next chunk: {} < {} and {} < {}",
gst::format::Bytes::from_u64(stream.queued_chunk_bytes),
settings.interleave_bytes.map(gst::format::Bytes::from_u64).display(),
stream.queued_chunk_time, settings.interleave_time.display(),
settings
.interleave_bytes
.map(gst::format::Bytes::from_u64)
.display(),
stream.queued_chunk_time,
settings.interleave_time.display(),
);
}
Err(gst::FlowError::Eos) => {
gst::debug!(CAT, obj: stream.sinkpad, "Stream is EOS, switching to next stream");
gst::debug!(
CAT,
obj = stream.sinkpad,
"Stream is EOS, switching to next stream"
);
state.current_stream_idx = None;
}
Err(err) => {
@ -699,10 +762,7 @@ impl MP4Mux {
let timestamp = stream.pending_buffer.as_ref().unwrap().timestamp;
gst::trace!(CAT,
obj: stream.sinkpad,
"Stream at timestamp {timestamp}",
);
gst::trace!(CAT, obj = stream.sinkpad, "Stream at timestamp {timestamp}",);
all_eos = false;
@ -730,21 +790,21 @@ impl MP4Mux {
}
if !all_have_data_or_eos {
gst::trace!(CAT, imp: self, "Not all streams have a buffer or are EOS");
gst::trace!(CAT, imp = self, "Not all streams have a buffer or are EOS");
Err(gst_base::AGGREGATOR_FLOW_NEED_DATA)
} else if all_eos {
gst::info!(CAT, imp: self, "All streams are EOS");
gst::info!(CAT, imp = self, "All streams are EOS");
Err(gst::FlowError::Eos)
} else if let Some((idx, stream, earliest_timestamp)) = earliest_stream {
gst::debug!(
CAT,
obj: stream.sinkpad,
obj = stream.sinkpad,
"Stream is earliest stream with timestamp {earliest_timestamp}",
);
gst::debug!(
CAT,
obj: stream.sinkpad,
obj = stream.sinkpad,
"Starting new chunk at offset {}",
state.current_offset,
);
@ -778,7 +838,7 @@ impl MP4Mux {
&& buffer.buffer.flags().contains(gst::BufferFlags::DROPPABLE)
&& buffer.buffer.size() == 0
{
gst::trace!(CAT, obj: stream.sinkpad, "Skipping gap buffer {buffer:?}");
gst::trace!(CAT, obj = stream.sinkpad, "Skipping gap buffer {buffer:?}");
// If a new chunk was just started for the gap buffer, don't bother and get rid
// of this chunk again for now and search for the next stream.
@ -796,10 +856,19 @@ impl MP4Mux {
if let Some(previous_sample) =
stream.chunks.last_mut().and_then(|c| c.samples.last_mut())
{
gst::trace!(CAT, obj: stream.sinkpad, "Adding gap duration {} to previous sample", buffer.duration.unwrap());
gst::trace!(
CAT,
obj = stream.sinkpad,
"Adding gap duration {} to previous sample",
buffer.duration.unwrap()
);
previous_sample.duration += buffer.duration.unwrap();
} else {
gst::trace!(CAT, obj: stream.sinkpad, "Resetting stream start time because it started with a gap");
gst::trace!(
CAT,
obj = stream.sinkpad,
"Resetting stream start time because it started with a gap"
);
// If there was no previous sample yet then the next sample needs to start
// earlier or alternatively we change the start PTS. We do the latter here
// as otherwise the first sample would be displayed too early.
@ -811,7 +880,12 @@ impl MP4Mux {
continue;
}
gst::trace!(CAT, obj: stream.sinkpad, "Handling buffer {buffer:?} at offset {}", state.current_offset);
gst::trace!(
CAT,
obj = stream.sinkpad,
"Handling buffer {buffer:?} at offset {}",
state.current_offset
);
let duration = buffer.duration.unwrap();
let composition_time_offset = buffer.composition_time_offset;
@ -849,7 +923,7 @@ impl MP4Mux {
}
fn create_streams(&self, state: &mut State) -> Result<(), gst::FlowError> {
gst::info!(CAT, imp: self, "Creating streams");
gst::info!(CAT, imp = self, "Creating streams");
for pad in self
.obj()
@ -860,20 +934,21 @@ impl MP4Mux {
let caps = match pad.current_caps() {
Some(caps) => caps,
None => {
gst::warning!(CAT, obj: pad, "Skipping pad without caps");
gst::warning!(CAT, obj = pad, "Skipping pad without caps");
continue;
}
};
gst::info!(CAT, obj: pad, "Configuring caps {caps:?}");
gst::info!(CAT, obj = pad, "Configuring caps {caps:?}");
let s = caps.structure(0).unwrap();
let mut delta_frames = super::DeltaFrames::IntraOnly;
let mut discard_header_buffers = false;
match s.name().as_str() {
"video/x-h264" | "video/x-h265" => {
if !s.has_field_with_type("codec_data", gst::Buffer::static_type()) {
gst::error!(CAT, obj: pad, "Received caps without codec_data");
gst::error!(CAT, obj = pad, "Received caps without codec_data");
return Err(gst::FlowError::NotNegotiated);
}
delta_frames = super::DeltaFrames::Bidirectional;
@ -883,7 +958,7 @@ impl MP4Mux {
}
"video/x-vp9" => {
if !s.has_field_with_type("colorimetry", str::static_type()) {
gst::error!(CAT, obj: pad, "Received caps without colorimetry");
gst::error!(CAT, obj = pad, "Received caps without colorimetry");
return Err(gst::FlowError::NotNegotiated);
}
delta_frames = super::DeltaFrames::PredictiveOnly;
@ -894,7 +969,7 @@ impl MP4Mux {
"image/jpeg" => (),
"audio/mpeg" => {
if !s.has_field_with_type("codec_data", gst::Buffer::static_type()) {
gst::error!(CAT, obj: pad, "Received caps without codec_data");
gst::error!(CAT, obj = pad, "Received caps without codec_data");
return Err(gst::FlowError::NotNegotiated);
}
}
@ -905,14 +980,26 @@ impl MP4Mux {
.and_then(|a| a.first().and_then(|v| v.get::<gst::Buffer>().ok()))
{
if gst_pbutils::codec_utils_opus_parse_header(&header, None).is_err() {
gst::error!(CAT, obj: pad, "Received invalid Opus header");
gst::error!(CAT, obj = pad, "Received invalid Opus header");
return Err(gst::FlowError::NotNegotiated);
}
} else if gst_pbutils::codec_utils_opus_parse_caps(&caps, None).is_err() {
gst::error!(CAT, obj: pad, "Received invalid Opus caps");
gst::error!(CAT, obj = pad, "Received invalid Opus caps");
return Err(gst::FlowError::NotNegotiated);
}
}
"audio/x-flac" => {
discard_header_buffers = true;
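// The FLAC streamheader (STREAMINFO and any other metadata blocks) is needed to
// build the FLAC sample entry, so caps without it are rejected here.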
if let Err(e) = s.get::<gst::ArrayRef>("streamheader") {
gst::error!(
CAT,
obj = pad,
"Muxing FLAC into MP4 needs streamheader: {}",
e
);
return Err(gst::FlowError::NotNegotiated);
};
}
"audio/x-alaw" | "audio/x-mulaw" => (),
"audio/x-adpcm" => (),
"application/x-onvif-metadata" => (),
@ -924,6 +1011,7 @@ impl MP4Mux {
pre_queue: VecDeque::new(),
caps,
delta_frames,
discard_header_buffers,
chunks: Vec::new(),
pending_buffer: None,
queued_chunk_time: gst::ClockTime::ZERO,
@ -932,11 +1020,13 @@ impl MP4Mux {
earliest_pts: None,
end_pts: None,
running_time_utc_time_mapping: None,
extra_header_data: None,
orientation: None,
});
}
if state.streams.is_empty() {
gst::error!(CAT, imp: self, "No streams available");
gst::error!(CAT, imp = self, "No streams available");
return Err(gst::FlowError::Error);
}
@ -1071,7 +1161,7 @@ impl ElementImpl for MP4Mux {
if !state.streams.is_empty() {
gst::error!(
CAT,
imp: self,
imp = self,
"Can't request new pads after stream was started"
);
return None;
@ -1093,7 +1183,7 @@ impl AggregatorImpl for MP4Mux {
) -> bool {
use gst::QueryViewMut;
gst::trace!(CAT, obj: aggregator_pad, "Handling query {query:?}");
gst::trace!(CAT, obj = aggregator_pad, "Handling query {query:?}");
match query.view_mut() {
QueryViewMut::Caps(q) => {
@ -1127,14 +1217,14 @@ impl AggregatorImpl for MP4Mux {
) -> Result<gst::FlowSuccess, gst::FlowError> {
use gst::EventView;
gst::trace!(CAT, obj: aggregator_pad, "Handling event {event:?}");
gst::trace!(CAT, obj = aggregator_pad, "Handling event {event:?}");
match event.view() {
EventView::Segment(ev) => {
if ev.segment().format() != gst::Format::Time {
gst::warning!(
CAT,
obj: aggregator_pad,
obj = aggregator_pad,
"Received non-TIME segment, replacing with default TIME segment"
);
let segment = gst::FormattedSegment::<gst::ClockTime>::new();
@ -1144,6 +1234,57 @@ impl AggregatorImpl for MP4Mux {
}
self.parent_sink_event_pre_queue(aggregator_pad, event)
}
EventView::Tag(ev) => {
if let Some(tag_value) = ev.tag().get::<gst::tags::LanguageCode>() {
let lang = tag_value.get();
gst::trace!(
CAT,
imp = self,
"Received language code from tags: {:?}",
lang
);
// Language as ISO-639-2/T
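// (three lowercase ASCII letters, e.g. "eng"); anything else is ignored.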
if lang.len() == 3 && lang.chars().all(|c| c.is_ascii_lowercase()) {
let mut state = self.state.lock().unwrap();
let mut language_code: [u8; 3] = [0; 3];
for (out, c) in Iterator::zip(language_code.iter_mut(), lang.chars()) {
*out = c as u8;
}
state.language_code = Some(language_code);
}
} else if let Some(tag_value) = ev.tag().get::<gst::tags::ImageOrientation>() {
let orientation = tag_value.get();
gst::trace!(
CAT,
obj = aggregator_pad,
"Received image orientation from tags: {:?}",
orientation
);
let mut state = self.state.lock().unwrap();
for stream in &mut state.streams {
if &stream.sinkpad == aggregator_pad {
stream.orientation = match orientation {
"rotate-0" => Some(ImageOrientation::Rotate0),
"rotate-90" => Some(ImageOrientation::Rotate90),
"rotate-180" => Some(ImageOrientation::Rotate180),
"rotate-270" => Some(ImageOrientation::Rotate270),
// TODO:
// "flip-rotate-0" => Some(ImageOrientation::FlipRotate0),
// "flip-rotate-90" => Some(ImageOrientation::FlipRotate90),
// "flip-rotate-180" => Some(ImageOrientation::FlipRotate180),
// "flip-rotate-270" => Some(ImageOrientation::FlipRotate270),
_ => None,
};
break;
}
}
}
self.parent_sink_event_pre_queue(aggregator_pad, event)
}
_ => self.parent_sink_event_pre_queue(aggregator_pad, event),
}
}
@ -1151,7 +1292,7 @@ impl AggregatorImpl for MP4Mux {
fn sink_event(&self, aggregator_pad: &gst_base::AggregatorPad, event: gst::Event) -> bool {
use gst::EventView;
gst::trace!(CAT, obj: aggregator_pad, "Handling event {event:?}");
gst::trace!(CAT, obj = aggregator_pad, "Handling event {event:?}");
match event.view() {
EventView::Tag(_ev) => {
@ -1166,7 +1307,7 @@ impl AggregatorImpl for MP4Mux {
fn src_query(&self, query: &mut gst::QueryRef) -> bool {
use gst::QueryViewMut;
gst::trace!(CAT, imp: self, "Handling query {query:?}");
gst::trace!(CAT, imp = self, "Handling query {query:?}");
match query.view_mut() {
QueryViewMut::Seeking(q) => {
@ -1181,7 +1322,7 @@ impl AggregatorImpl for MP4Mux {
fn src_event(&self, event: gst::Event) -> bool {
use gst::EventView;
gst::trace!(CAT, imp: self, "Handling event {event:?}");
gst::trace!(CAT, imp = self, "Handling event {event:?}");
match event.view() {
EventView::Seek(_ev) => false,
@ -1190,7 +1331,7 @@ impl AggregatorImpl for MP4Mux {
}
fn flush(&self) -> Result<gst::FlowSuccess, gst::FlowError> {
gst::info!(CAT, imp: self, "Flushing");
gst::info!(CAT, imp = self, "Flushing");
let mut state = self.state.lock().unwrap();
for stream in &mut state.streams {
@ -1204,7 +1345,7 @@ impl AggregatorImpl for MP4Mux {
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
gst::trace!(CAT, imp: self, "Stopping");
gst::trace!(CAT, imp = self, "Stopping");
let _ = self.parent_stop();
@ -1214,7 +1355,7 @@ impl AggregatorImpl for MP4Mux {
}
fn start(&self) -> Result<(), gst::ErrorMessage> {
gst::trace!(CAT, imp: self, "Starting");
gst::trace!(CAT, imp = self, "Starting");
self.parent_start()?;
@ -1255,7 +1396,7 @@ impl AggregatorImpl for MP4Mux {
}
} else {
// Can't query downstream, have to assume downstream is seekable
gst::warning!(CAT, imp: self, "Can't query downstream for seekability");
gst::warning!(CAT, imp = self, "Can't query downstream for seekability");
}
state = self.state.lock().unwrap();
@ -1270,15 +1411,23 @@ impl AggregatorImpl for MP4Mux {
gst::info!(
CAT,
imp: self,
imp = self,
"Creating ftyp box at offset {}",
state.current_offset
);
// ... and then create the ftyp box plus mdat box header so we can start outputting
// actual data
let ftyp = boxes::create_ftyp(self.obj().class().as_ref().variant).map_err(|err| {
gst::error!(CAT, imp: self, "Failed to create ftyp box: {err}");
let ftyp = boxes::create_ftyp(
self.obj().class().as_ref().variant,
&state
.streams
.iter()
.map(|s| s.caps.as_ref())
.collect::<Vec<_>>(),
)
.map_err(|err| {
gst::error!(CAT, imp = self, "Failed to create ftyp box: {err}");
gst::FlowError::Error
})?;
state.current_offset += ftyp.size() as u64;
@ -1286,13 +1435,13 @@ impl AggregatorImpl for MP4Mux {
gst::info!(
CAT,
imp: self,
imp = self,
"Creating mdat box header at offset {}",
state.current_offset
);
state.mdat_offset = Some(state.current_offset);
let mdat = boxes::create_mdat_header(None).map_err(|err| {
gst::error!(CAT, imp: self, "Failed to create mdat box header: {err}");
gst::error!(CAT, imp = self, "Failed to create mdat box header: {err}");
gst::FlowError::Error
})?;
state.current_offset += mdat.size() as u64;
@ -1313,7 +1462,7 @@ impl AggregatorImpl for MP4Mux {
gst::info!(
CAT,
imp: self,
imp = self,
"Creating moov box now, mdat ends at offset {} with size {}",
state.current_offset,
state.mdat_size
@ -1336,6 +1485,8 @@ impl AggregatorImpl for MP4Mux {
earliest_pts,
end_pts,
chunks: stream.chunks,
extra_header_data: stream.extra_header_data.clone(),
orientation: stream.orientation,
});
}
@ -1343,9 +1494,10 @@ impl AggregatorImpl for MP4Mux {
variant: self.obj().class().as_ref().variant,
movie_timescale: settings.movie_timescale,
streams,
language_code: state.language_code,
})
.map_err(|err| {
gst::error!(CAT, imp: self, "Failed to create moov box: {err}");
gst::error!(CAT, imp = self, "Failed to create moov box: {err}");
gst::FlowError::Error
})?;
state.current_offset += moov.size() as u64;
@ -1360,7 +1512,7 @@ impl AggregatorImpl for MP4Mux {
if !buffers.is_empty() {
if let Err(err) = self.obj().finish_buffer_list(buffers) {
gst::error!(CAT, imp: self, "Failed pushing buffers: {err:?}");
gst::error!(CAT, imp = self, "Failed pushing buffers: {err:?}");
return Err(err);
}
}
@ -1371,7 +1523,7 @@ impl AggregatorImpl for MP4Mux {
if let Some(mdat_offset) = state.mdat_offset {
gst::info!(
CAT,
imp: self,
imp = self,
"Rewriting mdat box header at offset {mdat_offset} with size {} now",
state.mdat_size,
);
@ -1379,7 +1531,7 @@ impl AggregatorImpl for MP4Mux {
segment.set_start(gst::format::Bytes::from_u64(mdat_offset));
state.current_offset = mdat_offset;
let mdat = boxes::create_mdat_header(Some(state.mdat_size)).map_err(|err| {
gst::error!(CAT, imp: self, "Failed to create mdat box header: {err}");
gst::error!(CAT, imp = self, "Failed to create mdat box header: {err}");
gst::FlowError::Error
})?;
drop(state);
@ -1388,7 +1540,7 @@ impl AggregatorImpl for MP4Mux {
if let Err(err) = self.obj().finish_buffer(mdat) {
gst::error!(
CAT,
imp: self,
imp = self,
"Failed pushing updated mdat box header buffer downstream: {err:?}",
);
}
@ -1523,6 +1675,11 @@ impl ElementImpl for ISOMP4Mux {
.field("channels", gst::IntRange::new(1i32, 8))
.field("rate", gst::IntRange::new(1, i32::MAX))
.build(),
gst::Structure::builder("audio/x-flac")
.field("framed", true)
.field("channels", gst::IntRange::<i32>::new(1, 8))
.field("rate", gst::IntRange::<i32>::new(1, 10 * u16::MAX as i32))
.build(),
]
.into_iter()
.collect::<gst::Caps>(),
@ -1709,7 +1866,7 @@ impl AggregatorPadImpl for MP4MuxPad {
let mux = aggregator.downcast_ref::<super::MP4Mux>().unwrap();
let mut mux_state = mux.imp().state.lock().unwrap();
gst::info!(CAT, imp: self, "Flushing");
gst::info!(CAT, imp = self, "Flushing");
for stream in &mut mux_state.streams {
if stream.sinkpad == *self.obj() {


@ -11,6 +11,7 @@ use gst::prelude::*;
mod boxes;
mod imp;
mod obu;
glib::wrapper! {
pub(crate) struct MP4MuxPad(ObjectSubclass<imp::MP4MuxPad>) @extends gst_base::AggregatorPad, gst::Pad, gst::Object;
@ -50,6 +51,80 @@ pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
Ok(())
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum ImageOrientation {
Rotate0,
Rotate90,
Rotate180,
Rotate270,
// TODO:
// FlipRotate0,
// FlipRotate90,
// FlipRotate180,
// FlipRotate270,
}
type TransformMatrix = [[u8; 4]; 9];
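// The matrices below follow the ISO BMFF convention: a 3x3 matrix stored row-major
// as {a, b, u, c, d, v, x, y, w}, where a, b, c, d, x and y are 16.16 fixed-point
// values and u, v, w are 2.30 fixed-point values, hence the (1 << 16) and (1 << 30)
// terms, each serialized as 4 big-endian bytes.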
const IDENTITY_MATRIX: TransformMatrix = [
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
const ROTATE_90_MATRIX: TransformMatrix = [
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
const ROTATE_180_MATRIX: TransformMatrix = [
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
const ROTATE_270_MATRIX: TransformMatrix = [
0u32.to_be_bytes(),
(-1i32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 16).to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
0u32.to_be_bytes(),
(1u32 << 30).to_be_bytes(),
];
impl ImageOrientation {
pub(crate) fn transform_matrix(&self) -> &'static TransformMatrix {
match self {
ImageOrientation::Rotate0 => &IDENTITY_MATRIX,
ImageOrientation::Rotate90 => &ROTATE_90_MATRIX,
ImageOrientation::Rotate180 => &ROTATE_180_MATRIX,
ImageOrientation::Rotate270 => &ROTATE_270_MATRIX,
}
}
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum DeltaFrames {
/// Only single completely decodable frames
@ -126,6 +201,12 @@ pub(crate) struct Stream {
/// All the chunks stored for this stream
chunks: Vec<Chunk>,
// More data to be included in the fragmented stream header
extra_header_data: Option<Vec<u8>>,
/// Orientation from tags
orientation: Option<ImageOrientation>,
}
#[derive(Debug)]
@ -135,6 +216,7 @@ pub(crate) struct Header {
/// Pre-defined movie timescale if not 0.
movie_timescale: u32,
streams: Vec<Stream>,
language_code: Option<[u8; 3]>,
}
#[allow(clippy::upper_case_acronyms)]

mux/mp4/src/mp4mux/obu.rs (new file, 303 lines)

@ -0,0 +1,303 @@
//
// Copyright (C) 2022 Vivienne Watermeier <vwatermeier@igalia.com>
//
// This Source Code Form is subject to the terms of the Mozilla Public License, v2.0.
// If a copy of the MPL was not distributed with this file, You can obtain one at
// <https://mozilla.org/MPL/2.0/>.
//
// SPDX-License-Identifier: MPL-2.0
#![allow(non_camel_case_types)]
use bitstream_io::{BigEndian, BitRead, BitReader, Endianness};
use std::io::{self, Cursor, Read, Seek, SeekFrom};
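/// Reads an unsigned LEB128 value as used for AV1 OBU size fields: each byte
/// contributes its 7 low bits and a set high bit signals that another byte follows
/// (at most 8 bytes are consumed). Returns the decoded value together with the
/// number of bytes read.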
pub fn parse_leb128<R, E>(reader: &mut BitReader<R, E>) -> io::Result<(u32, u32)>
where
R: Read + Seek,
E: Endianness,
{
let mut value = 0;
let mut num_bytes = 0;
for i in 0..8 {
let byte = reader.read::<u32>(8)?;
value |= (byte & 0x7f) << (i * 7);
num_bytes += 1;
if byte & 0x80 == 0 {
break;
}
}
reader.byte_align();
Ok((value, num_bytes))
}
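// parse_leb128 example: the bytes [0x96, 0x01] decode to 150 (0x16 | (0x01 << 7)), consuming 2 bytes.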
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct SizedObu {
pub obu_type: ObuType,
pub has_extension: bool,
/// If the OBU header is followed by a leb128 size field.
pub has_size_field: bool,
pub temporal_id: u8,
pub spatial_id: u8,
/// size of the OBU payload in bytes.
/// This may refer to different sizes in different contexts, not always
/// to the entire OBU payload as it is in the AV1 bitstream.
pub size: u32,
/// the number of bytes the leb128 size field will take up
/// when written with write_leb128().
/// This does not imply `has_size_field`, and does not necessarily match with
/// the length of the internal size field if present.
pub leb_size: u32,
pub header_len: u32,
/// indicates that only part of this OBU has been processed so far
pub is_fragment: bool,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ObuType {
Reserved,
SequenceHeader,
TemporalDelimiter,
FrameHeader,
TileGroup,
Metadata,
Frame,
RedundantFrameHeader,
TileList,
Padding,
}
impl Default for ObuType {
fn default() -> Self {
Self::Reserved
}
}
impl SizedObu {
/// Parse an OBU header and size field. If the OBU is not expected to contain
/// a size field, but the size is known from external information,
/// parse as an `UnsizedObu` and use `to_sized`.
pub fn parse<R, E>(reader: &mut BitReader<R, E>) -> io::Result<Self>
where
R: Read + Seek,
E: Endianness,
{
// check the forbidden bit
if reader.read_bit()? {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"forbidden bit in OBU header is set",
));
}
let obu_type = reader.read::<u8>(4)?.into();
let has_extension = reader.read_bit()?;
// require a size field
if !reader.read_bit()? {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"expected a size field",
));
}
// ignore the reserved bit
let _ = reader.read_bit()?;
let (temporal_id, spatial_id) = if has_extension {
(reader.read::<u8>(3)?, reader.read::<u8>(2)?)
} else {
(0, 0)
};
reader.byte_align();
let (size, leb_size) = parse_leb128(reader)?;
Ok(Self {
obu_type,
has_extension,
has_size_field: true,
temporal_id,
spatial_id,
size,
leb_size,
header_len: has_extension as u32 + 1,
is_fragment: false,
})
}
/// The amount of bytes this OBU will take up, including the space needed for
/// its leb128 size field.
pub fn full_size(&self) -> u32 {
self.size + self.leb_size + self.header_len
}
}
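/// Scans a buffer of concatenated OBUs and returns a copy of the first Sequence
/// Header OBU (header, size field and payload), or `None` if no sequence header
/// is found before the data ends or parsing fails.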
pub fn read_seq_header_obu_bytes(data: &[u8]) -> io::Result<Option<Vec<u8>>> {
let mut cursor = Cursor::new(data);
while cursor.position() < data.len() as u64 {
let obu_start = cursor.position();
let Ok(obu) = SizedObu::parse(&mut BitReader::endian(&mut cursor, BigEndian)) else {
break;
};
// set reader to the beginning of the OBU
cursor.seek(SeekFrom::Start(obu_start))?;
if obu.obu_type != ObuType::SequenceHeader {
// Skip the full OBU
cursor.seek(SeekFrom::Current(obu.full_size() as i64))?;
continue;
};
// read the full OBU
let mut bytes = vec![0; obu.full_size() as usize];
cursor.read_exact(&mut bytes)?;
return Ok(Some(bytes));
}
Ok(None)
}
impl From<u8> for ObuType {
fn from(n: u8) -> Self {
assert!(n < 16);
match n {
1 => Self::SequenceHeader,
2 => Self::TemporalDelimiter,
3 => Self::FrameHeader,
4 => Self::TileGroup,
5 => Self::Metadata,
6 => Self::Frame,
7 => Self::RedundantFrameHeader,
8 => Self::TileList,
15 => Self::Padding,
_ => Self::Reserved,
}
}
}
impl From<ObuType> for u8 {
fn from(ty: ObuType) -> Self {
match ty {
ObuType::Reserved => 0,
ObuType::SequenceHeader => 1,
ObuType::TemporalDelimiter => 2,
ObuType::FrameHeader => 3,
ObuType::TileGroup => 4,
ObuType::Metadata => 5,
ObuType::Frame => 6,
ObuType::RedundantFrameHeader => 7,
ObuType::TileList => 8,
ObuType::Padding => 15,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use bitstream_io::{BigEndian, BitReader};
use once_cell::sync::Lazy;
use std::io::Cursor;
#[allow(clippy::type_complexity)]
static OBUS: Lazy<Vec<(SizedObu, Vec<u8>)>> = Lazy::new(|| {
vec![
(
SizedObu {
obu_type: ObuType::TemporalDelimiter,
has_extension: false,
has_size_field: true,
temporal_id: 0,
spatial_id: 0,
size: 0,
leb_size: 1,
header_len: 1,
is_fragment: false,
},
vec![0b0001_0010, 0b0000_0000],
),
(
SizedObu {
obu_type: ObuType::Padding,
has_extension: false,
has_size_field: true,
temporal_id: 0,
spatial_id: 0,
size: 10,
leb_size: 1,
header_len: 1,
is_fragment: false,
},
vec![0b0111_1010, 0b0000_1010, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
),
(
SizedObu {
obu_type: ObuType::SequenceHeader,
has_extension: true,
has_size_field: true,
temporal_id: 4,
spatial_id: 3,
size: 5,
leb_size: 1,
header_len: 2,
is_fragment: false,
},
vec![0b0000_1110, 0b1001_1000, 0b0000_0101, 1, 2, 3, 4, 5],
),
(
SizedObu {
obu_type: ObuType::Frame,
has_extension: true,
has_size_field: true,
temporal_id: 4,
spatial_id: 3,
size: 5,
leb_size: 1,
header_len: 2,
is_fragment: false,
},
vec![0b0011_0110, 0b1001_1000, 0b0000_0101, 1, 2, 3, 4, 5],
),
]
});
#[test]
fn test_parse_rtp_obu() {
for (idx, (sized_obu, raw_bytes)) in (*OBUS).iter().enumerate() {
println!("running test {idx}...");
let mut reader = BitReader::endian(Cursor::new(&raw_bytes), BigEndian);
let obu_parsed = SizedObu::parse(&mut reader).unwrap();
assert_eq!(&obu_parsed, sized_obu);
if let Some(seq_header_obu_bytes) = read_seq_header_obu_bytes(raw_bytes).unwrap() {
println!("validation of sequence header obu read/write...");
assert_eq!(&seq_header_obu_bytes, raw_bytes);
}
}
}
#[test]
fn test_read_seq_header_from_bitstream() {
let mut bitstream = Vec::new();
let mut seq_header_bytes_raw = None;
for (obu, raw_bytes) in (*OBUS).iter() {
bitstream.extend(raw_bytes);
if obu.obu_type == ObuType::SequenceHeader {
seq_header_bytes_raw = Some(raw_bytes.clone());
}
}
let seq_header_obu_bytes = read_seq_header_obu_bytes(&bitstream).unwrap().unwrap();
assert_eq!(seq_header_obu_bytes, seq_header_bytes_raw.unwrap());
}
}


@ -7,6 +7,8 @@
// SPDX-License-Identifier: MPL-2.0
//
use std::path::Path;
use gst::prelude::*;
use gst_pbutils::prelude::*;
@ -20,33 +22,57 @@ fn init() {
});
}
#[test]
fn test_basic() {
init();
struct Pipeline(gst::Pipeline);
impl std::ops::Deref for Pipeline {
type Target = gst::Pipeline;
struct Pipeline(gst::Pipeline);
impl std::ops::Deref for Pipeline {
type Target = gst::Pipeline;
fn deref(&self) -> &Self::Target {
&self.0
}
fn deref(&self) -> &Self::Target {
&self.0
}
impl Drop for Pipeline {
fn drop(&mut self) {
let _ = self.0.set_state(gst::State::Null);
}
}
impl Drop for Pipeline {
fn drop(&mut self) {
let _ = self.0.set_state(gst::State::Null);
}
}
let pipeline = match gst::parse::launch(
"videotestsrc num-buffers=99 ! x264enc ! mux. \
audiotestsrc num-buffers=140 ! fdkaacenc ! mux. \
isomp4mux name=mux ! filesink name=sink \
",
) {
Ok(pipeline) => Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap()),
Err(_) => return,
impl Pipeline {
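// Runs the pipeline to EOS, panicking on any error message, then shuts it down.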
fn into_completion(self) {
self.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in self.bus().unwrap().iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
panic!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
_ => (),
}
}
self.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
}
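// Builds a muxing pipeline with the given video/audio encoder descriptions, writes
// the result to a temporary MP4 file and passes its path to the callback.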
fn test_basic_with(video_enc: &str, audio_enc: &str, cb: impl FnOnce(&Path)) {
let Ok(pipeline) = gst::parse::launch(&format!(
"videotestsrc num-buffers=99 ! {video_enc} ! mux. \
audiotestsrc num-buffers=140 ! {audio_enc} ! mux. \
isomp4mux name=mux ! filesink name=sink"
)) else {
println!("could not build encoding pipeline");
return;
};
let pipeline = Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap());
let dir = tempfile::TempDir::new().unwrap();
let mut location = dir.path().to_owned();
@ -54,73 +80,95 @@ fn test_basic() {
let sink = pipeline.by_name("sink").unwrap();
sink.set_property("location", location.to_str().expect("Non-UTF8 filename"));
pipeline.into_completion();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in pipeline.bus().unwrap().iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
panic!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
}
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
drop(pipeline);
let discoverer = gst_pbutils::Discoverer::new(gst::ClockTime::from_seconds(5))
.expect("Failed to create discoverer");
let info = discoverer
.discover_uri(
url::Url::from_file_path(&location)
.expect("Failed to convert filename to URL")
.as_str(),
)
.expect("Failed to discover MP4 file");
assert_eq!(info.duration(), Some(gst::ClockTime::from_mseconds(3_300)));
let audio_streams = info.audio_streams();
assert_eq!(audio_streams.len(), 1);
let audio_stream = &audio_streams[0];
assert_eq!(audio_stream.channels(), 1);
assert_eq!(audio_stream.sample_rate(), 44_100);
let caps = audio_stream.caps().unwrap();
assert!(
caps.can_intersect(
&gst::Caps::builder("audio/mpeg")
.any_features()
.field("mpegversion", 4i32)
.build()
),
"Unexpected audio caps {caps:?}"
);
let video_streams = info.video_streams();
assert_eq!(video_streams.len(), 1);
let video_stream = &video_streams[0];
assert_eq!(video_stream.width(), 320);
assert_eq!(video_stream.height(), 240);
assert_eq!(video_stream.framerate(), gst::Fraction::new(30, 1));
assert_eq!(video_stream.par(), gst::Fraction::new(1, 1));
assert!(!video_stream.is_interlaced());
let caps = video_stream.caps().unwrap();
assert!(
caps.can_intersect(&gst::Caps::builder("video/x-h264").any_features().build()),
"Unexpected video caps {caps:?}"
);
cb(&location)
}
#[test]
fn test_basic_x264_aac() {
init();
test_basic_with("x264enc", "fdkaacenc", |location| {
let discoverer = gst_pbutils::Discoverer::new(gst::ClockTime::from_seconds(5))
.expect("Failed to create discoverer");
let info = discoverer
.discover_uri(
url::Url::from_file_path(location)
.expect("Failed to convert filename to URL")
.as_str(),
)
.expect("Failed to discover MP4 file");
assert_eq!(info.duration(), Some(gst::ClockTime::from_mseconds(3_300)));
let audio_streams = info.audio_streams();
assert_eq!(audio_streams.len(), 1);
let audio_stream = &audio_streams[0];
assert_eq!(audio_stream.channels(), 1);
assert_eq!(audio_stream.sample_rate(), 44_100);
let caps = audio_stream.caps().unwrap();
assert!(
caps.can_intersect(
&gst::Caps::builder("audio/mpeg")
.any_features()
.field("mpegversion", 4i32)
.build()
),
"Unexpected audio caps {caps:?}"
);
let video_streams = info.video_streams();
assert_eq!(video_streams.len(), 1);
let video_stream = &video_streams[0];
assert_eq!(video_stream.width(), 320);
assert_eq!(video_stream.height(), 240);
assert_eq!(video_stream.framerate(), gst::Fraction::new(30, 1));
assert_eq!(video_stream.par(), gst::Fraction::new(1, 1));
assert!(!video_stream.is_interlaced());
let caps = video_stream.caps().unwrap();
assert!(
caps.can_intersect(&gst::Caps::builder("video/x-h264").any_features().build()),
"Unexpected video caps {caps:?}"
);
})
}
#[test]
fn test_roundtrip_vp9_flac() {
init();
test_basic_with("vp9enc ! vp9parse", "flacenc ! flacparse", |location| {
let Ok(pipeline) = gst::parse::launch(
"filesrc name=src ! qtdemux name=demux \
demux.audio_0 ! queue ! flacdec ! fakesink \
demux.video_0 ! queue ! vp9dec ! fakesink",
) else {
panic!("could not build decoding pipeline")
};
let pipeline = Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap());
pipeline
.by_name("src")
.unwrap()
.set_property("location", location.display().to_string());
pipeline.into_completion();
})
}
#[test]
fn test_roundtrip_av1_aac() {
init();
test_basic_with("av1enc ! av1parse", "avenc_aac ! aacparse", |location| {
let Ok(pipeline) = gst::parse::launch(
"filesrc name=src ! qtdemux name=demux \
demux.audio_0 ! queue ! avdec_aac ! fakesink \
demux.video_0 ! queue ! av1dec ! fakesink",
) else {
panic!("could not build decoding pipeline")
};
let pipeline = Pipeline(pipeline.downcast::<gst::Pipeline>().unwrap());
pipeline
.by_name("src")
.unwrap()
.set_property("location", location.display().to_string());
pipeline.into_completion();
})
}


@ -12,7 +12,7 @@ rust-version.workspace = true
[dependencies]
async-stream = "0.3.4"
base32 = "0.4"
base32 = "0.5"
aws-config = "1.0"
aws-sdk-s3 = "1.0"
aws-sdk-transcribestreaming = "1.0"
@ -32,10 +32,12 @@ serde_derive = "1"
serde_json = "1"
url = "2"
once_cell.workspace = true
gst-video = { workspace = true, features = ["v1_22"] }
sprintf = "0.2"
[dev-dependencies]
chrono = { version = "0.4", features = [ "alloc" ] }
env_logger = "0.10"
env_logger = "0.11"
gst-check = { workspace = true, features = ["v1_18"] }
rand = "0.8"
test-with = { version = "0.12", default-features = false }


@ -18,7 +18,7 @@ mod s3hlssink;
mod s3sink;
mod s3src;
mod s3url;
mod s3utils;
pub mod s3utils;
mod transcribe_parse;
mod transcriber;

Some files were not shown because too many files have changed in this diff.