Compare commits

76 commits
main ... 0.12

Author SHA1 Message Date
Sami Jaktholm
44ce6b8a66 tags: fix album tag definition
The Album tag should be defined using the GST_TAG_ALBUM constant instead
of GST_TAG_ARTIST_SORTNAME.
2019-01-13 14:51:12 +02:00
Sebastian Dröge
3aac87c07f Remove "video editing" keyword from GES
They must be single words.
2018-11-26 17:01:15 +01:00
Sebastian Dröge
ff2fb01184 Fixup last commit to actually change the pbutils dependency instead of the base dependency 2018-11-26 16:54:35 +01:00
Sebastian Dröge
ecb45814c2 gstreamer-editing-services requires pbutils 0.12.2 for the encoding profile API 2018-11-26 16:51:00 +01:00
Sebastian Dröge
b20981abc1 Fix some remaining references to github 2018-11-26 16:12:59 +01:00
Sebastian Dröge
6f6452a0c7 Various fixes for the README.md of the separate crates 2018-11-26 16:11:36 +01:00
Sebastian Dröge
7ccfc2a07e Update README.md with changes from the main one 2018-11-26 16:09:17 +01:00
Sebastian Dröge
c4bda55f3c Update gst-plugin-rs link to gst-plugins-rs 2018-11-26 16:09:05 +01:00
Sebastian Dröge
3779eba607 Update versions to 0.12.2 2018-11-26 15:57:39 +01:00
Sebastian Dröge
d2181def6a Update documentation 2018-11-26 15:57:39 +01:00
Sebastian Dröge
b3057d1185 Update CHANGELOG.md for 0.12.2 2018-11-26 15:57:39 +01:00
Sebastian Dröge
6cb567e855 Remove GitLab badge
It only works for gitlab.com currently
2018-11-26 15:48:04 +01:00
Sebastian Dröge
8fd859a45f Update documentation to GitLab pages 2018-11-26 15:45:38 +01:00
Sebastian Dröge
5d40a3ba2d Run everything through rustfmt again 2018-11-26 14:37:42 +01:00
Sebastian Dröge
65b77971ef Clean up imports a bit 2018-11-26 12:57:46 +01:00
Sebastian Dröge
5b98f9def0 Only re-export traits from the pbutils prelude 2018-11-26 12:57:46 +01:00
Sebastian Dröge
00a5fecdbb Remove EncodingTarget::add_profile() and consider it immutable 2018-11-26 12:57:46 +01:00
Sebastian Dröge
1cecb1c8ab Add copyright header 2018-11-26 12:57:46 +01:00
Sebastian Dröge
1b7c8868a9 Store encoding profiles inside the container builder as a Vec 2018-11-26 12:57:46 +01:00
Sebastian Dröge
2df623c09a Remove unnecessary mut 2018-11-26 12:57:46 +01:00
Sebastian Dröge
682ca91070 Remove automatically inserted docs from git 2018-11-26 12:57:46 +01:00
Thiago Santos
e09f23e689 More encoding_profile binding improvements
- enable is_equal function again (unsure why it was disabled)
- remove restriction-caps property, encoding-profile objects are
immutable
- remove the need for casts by using IsA<EncodingProfile> in parameters and
returning the correct EncodingProfile subclass from the build()
functions. An internal hack is used for storing those IsA objects in
order to keep the API clean and ready to use; this should be sorted
out as soon as we figure out how to store them in the builders.
- encodebin example: remove Result error propagation when it is caused
by programming mistakes. A panic will happen in those cases.
- run rustfmt
2018-11-26 12:57:46 +01:00
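A hedged sketch of the cast-free call style this enables; only IsA<EncodingProfile> is from the commit, the helper is hypothetical:

    extern crate glib;
    extern crate gstreamer_pbutils as gst_pbutils;

    use glib::IsA;
    use gst_pbutils::EncodingProfile;

    // Hypothetical helper: callers hand in an EncodingVideoProfile,
    // EncodingAudioProfile or EncodingContainerProfile directly, no casts,
    // since all of them implement IsA<EncodingProfile>.
    fn upcast_profile<P: IsA<EncodingProfile>>(profile: &P) -> &EncodingProfile {
        profile.as_ref()
    }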
Thiago Santos
6cb7f602d1 encoding_profile: set_restriction is transfer-full not transfer-none
Avoid refcounting mistakes by using the right conversion functions
before calling the FFI-layer functions
2018-11-26 12:57:46 +01:00
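The distinction matters at the FFI boundary: a transfer-full parameter means the C function takes ownership of a reference, so the caller must hand one over rather than lend a borrowed pointer. A hedged sketch of the pattern, not the actual patch:

    extern crate glib;
    extern crate gstreamer as gst;
    extern crate gstreamer_pbutils as gst_pbutils;
    extern crate gstreamer_pbutils_sys as ffi;

    use glib::translate::*;

    // Sketch of the transfer-full pattern; the binding's real wrapper
    // differs in details.
    fn set_restriction(profile: &gst_pbutils::EncodingProfile, caps: &gst::Caps) {
        unsafe {
            ffi::gst_encoding_profile_set_restriction(
                profile.to_glib_none().0,
                // transfer-full: give the callee its own strong reference;
                // to_glib_none() here would let C unref one we never gave it
                caps.to_glib_full(),
            );
        }
    }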
Thiago Santos
15da7bca0e EncodingProfile: remove setters and constructors, use builders
Provide builders for the EncodingProfile so that the created objects are
immutable and can have the Send and Sync traits
2018-11-26 12:57:46 +01:00
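A hedged sketch of builder-style construction as described here; the builder and method names are assumed from the 0.12-era API and may differ in detail:

    extern crate gstreamer as gst;
    extern crate gstreamer_pbutils as gst_pbutils;

    fn make_video_profile() -> gst_pbutils::EncodingVideoProfile {
        let caps = gst::Caps::new_simple("video/x-theora", &[]);
        // The finished profile is immutable, which is what allows it to
        // implement Send and Sync.
        gst_pbutils::EncodingVideoProfileBuilder::new()
            .format(&caps)
            .presence(0)
            .build()
            .expect("failed to build the encoding profile")
    }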
Thiago Santos
69ae94eb37 encoding_profile: add builders for all types 2018-11-26 12:57:46 +01:00
Thiago Santos
2a1d5e65bb encodebin: add an example for encodebin usage 2018-11-26 12:57:46 +01:00
Sebastian Dröge
72a30a869a Update GES version to 0.12.1 2018-11-26 12:57:46 +01:00
Thiago Santos
307637c7bd pbutils: adding encoding-profile bindings 2018-11-26 12:57:46 +01:00
Thibault Saunier
f9c95b62a4 ges: Work around trait naming conflicts
Fixes #151
2018-11-26 12:57:46 +01:00
Thibault Saunier
4ec7288f56 ges: Mark more methods as returning BoolError 2018-11-26 12:57:46 +01:00
Thibault Saunier
0d5ea8e452 Generate bindings for the GStreamer Editing Services 2018-11-26 12:57:46 +01:00
Sebastian Dröge
a17449e2be Run everything through rustfmt 2018-11-26 12:57:45 +01:00
Sebastian Dröge
9dd6053dbd Update gstreamer-check version to 0.12.1 2018-11-26 12:57:45 +01:00
Sebastian Dröge
5aa3b3908e harness: Work around floating references issues in GstHarness
See https://gitlab.freedesktop.org/gstreamer/gstreamer/merge_requests/31
2018-11-26 12:57:45 +01:00
Sebastian Dröge
2a222987d1 Fix accidental usage of Rust 1.30 feature
error[E0658]: access to extern crates through prelude is experimental (see issue #44660)
   --> gstreamer-check/src/harness.rs:180:76
    |
180 |     pub fn crank_multiple_clock_waits(&mut self, waits: u32) -> Result<(), glib::BoolError> {
    |                                                                            ^^^^
2018-11-26 12:57:45 +01:00
Sebastian Dröge
5ae517d093 Use std::path to directly refer to path::Path 2018-11-26 12:57:45 +01:00
Sebastian Dröge
fcb3ee9045 Keep harness::{Ref, RefMut} in the module instead of re-exporting at the top-level 2018-11-26 12:57:45 +01:00
Sebastian Dröge
ea12ab3cf0 Add accessors for the GstHarness struct fields 2018-11-26 12:57:45 +01:00
Sebastian Dröge
8c37ad1372 Fix harness compilation with v1_16 feature 2018-11-26 12:57:45 +01:00
Sebastian Dröge
0799647874 Fix gstreamer-check build with default features and docs 2018-11-26 12:57:45 +01:00
Sebastian Dröge
2a2ffa45c9 Add initial version of gstreamer-check
Covers GstHarness and GstTestClock

Fixes https://github.com/sdroege/gstreamer-rs/issues/148
2018-11-26 12:57:45 +01:00
Sebastian Dröge
be8f29c9f7 WIP initial version of harness/testclock bindings 2018-11-26 12:57:45 +01:00
Sebastian Dröge
499a99070e Update gir-files from gstreamer-sys 2018-11-26 12:57:45 +01:00
Sebastian Dröge
7e57b36f00 Run everything through rustfmt 2018-11-26 12:56:01 +01:00
Sebastian Dröge
135afc7e9c Implement glib::SendUnique on Adapter
It can be safely sent between threads as long as all references are in
the same thread.
2018-11-26 12:56:01 +01:00
Sebastian Dröge
855f721230 Add the to-dot-file functions to the gst::Bin trait
This allows directly calling them on bins instead of using a global
function.
2018-11-26 12:56:01 +01:00
Christian Meissl
13a1d5d7d0 net: Actually create a PTP clock instead of an NTP clock 2018-11-26 12:56:01 +01:00
Sebastian Dröge
574ad233b4 Implement Send/Sync for VideoFrame
Closes #153
2018-11-26 12:08:08 +01:00
Sebastian Dröge
9a059583c9 Run everything through rustfmt 2018-11-07 23:59:08 +02:00
Sebastian Dröge
d7d965c437 Run gitlab-ci also with Rust 1.28 2018-11-07 23:55:20 +02:00
Thibault Saunier
85e1e196c4 Fix WebRTC crate link in the README 2018-11-07 23:55:11 +02:00
Thibault Saunier
3c5b4ade92 ci: Lint before running test
No reason to run tests if linting didn't pass
2018-11-07 23:55:05 +02:00
Sebastian Dröge
f5c7ae06de Fix repository name in migration note in README.md 2018-11-07 23:54:58 +02:00
Sebastian Dröge
859ea53117 Call gst::init() in message::test_other_fields() test at the beginning 2018-11-07 23:54:49 +02:00
Sebastian Dröge
f53f5f92a4 Change all references from GitHub to freedesktop.org GitLab 2018-11-07 23:54:16 +02:00
Arun Raghavan
d0c124db15 enums: Implement the From trait for error/success
This implements the From trait for all *Error/*Success enums to the
corresponding *Return enum.
2018-11-07 23:51:39 +02:00
Jan Alexander Steffens (heftig)
b1c538dee2 examples: Make gstreamer-rtsp-server-sys properly optional
Fixes https://github.com/sdroege/gstreamer-rs/issues/136
2018-11-07 23:51:29 +02:00
Jordan Petridis
16574e87d6 Add a rustfmt config
We stick to the default rustfmt style, but the config is needed
to ignore the auto-generated code.
2018-11-07 23:50:52 +02:00
Jordan Petridis
c3e7ab2f61 CI: Add a rustfmt check 2018-11-07 23:50:52 +02:00
Jordan Petridis
947ddc809d CI: Cache the cargo registry 2018-11-07 23:50:52 +02:00
Jordan Petridis
4c9cf04f8a CI: add color to the cargo output 2018-11-07 23:50:52 +02:00
Jordan Petridis
ef9591807f CI: Add initial gitlab-ci config
Duplicates the functionality of the travis config
2018-11-07 23:50:52 +02:00
Sebastian Dröge
23900d6099 Remove travis cargo cache 2018-11-07 23:48:56 +02:00
Sebastian Dröge
513914c735 Add functions to get the FFI pointer behind a VideoFrame/VideoFrameRef 2018-11-07 23:48:21 +02:00
Sebastian Dröge
e37be09eeb Run tests on travis with G_DEBUG=fatal_warnings 2018-11-07 23:48:12 +02:00
Sebastian Dröge
d317e15c2e Structure is Sync, and StructureRef is Send+Sync 2018-11-07 23:48:00 +02:00
Sebastian Dröge
c21f98d2c3 Implement Sync for VideoInfo and AudioInfo
They are immutable on the Rust side after all.
2018-11-07 23:48:00 +02:00
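Being immutable, such values can simply be shared across threads behind an Arc; a minimal sketch using gst::Structure:

    extern crate gstreamer as gst;

    use std::sync::Arc;
    use std::thread;

    fn main() {
        gst::init().unwrap();
        // Structure is Sync, so one instance can be read from several threads
        let s = Arc::new(gst::Structure::new_empty("example"));
        let s2 = Arc::clone(&s);
        let t = thread::spawn(move || assert_eq!(s2.get_name(), "example"));
        assert_eq!(s.get_name(), "example");
        t.join().unwrap();
    }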
Sebastian Dröge
c1d56ce94e Update travis to use GStreamer 1.14.3 binaries (including OpenGL library) 2018-11-07 23:47:47 +02:00
Sebastian Dröge
93eb0b715c Update versions to 0.12.1 2018-09-21 11:41:52 +03:00
Sebastian Dröge
be35bf465f Update CHANGELOG.md for 0.12.1 2018-09-21 11:41:14 +03:00
Sebastian Dröge
edfb3050c4 Add bindings for is_video_overlay_prepare_window_handle_message() 2018-09-13 22:57:54 +03:00
Sebastian Dröge
c41a8edae7 Add VideoOverlay::prepare_window_handle() and got_window_handle() 2018-09-13 22:57:53 +03:00
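Together these enable the usual prepare-window-handle pattern from a sync bus handler, sketched below with current gstreamer-rs names; the handle value is application-specific, and set_window_handle is unsafe because the handle must stay valid:

    extern crate gstreamer as gst;
    extern crate gstreamer_video as gst_video;

    use gst_video::prelude::*;

    // Sketch: hand the native window handle to the video sink that asked
    // for one. `handle` is a hypothetical window-system handle.
    fn on_sync_message(msg: &gst::Message, overlay: &gst_video::VideoOverlay, handle: usize) {
        if gst_video::is_video_overlay_prepare_window_handle_message(msg) {
            unsafe {
                overlay.set_window_handle(handle);
            }
        }
    }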
Sebastian Dröge
20906d41e2 Update gstreamer-webrtc version to 0.12.0 2018-09-10 01:51:57 +03:00
Sebastian Dröge
82f51cfe43 Add README.md to gstreamer-webrtc 2018-09-10 01:51:43 +03:00
Sebastian Dröge
d2b520ec31 Update versions of all dependencies and point to releases instead of GIT 2018-09-10 01:41:17 +03:00
Sebastian Dröge
108f08dea4 Update CHANGELOG.md for 0.12.0 2018-09-10 01:37:36 +03:00
1438 changed files with 371523 additions and 250423 deletions

.gitignore (3 changed lines)

@@ -1,2 +1,3 @@
target/
/target/
**/*.rs.bk
Cargo.lock

.gitlab-ci.yml

@@ -1,583 +1,76 @@
# We use https://gitlab.freedesktop.org/freedesktop/ci-templates
# to build the images used by the ci.
#
# Here is how to properly update those images:
# - new Rust stable version: update GST_RS_IMG_TAG and update Rust version
# - add dependencies: update FDO_DISTRIBUTION_PACKAGES and update GST_RS_IMG_TAG
# - update GStreamer version: update the tag in ci/install-gst.sh and update GST_RS_IMG_TAG
#
# GST_RS_IMG_TAG is defined in ci/images_template.yml and should be updated
# either by:
# - setting it to the current date and the version suffix to 0
# - incrementing the version suffix
#
# After each update commit your changes and push to your personal repo.
# After review and ci approval merge the branch as usual.
#
# Updating the nightly image should be done by simply running a scheduled ci
# pipeline on the upstream repo with the $UPDATE_NIGHTLY variable defined.
.templates_sha: &templates_sha b2e24205598dc1d80b5f2c88cf7618051e30e9fd
include:
- project: 'freedesktop/ci-templates'
ref: *templates_sha
file: '/templates/debian.yml'
- local: "ci/images_template.yml"
workflow:
rules:
- if: $CI_PIPELINE_SOURCE == "schedule"
- if: $CI_MERGE_REQUEST_IID
# don't create a pipeline if its a commit pipeline, on a branch and that branch has
# open merge requests (bc we will get a MR build instead)
- if: $CI_OPEN_MERGE_REQUESTS
when: never
- if: $CI_COMMIT_TAG
- if: $CI_COMMIT_BRANCH
default:
interruptible: true
variables:
FDO_UPSTREAM_REPO: gstreamer/gstreamer-rs
# DIY CI-templates like setup for windows
WINDOWS_RUST_MINIMUM_IMAGE: "$CI_REGISTRY_IMAGE/windows:$GST_RS_IMG_TAG-main-$GST_RS_MSRV"
WINDOWS_RUST_MINIMUM_UPSTREAM_IMAGE: "$CI_REGISTRY/$FDO_UPSTREAM_REPO/windows:$GST_RS_IMG_TAG-main-$GST_RS_MSRV"
WINDOWS_RUST_STABLE_IMAGE: "$CI_REGISTRY_IMAGE/windows:$GST_RS_IMG_TAG-main-$GST_RS_STABLE"
WINDOWS_RUST_STABLE_UPSTREAM_IMAGE: "$CI_REGISTRY/$FDO_UPSTREAM_REPO/windows:$GST_RS_IMG_TAG-main-$GST_RS_STABLE"
RUST_DOCS_FLAGS: "--cfg docsrs --extern-html-root-url=muldiv=https://docs.rs/muldiv/1.0.0/muldiv/ -Z unstable-options --generate-link-to-definition"
NAMESPACE: gstreamer
# format is <branch>=<name>
# the name is used in the URL
# latest release must be at the top
# (only relevant on main branch)
RELEASES:
0.22=0.22
stages:
- "trigger"
- "container-base"
- "container-final"
- "lint"
- "test"
- "extras"
- "deploy"
# This is an empty job that is used to trigger the pipeline.
trigger:
image: alpine:latest
stage: 'trigger'
variables:
GIT_STRATEGY: none
tags: [ 'placeholder-job' ]
script:
- echo "Trigger job done, now running the pipeline."
rules:
- if: $CI_PIPELINE_SOURCE == "schedule"
# If the MR is assigned to the Merge bot, trigger the pipeline automatically
- if: '$CI_MERGE_REQUEST_ASSIGNEES == "gstreamer-merge-bot"'
# Require explicit action to trigger tests post merge
- if: '$CI_PROJECT_NAMESPACE == "gstreamer" && $CI_COMMIT_BRANCH == "main"'
when: 'manual'
# When the assignee isn't the merge bot, require an explicit action to trigger the pipeline
# to avoid wasting CI resources
- if: '$CI_MERGE_REQUEST_ASSIGNEES != "gstreamer-merge-bot"'
when: 'manual'
allow_failure: false
.cargo_cache: &cache
cache:
key: "gst"
paths:
- ".cargo_cache/"
.debian:12:
needs: []
variables:
FDO_DISTRIBUTION_VERSION: 'bookworm-slim'
before_script:
- source ./ci/env.sh
- mkdir .cargo && echo -e "[net]\ngit-fetch-with-cli = true" > .cargo/config
.debian:12-base:
extends: .debian:12
variables:
FDO_DISTRIBUTION_TAG: 'base-$GST_RS_IMG_TAG'
.debian:12-stable:
extends: .debian:12
variables:
RUST_IMAGE_FULL: "1"
FDO_DISTRIBUTION_TAG: '$GST_RS_STABLE-$GST_RS_IMG_TAG'
FDO_DISTRIBUTION_EXEC: 'bash ci/install-rust.sh $GST_RS_STABLE $RUST_IMAGE_FULL'
.debian:12-msrv:
extends: .debian:12
variables:
FDO_DISTRIBUTION_TAG: '$GST_RS_MSRV-$GST_RS_IMG_TAG'
FDO_DISTRIBUTION_EXEC: 'bash ci/install-rust.sh $GST_RS_MSRV $RUST_IMAGE_FULL'
.debian:12-nightly:
extends: .debian:12
variables:
FDO_DISTRIBUTION_TAG: 'nightly-$GST_RS_IMG_TAG'
FDO_DISTRIBUTION_EXEC: 'bash ci/install-rust.sh nightly $RUST_IMAGE_FULL'
.build-base-image:
extends:
- .fdo.container-build@debian
stage: container-base
variables:
FDO_DISTRIBUTION_PACKAGES: >-
build-essential curl python3-setuptools libglib2.0-dev libxml2-dev
libdrm-dev libegl1-mesa-dev libgl1-mesa-dev libgbm-dev libgles2-mesa-dev
libgl1-mesa-dri libegl-dev libgl1-mesa-glx libwayland-egl1-mesa xz-utils
libssl-dev git wget ca-certificates ninja-build python3-pip flex bison
libglib2.0-dev libx11-dev libx11-xcb-dev libsoup2.4-dev libvorbis-dev
libogg-dev libtheora-dev libmatroska-dev libvpx-dev libopus-dev
libgraphene-1.0-dev libjpeg-dev libwayland-dev wayland-protocols
python3-gi libavcodec-dev libavformat-dev libavutil-dev libavfilter-dev
libswscale-dev yasm libx264-dev libfontconfig-dev libfreetype-dev
libxkbcommon-dev libxi-dev libxcb-render0-dev libxcb-shm0-dev
libxcb1-dev libxext-dev libxrender-dev libxrandr-dev libxcursor-dev
libxdamage-dev libxfixes-dev libxinerama-dev libgudev-1.0-dev
libpango1.0-dev libcairo2-dev libjson-glib-dev libgdk-pixbuf-2.0-dev
libtiff-dev libpng-dev libjpeg-dev libepoxy-dev libsass-dev sassc
libcsound64-dev llvm clang nasm libsodium-dev libwebp-dev
libflac-dev
FDO_DISTRIBUTION_EXEC: >-
bash ci/install-gst.sh &&
bash ci/install-dav1d.sh &&
pip3 install --break-system-packages git+http://gitlab.freedesktop.org/freedesktop/ci-templates &&
pip3 install --break-system-packages tomli
.build-final-image:
extends:
- .fdo.container-build@debian
stage: container-final
variables:
FDO_BASE_IMAGE: '$CI_REGISTRY_IMAGE/debian/bookworm-slim:base-$GST_RS_IMG_TAG'
build-base:
extends:
- .build-base-image
- .debian:12-base
build-stable:
needs: ["build-base"]
extends:
- .build-final-image
- .debian:12-stable
build-msrv:
needs: ["build-base"]
extends:
- .build-final-image
- .debian:12-msrv
build-nightly:
needs: ["build-base"]
extends:
- .build-final-image
- .debian:12-nightly
update-nightly:
extends: build-nightly
rules:
- if: $UPDATE_NIGHTLY == "1"
variables:
FDO_FORCE_REBUILD: 1
.dist-debian-container:
extends:
- .fdo.distribution-image@debian
after_script:
- rm -rf target
.img-stable:
extends:
- .debian:12-stable
- .dist-debian-container
.img-msrv:
extends:
- .debian:12-msrv
- .dist-debian-container
.img-nightly:
extends:
- .debian:12-nightly
- .dist-debian-container
.cargo_test_var: &cargo_test
- ./ci/run-cargo-test.sh
.cargo test:
.cargo_test_template: &cargo_test
stage: "test"
script:
- *cargo_test
variables:
DEPENDENCIES: |
curl
liborc-dev
libglib2.0-dev
libxml2-dev
libgtk-3-dev
libegl1-mesa
libgles2-mesa
libgl1-mesa-dri
libgl1-mesa-glx
libwayland-egl1-mesa
test msrv:
extends:
- '.cargo test'
- .img-msrv
needs:
- job: 'trigger'
artifacts: false
- job: 'build-msrv'
artifacts: false
<<: *cache
before_script:
- apt-get update -yqq
- apt-get install -yqq --no-install-recommends $DEPENDENCIES
# Only stuff inside the repo directory can be cached
# Override the CARGO_HOME variable to force its location
- export CARGO_HOME="${PWD}/.cargo_cache"
- mkdir -p precompiled-gst && cd precompiled-gst
- curl -L https://people.freedesktop.org/~slomo/gstreamer-1.14.3.tar.gz | tar xz
- sed -i "s;prefix=/root/gstreamer;prefix=$PWD/gstreamer;g" $PWD/gstreamer/lib/x86_64-linux-gnu/pkgconfig/*.pc
- export PKG_CONFIG_PATH=$PWD/gstreamer/lib/x86_64-linux-gnu/pkgconfig
- export GST_PLUGIN_SYSTEM_PATH=$PWD/gstreamer/lib/x86_64-linux-gnu/gstreamer-1.0
- export GST_PLUGIN_SCANNER=$PWD/gstreamer/libexec/gstreamer-1.0/gst-plugin-scanner
- export PATH=$PATH:$PWD/gstreamer/bin
- export LD_LIBRARY_PATH=$PWD/gstreamer/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH
script:
- rustc --version
- cargo build --color=always --all
- G_DEBUG=fatal_warnings cargo test --color=always --all
- cargo build --color=always --all --all-features
- G_DEBUG=fatal_warnings cargo test --color=always --all --all-features
test 1.28:
# 1.28 img
# https://hub.docker.com/_/rust/
image: "rust:1.28-slim"
<<: *cargo_test
test stable:
extends:
- '.cargo test'
- .img-stable
needs:
- job: 'trigger'
artifacts: false
- job: 'build-stable'
artifacts: false
test stable all-features:
variables:
ALL_FEATURES: 'yes'
EXAMPLES_TUTORIALS: 'yes'
extends:
- '.cargo test'
- .img-stable
needs:
- job: 'trigger'
artifacts: false
- job: 'build-stable'
artifacts: false
# Stable img
# https://hub.docker.com/_/rust/
image: "rust:slim"
<<: *cargo_test
test nightly:
# Nightly
# https://hub.docker.com/r/rustlang/rust/
image: "rustlang/rust:nightly-slim"
allow_failure: true
extends:
- '.cargo test'
- .img-nightly
needs:
- job: 'trigger'
artifacts: false
- job: 'build-nightly'
artifacts: false
test nightly all-features:
allow_failure: true
variables:
ALL_FEATURES: 'yes'
EXAMPLES_TUTORIALS: 'yes'
extends:
- '.cargo test'
- .img-nightly
needs:
- job: 'trigger'
artifacts: false
- job: 'build-nightly'
artifacts: false
.cargo test sys:
stage: "test"
script:
- ./ci/run-sys-cargo-test.sh
test stable sys:
extends:
- '.cargo test sys'
- .img-stable
needs:
- job: 'trigger'
artifacts: false
- job: 'build-stable'
artifacts: false
test msrv sys:
extends:
- '.cargo test sys'
- .img-msrv
needs:
- job: 'trigger'
artifacts: false
- job: 'build-msrv'
artifacts: false
test nightly sys:
extends:
- '.cargo test sys'
- .img-nightly
needs:
- job: 'trigger'
artifacts: false
- job: 'build-nightly'
artifacts: false
<<: *cargo_test
rustfmt:
extends: .img-stable
image: "rustlang/rust:nightly-slim"
stage: "lint"
tags: [ 'placeholder-job' ]
script:
- rustup component add rustfmt-preview
- cargo fmt --version
- cargo fmt -- --color=always --check
needs:
- job: 'build-stable'
artifacts: false
check commits:
extends: .img-stable
stage: "lint"
tags: [ 'placeholder-job' ]
script:
- ci-fairy check-commits --textwidth 0 --no-signed-off-by
needs:
- job: 'build-stable'
artifacts: false
typos:
extends: .img-stable
stage: "lint"
tags: [ 'placeholder-job' ]
script:
- typos
needs:
- job: 'build-stable'
artifacts: false
clippy:
extends: .img-stable
stage: 'extras'
variables:
CLIPPY_LINTS: -D warnings -W unknown-lints
needs:
- job: 'trigger'
artifacts: false
- job: 'build-stable'
artifacts: false
script:
- ./ci/run-clippy.sh
deny:
extends: .img-stable
stage: 'extras'
needs:
- job: 'build-stable'
artifacts: false
rules:
- if: $CI_PIPELINE_SOURCE == "schedule"
script:
- cargo update --color=always
- cargo deny --color=always --workspace --all-features check all
gir-checks:
variables:
GIT_SUBMODULE_STRATEGY: recursive
extends: .img-stable
stage: 'extras'
tags: [ 'placeholder-job' ]
needs:
- job: 'build-stable'
artifacts: false
script:
- git submodule update --checkout
- python3 ci/gir-checks.py
outdated:
extends: .img-stable
stage: 'extras'
needs:
- job: 'build-stable'
artifacts: false
rules:
- if: $CI_PIPELINE_SOURCE == "schedule"
script:
- cargo update --color=always
- cargo outdated --color=always --root-deps-only --exit-code 1 -v
coverage:
allow_failure: true
extends:
- '.cargo test'
- .img-stable
stage: 'extras'
needs:
- job: 'trigger'
artifacts: false
- job: 'build-stable'
artifacts: false
variables:
ALL_FEATURES: 'yes'
RUSTFLAGS: "-Cinstrument-coverage"
LLVM_PROFILE_FILE: "gstreamer-rs-%p-%m.profraw"
script:
- *cargo_test
# generate html report
- grcov . --binary-path ./target/debug/ -s . -t html --branch --ignore-not-existing --ignore "*target*" --ignore "*/sys/*" --ignore "examples/*" --ignore "tutorials/*" --ignore "*/build.rs" -o ./coverage/
# generate cobertura report for gitlab integration
- grcov . --binary-path ./target/debug/ -s . -t cobertura --branch --ignore-not-existing --ignore "*target*" --ignore "*/sys/*" --ignore "examples/*" --ignore "tutorials/*" --ignore "*/build.rs" -o coverage.xml
# output coverage summary for gitlab parsing.
# TODO: use grcov once https://github.com/mozilla/grcov/issues/556 is fixed
- grep % coverage/index.html | head -1 ; true
artifacts:
paths:
- 'coverage'
reports:
coverage_report:
coverage_format: cobertura
path: coverage.xml
doc-stripping:
variables:
GIT_SUBMODULE_STRATEGY: recursive
extends: .img-nightly
stage: 'extras'
needs:
- job: 'build-nightly'
artifacts: false
script:
- git submodule update --checkout
- PATH=~/.cargo/bin/:$PATH ./generator.py --gir-files-directories gir-files gst-gir-files --embed-docs
- PATH=~/.cargo/bin/:$PATH ./generator.py --gir-files-directories gir-files gst-gir-files --strip-docs
- git diff --quiet || (echo 'Files changed after running `rustdoc-stripper -s`, make sure all documentation is protected with `// rustdoc-stripper-ignore-next`!'; git diff; false)
regen-check:
variables:
GIT_SUBMODULE_STRATEGY: recursive
extends: .img-nightly
stage: 'extras'
needs:
- job: 'build-nightly'
artifacts: false
script:
- git submodule update --checkout
- PATH=~/.cargo/bin/:$PATH ./generator.py --gir-files-directories gir-files gst-gir-files --yes
- git diff --quiet || (echo 'Files changed after running `generator.py`, make sure all submodules and generated files are in the correct version!'; git diff; false)
docs:
variables:
GIT_SUBMODULE_STRATEGY: recursive
extends: .img-nightly
stage: 'extras'
needs:
- job: 'build-nightly'
artifacts: false
script:
- git submodule update --checkout
- curl --proto '=https' --tlsv1.2 -sSf -o gir-rustdoc.py
https://gitlab.gnome.org/World/Rust/gir-rustdoc/-/raw/main/gir-rustdoc.py
- chmod +x gir-rustdoc.py
- PATH=~/.cargo/bin/:$PATH ./generator.py --gir-files-directories gir-files gst-gir-files --embed-docs --no-fmt
- |
RUSTDOCFLAGS="$RUST_DOCS_FLAGS"
RUSTFLAGS="--cfg docsrs"
eval $(./gir-rustdoc.py pre-docs)
cargo +nightly doc --workspace --exclude examples --exclude tutorials --all-features --color=always --no-deps
- mv target/doc docs
artifacts:
paths:
- 'docs'
# https://docs.gitlab.com/ee/user/project/pages/#how-it-works
# GitLab automatically deploys the `public/` folder from an
# artifact generated by the job named `pages`. This step
# re-uses the docs from the build-test `docs` step above.
pages:
extends: .img-nightly
stage: 'deploy'
needs: [ 'docs' ]
interruptible: false
script:
- curl --proto '=https' --tlsv1.2 -sSf -o gir-rustdoc.py
https://gitlab.gnome.org/World/Rust/gir-rustdoc/-/raw/main/gir-rustdoc.py
- chmod +x gir-rustdoc.py
- ./gir-rustdoc.py html-index
# development docs
- mkdir public/git
- mv docs public/git/docs
# stable docs
- ./gir-rustdoc.py docs-from-artifacts
- ls public/
artifacts:
paths:
- 'public'
rules:
- if: ($CI_DEFAULT_BRANCH == $CI_COMMIT_BRANCH) && ($CI_PROJECT_NAMESPACE == $NAMESPACE)
when: 'manual'
.windows rust docker build:
stage: 'container-final'
timeout: '2h'
needs: []
variables:
# Unlike the buildah/linux jobs, this file
# needs to be relative to windows-docker/ subdir
# as it makes life easier in the powershell script
#
# We also don't need a CONTEXT_DIR var as its also
# hardcoded to be windows-docker/
DOCKERFILE: 'ci/windows-docker/Dockerfile'
tags:
- 'windows'
- 'shell'
- '2022'
script:
# We need to pass an array and to resolve the env vars, so we can't use a variable:
- $DOCKER_BUILD_ARGS = @("--build-arg", "DEFAULT_BRANCH=$GST_UPSTREAM_BRANCH", "--build-arg", "RUST_VERSION=$RUST_VERSION")
- "& ci/windows-docker/container.ps1 $CI_REGISTRY $CI_REGISTRY_USER $CI_REGISTRY_PASSWORD $RUST_IMAGE $RUST_UPSTREAM_IMAGE $DOCKERFILE"
- |
if (!($?)) {
echo "Failed to build the image"
Exit 1
}
windows rust docker stable:
extends: '.windows rust docker build'
variables:
RUST_IMAGE: !reference [variables, "WINDOWS_RUST_STABLE_IMAGE"]
RUST_UPSTREAM_IMAGE: !reference [variables, "WINDOWS_RUST_STABLE_UPSTREAM_IMAGE"]
RUST_VERSION: !reference [variables, "GST_RS_STABLE"]
windows rust docker msrv:
extends: '.windows rust docker build'
when: 'manual'
variables:
RUST_IMAGE: !reference [variables, "WINDOWS_RUST_MINIMUM_IMAGE"]
RUST_UPSTREAM_IMAGE: !reference [variables, "WINDOWS_RUST_MINIMUM_UPSTREAM_IMAGE"]
RUST_VERSION: !reference [variables, "GST_RS_MSRV"]
.msvc2019 build:
stage: 'test'
tags:
- 'docker'
- 'windows'
- '2022'
script:
# Skip -sys tests as they don't work
# https://github.com/gtk-rs/gtk3-rs/issues/54
#
# We need to build each crate separately to avoid crates like -egl,-wayland etc on windows
- cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 &&
powershell ./ci/run_windows_tests.ps1"
- |
if (!$?) {
Write-Host "Tests Failed!"
Exit 1
}
test windows msrv:
image: $WINDOWS_RUST_MINIMUM_IMAGE
needs:
- job: 'trigger'
artifacts: false
- job: 'windows rust docker msrv'
artifacts: false
extends: '.msvc2019 build'
test windows stable:
needs:
- job: 'trigger'
artifacts: false
- job: 'windows rust docker stable'
artifacts: false
image: "$WINDOWS_RUST_STABLE_IMAGE"
extends: '.msvc2019 build'

Issue template (deleted, 33 lines)

@@ -1,33 +0,0 @@
### Describe your issue
<!-- a clear and concise summary of the bug. -->
<!-- For any GStreamer usage question, please contact the community using the #gstreamer channel on IRC https://www.oftc.net/ or the mailing list on https://gstreamer.freedesktop.org/lists/ -->
#### Expected Behavior
<!-- What did you expect to happen -->
#### Observed Behavior
<!-- What actually happened -->
#### Setup
- **Operating System:**
- **Device:** Computer / Tablet / Mobile / Virtual Machine <!-- Delete as appropriate !-->
- **gstreamer-rs Version:**
- **GStreamer Version:**
- **Command line:**
### Steps to reproduce the bug
<!-- please fill in exact steps which reproduce the bug on your system, for example: -->
1. open terminal
2. type `command`
### How reproducible is the bug?
<!-- The reproducibility of the bug is Always/Intermittent/Only once after doing a very specific set of steps-->
### Screenshots if relevant
### Solutions you have tried
### Related non-duplicate issues
### Additional Information
<!-- Any other information such as logs. Make use of <details> for long output -->

.gitmodules (deleted, 12 lines)

@@ -1,12 +0,0 @@
[submodule "gir"]
path = gir
url = https://github.com/gtk-rs/gir
update = none
[submodule "gir-files"]
path = gir-files
url = https://github.com/gtk-rs/gir-files
update = none
[submodule "gst-gir-files"]
path = gst-gir-files
url = https://gitlab.freedesktop.org/gstreamer/gir-files-rs.git
update = none

.rustfmt.toml (new file, 15 lines)

@@ -0,0 +1,15 @@
ignore = [
"gstreamer/src/auto/",
"gstreamer-app/src/auto",
"gstreamer-audio/src/auto",
"gstreamer-base/src/auto",
"gstreamer-net/src/auto",
"gstreamer-pbutils/src/auto",
"gstreamer-player/src/auto",
"gstreamer-rtsp/src/auto",
"gstreamer-rtsp-server/src/auto",
"gstreamer-sdp/src/auto",
"gstreamer-video/src/auto",
"gstreamer-webrtc/src/auto",
"gstreamer-editing-services/src/auto",
]

.travis.yml (new file, 39 lines)

@@ -0,0 +1,39 @@
dist: trusty
sudo: required
language: rust
rust:
- stable
- beta
- nightly
addons:
apt:
packages:
- libglib2.0-dev
- libxml2-dev
- libgtk-3-dev
- libegl1-mesa
- libgles2-mesa
- libgl1-mesa-dri
- libgl1-mesa-glx
- libwayland-egl1-mesa
matrix:
allow_failures:
- rust: nightly
script:
- rustc --version
- cargo build --all
- G_DEBUG=fatal_warnings cargo test --all
- cargo build --all --all-features
- G_DEBUG=fatal_warnings cargo test --all --all-features
before_install:
- curl -L https://people.freedesktop.org/~slomo/gstreamer-1.14.3.tar.gz | tar xz
- sed -i "s;prefix=/root/gstreamer;prefix=$PWD/gstreamer;g" $PWD/gstreamer/lib/x86_64-linux-gnu/pkgconfig/*.pc
- export PKG_CONFIG_PATH=$PWD/gstreamer/lib/x86_64-linux-gnu/pkgconfig
- export GST_PLUGIN_SYSTEM_PATH=$PWD/gstreamer/lib/x86_64-linux-gnu/gstreamer-1.0
- export GST_PLUGIN_SCANNER=$PWD/gstreamer/libexec/gstreamer-1.0/gst-plugin-scanner
- export PATH=$PATH:$PWD/gstreamer/bin
- export LD_LIBRARY_PATH=$PWD/gstreamer/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH

License notice (deleted, 23 lines)

@@ -1,23 +0,0 @@
The gstreamer-rs project is dual-licensed under Apache 2.0 and MIT terms, with
the exception of the sys crates which are licensed only under the terms of the
MIT license.
Copyrights in the gstreamer-rs project are retained by their contributors. No
copyright assignment is required to contribute to the gstreamer-rs project.
Some files include explicit copyright notices and/or license notices. For full
authorship information, see the version control history.
Except as otherwise noted (below and/or in individual files), gstreamer-rs is
licensed under the Apache License, Version 2.0 <LICENSE-APACHE> or
<http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT>
or <http://opensource.org/licenses/MIT>, at your option.
All the sys crates (e.g. gstreamer/sys and gstreamer-base/sys) are licensed
only under the terms of the MIT license.
This project provides interoperability with various GStreamer libraries but
doesn't distribute any parts of them. Distributing compiled libraries and
executables that link to those libraries may be subject to terms of the GNU
LGPL or other licenses. For more information check the license of each
GStreamer library.

Cargo.lock (generated; 3067 changed lines)

File diff suppressed because it is too large.

Cargo.toml

@@ -1,152 +1,20 @@
[workspace]
resolver = "2"
default-members = [
"gstreamer/sys",
"gstreamer-analytics/sys",
"gstreamer-app/sys",
"gstreamer-audio/sys",
"gstreamer-base/sys",
"gstreamer-check/sys",
"gstreamer-controller/sys",
"gstreamer-editing-services/sys",
"gstreamer-mpegts/sys",
"gstreamer-net/sys",
"gstreamer-pbutils/sys",
"gstreamer-play/sys",
"gstreamer-player/sys",
"gstreamer-rtp/sys",
"gstreamer-rtsp/sys",
"gstreamer-rtsp-server/sys",
"gstreamer-sdp/sys",
"gstreamer-tag/sys",
"gstreamer-video/sys",
"gstreamer-webrtc/sys",
"gstreamer",
"gstreamer-analytics",
"gstreamer-app",
"gstreamer-audio",
"gstreamer-base",
"gstreamer-check",
"gstreamer-controller",
"gstreamer-editing-services",
"gstreamer-mpegts",
"gstreamer-net",
"gstreamer-pbutils",
"gstreamer-play",
"gstreamer-player",
"gstreamer-rtp",
"gstreamer-rtsp",
"gstreamer-rtsp-server",
"gstreamer-sdp",
"gstreamer-tag",
"gstreamer-validate",
"gstreamer-video",
"gstreamer-webrtc",
"examples",
"tutorials",
]
members = [
"gstreamer/sys",
"gstreamer-analytics/sys",
"gstreamer-app/sys",
"gstreamer-audio/sys",
"gstreamer-base/sys",
"gstreamer-check/sys",
"gstreamer-controller/sys",
"gstreamer-editing-services/sys",
"gstreamer-gl/sys",
"gstreamer-gl/egl/sys",
"gstreamer-gl/wayland/sys",
"gstreamer-gl/x11/sys",
"gstreamer-mpegts/sys",
"gstreamer-net/sys",
"gstreamer-pbutils/sys",
"gstreamer-play/sys",
"gstreamer-player/sys",
"gstreamer-rtp/sys",
"gstreamer-rtsp/sys",
"gstreamer-rtsp-server/sys",
"gstreamer-sdp/sys",
"gstreamer-tag/sys",
"gstreamer-video/sys",
"gstreamer-webrtc/sys",
"gstreamer-allocators/sys",
"gstreamer",
"gstreamer-analytics",
"gstreamer-app",
"gstreamer-audio",
"gstreamer-base",
"gstreamer-check",
"gstreamer-controller",
"gstreamer-editing-services",
"gstreamer-gl",
"gstreamer-gl/egl",
"gstreamer-gl/wayland",
"gstreamer-gl/x11",
"gstreamer-mpegts",
"gstreamer-net",
"gstreamer-pbutils",
"gstreamer-play",
"gstreamer-player",
"gstreamer-rtp",
"gstreamer-rtsp",
"gstreamer-rtsp-server",
"gstreamer-sdp",
"gstreamer-tag",
"gstreamer-validate",
"gstreamer-video",
"gstreamer-pbutils",
"gstreamer-webrtc",
"gstreamer-allocators",
"gstreamer-utils",
"gstreamer-check",
"gstreamer-editing-services",
"examples",
"tutorials",
]
exclude = ["gir"]
[workspace.package]
version = "0.23.0"
categories = ["api-bindings", "multimedia"]
repository = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs"
homepage = "https://gstreamer.freedesktop.org"
edition = "2021"
rust-version = "1.70"
[workspace.dependencies]
gio = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
gio-sys = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
glib = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
glib-sys = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
gobject-sys = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
cairo-rs = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
pango = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
pangocairo = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
gstreamer-audio-sys = { path = "./gstreamer-audio/sys"}
gstreamer-base-sys = { path = "./gstreamer-base/sys"}
gstreamer-gl-sys = { path = "./gstreamer-gl/sys"}
gstreamer-net-sys = { path = "./gstreamer-net/sys"}
gstreamer-pbutils-sys = { path = "./gstreamer-pbutils/sys"}
gstreamer-rtsp-sys = { path = "./gstreamer-rtsp/sys"}
gstreamer-sdp-sys = { path = "./gstreamer-sdp/sys"}
gstreamer-sys = { path = "./gstreamer/sys"}
gstreamer-video-sys = { path = "./gstreamer-video/sys"}
ges = { package = "gstreamer-editing-services", path = "./gstreamer-editing-services" }
gst = { package = "gstreamer", path = "./gstreamer" }
gst-allocators = { package = "gstreamer-allocators", path = "./gstreamer-allocators" }
gst-app = { package = "gstreamer-app", path = "./gstreamer-app" }
gst-audio = { package = "gstreamer-audio", path = "./gstreamer-audio" }
gst-base = { package = "gstreamer-base", path = "./gstreamer-base" }
gst-check = { package = "gstreamer-check", path = "./gstreamer-check" }
gst-gl = { package = "gstreamer-gl", path = "./gstreamer-gl" }
gst-gl-egl = { package = "gstreamer-gl-egl", path = "./gstreamer-gl/egl" }
gst-gl-x11 = { package = "gstreamer-gl-x11", path = "./gstreamer-gl/x11" }
gst-net = { package = "gstreamer-net", path = "./gstreamer-net" }
gst-pbutils = { package = "gstreamer-pbutils", path = "./gstreamer-pbutils" }
gst-play = { package = "gstreamer-play", path = "./gstreamer-play" }
gst-player = { package = "gstreamer-player", path = "./gstreamer-player" }
gst-rtsp = { package = "gstreamer-rtsp", path = "./gstreamer-rtsp" }
gst-rtsp-server = { package = "gstreamer-rtsp-server", path = "./gstreamer-rtsp-server" }
gst-sdp = { package = "gstreamer-sdp", path = "./gstreamer-sdp" }
gst-video = { package = "gstreamer-video", path = "./gstreamer-video" }

Bindings update notes (deleted, 12 lines)

@@ -1,12 +0,0 @@
# How to update the bindings
* Take the updated .gir files (e.g. from your gst-build checkout) and put
them in the gir-files directory
* In the gir-files directory, run ./fix.sh
* If there is a new GStreamer version: Manually update `gst*/Cargo.toml`
* Run generator.py
* Investigate the diff, fix any mess-ups, look at commented functions and
implement them manually
* `cargo build`
* `for f in (ls |grep gstreamer); cd $f; cargo build --features v1_18; cd ..; end`
(or with the new version you just added)

Gir_Gst.toml (new file, 1190 lines)

File diff suppressed because it is too large.

Gir_GstApp.toml (new file, 144 lines)

@@ -0,0 +1,144 @@
[options]
girs_dir = "gir-files"
library = "GstApp"
version = "1.0"
min_cfg_version = "1.8"
target_path = "gstreamer-app"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
external_libraries = [
"GLib",
"GObject",
"Gst",
"GstBase",
]
generate = [
"GstApp.AppStreamType",
]
manual = [
"GObject.Object",
"Gst.Object",
"Gst.Element",
"Gst.URIHandler",
"Gst.FlowReturn",
"Gst.Format",
"GstBase.BaseSrc",
"GstBase.BaseSink",
]
[[object]]
name = "GstApp.AppSink"
status = "generate"
trait = false
[[object.signal]]
name = "pull-sample"
# Action signal
ignore = true
[[object.signal]]
name = "pull-preroll"
# Action signal
ignore = true
[[object.signal]]
name = "try-pull-sample"
# Action signal
ignore = true
[[object.signal]]
name = "try-pull-preroll"
# Action signal
ignore = true
[[object.function]]
name = "set_caps"
[[object.function.parameter]]
name = "caps"
nullable = true
[[object]]
name = "GstApp.AppSrc"
status = "generate"
trait = false
[[object.function]]
name = "push_buffer"
# Pass by value
ignore = true
[[object.function]]
name = "push_buffer_list"
# Pass by value
ignore = true
[[object.signal]]
name = "end-of-stream"
# Action signal
ignore = true
[[object.signal]]
name = "push-buffer"
# Action signal
ignore = true
[[object.signal]]
name = "push-buffer-list"
# Action signal
ignore = true
[[object.signal]]
name = "push-sample"
# Action signal
ignore = true
[[object.function]]
name = "set_latency"
# ClockTime
ignore = true
[[object.function]]
name = "get_latency"
# ClockTime
ignore = true
[[object.function]]
name = "set_caps"
[[object.function.parameter]]
name = "caps"
nullable = true
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref-mut"
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.BufferList"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Sample"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"

Gir_GstAudio.toml (new file, 126 lines)

@@ -0,0 +1,126 @@
[options]
girs_dir = "gir-files"
library = "GstAudio"
version = "1.0"
min_cfg_version = "1.8"
target_path = "gstreamer-audio"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
external_libraries = [
"GLib",
"GObject",
"Gst",
]
generate = [
"GstAudio.AudioFlags",
"GstAudio.AudioFormatFlags",
"GstAudio.AudioLayout",
"GstAudio.AudioPackFlags",
"GstAudio.AudioChannelPosition",
"GstAudio.StreamVolume",
"GstAudio.StreamVolumeFormat",
]
manual = [
"GObject.Object",
"Gst.Object",
"GstAudio.AudioInfo",
"GstAudio.AudioFormatInfo",
]
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "GstAudio.AudioFormat"
status = "generate"
[[object.member]]
name = "s16"
# Platform dependant
ignore = true
[[object.member]]
name = "u16"
# Platform dependant
ignore = true
[[object.member]]
name = "s24_32"
# Platform dependant
ignore = true
[[object.member]]
name = "u24_32"
# Platform dependant
ignore = true
[[object.member]]
name = "s32"
# Platform dependant
ignore = true
[[object.member]]
name = "u32"
# Platform dependant
ignore = true
[[object.member]]
name = "s24"
# Platform dependant
ignore = true
[[object.member]]
name = "u24"
# Platform dependant
ignore = true
[[object.member]]
name = "s20"
# Platform dependant
ignore = true
[[object.member]]
name = "u20"
# Platform dependant
ignore = true
[[object.member]]
name = "s18"
# Platform dependant
ignore = true
[[object.member]]
name = "u18"
# Platform dependant
ignore = true
[[object.member]]
name = "f32"
# Platform dependant
ignore = true
[[object.member]]
name = "f64"
# Platform dependant
ignore = true
[[object]]
name = "GstAudio.AudioStreamAlign"
status = "generate"
[[object.function]]
name = "process"
# bool does not signal error
ignore = true

Gir_GstBase.toml (new file, 145 lines)

@@ -0,0 +1,145 @@
[options]
girs_dir = "gir-files"
library = "GstBase"
version = "1.0"
min_cfg_version = "1.8"
target_path = "gstreamer-base"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
external_libraries = [
"GLib",
"GObject",
"Gst",
]
generate = [
"GstBase.BaseSink",
"GstBase.BaseTransform",
"GstBase.PushSrc",
]
manual = [
"GObject.Object",
"GLib.Bytes",
"Gst.Object",
"Gst.Element",
"Gst.BufferPool",
"Gst.ClockTimeDiff",
"Gst.ClockReturn",
"Gst.FlowReturn",
"Gst.Format",
"Gst.Pad",
"Gst.TypeFindProbability",
]
[[object]]
name = "GstBase.Adapter"
status = "generate"
trait = false
concurrency = "send-unique"
[[object.function]]
name = "map"
# Unsafe
ignore = true
[[object.function]]
name = "unmap"
# Unsafe
ignore = true
[[object.function]]
name = "copy"
# Unsafe
ignore = true
[[object.function]]
name = "push"
# Move Buffer
ignore = true
[[object.function]]
name = "take"
# Useless copying of data
ignore = true
[[object]]
name = "GstBase.FlowCombiner"
# Manual because ref/unref functions were added much later
status = "manual"
trait = false
concurrency = "none"
[[object]]
name = "GstBase.BaseSrc"
status = "generate"
[[object.function]]
name = "submit_buffer_list"
# Pass by value, to be added manually
ignore = true
[[object]]
name = "GstBase.Aggregator"
status = "generate"
version = "1.14"
[[object.function]]
name = "finish_buffer"
# Takes ownership
ignore = true
[[object.property]]
name = "latency"
version = "1.14"
[[object]]
name = "GstBase.AggregatorPad"
status = "generate"
version = "1.14"
[[object]]
name = "GstBase.*"
status = "generate"
# We'll opt-in for constants at a later time
[[object.constant]]
pattern = ".+"
ignore = true
[[object.function]]
name = "type_find_helper_for_data"
# broken data type
ignore = true
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref-mut"
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.BufferList"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Sample"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"

Gir_GstCheck.toml (new file, 37 lines)

@@ -0,0 +1,37 @@
[options]
girs_dir = "gir-files"
library = "GstCheck"
version = "1.0"
min_cfg_version = "1.8"
target_path = "gstreamer-check"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
external_libraries = [
"GLib",
"GObject",
"Gst",
]
generate = [
]
manual = [
"GObject.Object",
"Gst.Object",
"Gst.Clock",
"Gst.ClockTimeDiff",
"Gst.ClockType",
]
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "GstCheck.TestClock"
status = "generate"
trait = false

Gir_GstEditingServices.toml (new file, 172 lines)

@@ -0,0 +1,172 @@
[options]
girs_dir = "gir-files"
library = "GES"
version = "1.0"
min_cfg_version = "1.8"
target_path = "gstreamer-editing-services"
work_mode = "normal"
generate_safety_asserts = true
single_version_file = true
external_libraries = [
"GLib",
"GObject",
"Gio",
"Gst",
"GstPbutils",
]
manual = [
"GLib.Error",
"GLib.Source",
"GLib.DateTime",
"Gio.Cancellable",
"Gio.AsyncReadyCallback",
"GObject.Object",
"Gst.Segment",
"Gst.StaticCaps",
"Gst.StaticPadTemplate",
"GstPbutils.EncodingProfile",
"GstPbutils.DiscovererInfo",
"GstPbutils.DiscovererStreamInfo",
"Gst.Object",
"Gst.Element",
"Gst.Pad",
"Gst.Pipeline",
]
generate = [
"GES.EditMode",
"GES.PipelineFlags",
"GES.Edge",
"GES.Effect",
"GES.TrackType",
"GES.BaseEffect",
"GES.TimelineElement",
"GES.Group",
"GES.TrackElement",
"GES.Layer",
"GES.Clip",
"GES.UriClip",
"GES.Asset",
"GES.UriClipAsset",
"GES.UriSourceAsset",
"GES.Extractable",
"GES.Project",
]
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref-mut"
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.BufferList"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Sample"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "GES.Timeline"
status = "generate"
[[object.function]]
name = "append_layer"
[object.function.return]
nullable = false
[[object.function]]
name = "load_from_uri"
[object.function.return]
bool_return_is_error = "Failed to load from uri"
[[object.function]]
name = "add_layer"
[object.function.return]
bool_return_is_error = "Failed to add layer"
[[object.function]]
name = "remove_layer"
[object.function.return]
bool_return_is_error = "Failed to remove layer"
[[object.function]]
name = "add_track"
[object.function.return]
bool_return_is_error = "Failed to add track"
[[object.function]]
name = "move_layer"
[object.function.return]
bool_return_is_error = "Failed to move layer"
[[object]]
name = "GES.Container"
status = "generate"
trait_name = "GESContainerExt"
[[object.function]]
name = "add"
[object.function.return]
bool_return_is_error = "Failed to add element"
[[object.function]]
name = "remove"
[object.function.return]
bool_return_is_error = "Failed to remove element"
[[object.function]]
name = "edit"
[object.function.return]
bool_return_is_error = "Failed to edit container"
[[object]]
name = "GES.Pipeline"
status = "generate"
trait_name = "GESPipelineExt"
[[object.function]]
name = "set_render_settings"
[object.function.return]
bool_return_is_error = "Failed to set render settings"
[[object.function]]
name = "set_mode"
[object.function.return]
bool_return_is_error = "Failed to set mode"
[[object.function]]
name = "save_thumbnail"
[object.function.return]
bool_return_is_error = "Failed to save thumbnail"
[[object]]
name = "GES.Track"
status = "generate"
trait_name = "GESTrackExt"
[[object.function]]
name = "add_element"
[object.function.return]
bool_return_is_error = "Failed to add element"
[[object.function]]
name = "remove_element"
[object.function.return]
bool_return_is_error = "Failed to remove element"

Gir_GstNet.toml

@@ -1,20 +1,19 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstNet"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.8"
target_path = "gstreamer-net"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"Gio",
"GLib",
"GObject",
"Gst",
"Gio",
]
generate = [
@@ -22,44 +21,52 @@ generate = [
manual = [
"GObject.Object",
"Gst.Bus",
"Gst.Clock",
"Gst.Object",
"Gst.Pipeline",
"Gst.Structure",
"Gst.Clock",
"Gst.Bus",
]
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "Option"
[[object]]
name = "GstNet.NetClientClock"
status = "generate"
final_type = true
[[object]]
name = "GstNet.NetTimeProvider"
status = "generate"
final_type = true
[[object.function]]
name = "new"
[object.function.return]
nullable_return_is_error = "Failed to create NetTimeProvider"
conversion_type = "scalar"
[[object]]
name = "GstNet.NtpClock"
status = "generate"
final_type = true
trait = false
[[object.function]]
name = "new"
# Floating reference handling
ignore = true
[[object]]
name = "GstNet.PtpClock"
status = "generate"
final_type = true
trait = false
[[object.function]]
name = "new"
[[object.function.parameter]]
name = "name"
[object.function.return]
nullable_return_is_error = "Can't create gst::PtpClock"
# Floating reference handling
ignore = true
[[object]]
name = "GstNet.NetClientClock"
status = "generate"
trait = false
[[object.function]]
name = "new"
# Floating reference handling
ignore = true
[[object]]
name = "GstNet.NetTimeProvider"
status = "generate"
trait = false
[[object.function]]
name = "new"
# Floating reference handling
ignore = true

Gir_GstPbutils.toml (new file, 236 lines)

@@ -0,0 +1,236 @@
[options]
girs_dir = "gir-files"
library = "GstPbutils"
version = "1.0"
min_cfg_version = "1.8"
target_path = "gstreamer-pbutils"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
external_libraries = [
"GLib",
"GObject",
"Gst",
]
generate = [
"GstPbutils.DiscovererResult",
"GstPbutils.DiscovererSerializeFlags",
]
manual = [
"GLib.Error",
"GLib.Source",
"GLib.DateTime",
"GLib.Variant",
"GObject.Object",
"Gst.Object",
"Gst.Element",
]
[[object]]
name = "Gst.Toc"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Message"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Event"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.TagList"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref-mut"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "GstPbutils.Discoverer"
status = "generate"
trait = false
[[object.function]]
name = "discover_uri_async"
[object.function.return]
bool_return_is_error = "Failed to add URI to list of discovers"
# Manually implemented to use ClockTime
[[object.property]]
name = "timeout"
ignore = true
[[object]]
name = "GstPbutils.DiscovererInfo"
status = "generate"
[[object.function]]
name = "copy"
[object.function.return]
nullable = false
[[object]]
name = "GstPbutils.DiscovererStreamInfo"
status = "generate"
# Not useful
[[object.function]]
name = "list_free"
ignore = true
[[object.function]]
name = "get_stream_type_nick"
[object.function.return]
nullable = false
[[object]]
name = "GstPbutils.DiscovererAudioInfo"
status = "generate"
trait = false
[[object]]
name = "GstPbutils.DiscovererVideoInfo"
status = "generate"
trait = false
# Implement manually to expose as gst::Fraction
[[object.function]]
name = "get_framerate_num"
ignore = true
[[object.function]]
name = "get_framerate_denom"
ignore = true
[[object.function]]
name = "get_par_num"
ignore = true
[[object.function]]
name = "get_par_denom"
ignore = true
[[object]]
name = "GstPbutils.DiscovererSubtitleInfo"
status = "generate"
trait = false
[[object]]
name = "GstPbutils.DiscovererContainerInfo"
status = "generate"
trait = false
[[object]]
name = "GstPbutils.EncodingProfile"
status = "generate"
# Ignore all setters, making it immutable. A builder will be provided.
[[object.function]]
name = "new"
ignore = true
[[object.function]]
pattern = "set_.*"
ignore = true
[[object.function]]
name = "copy"
[object.function.return]
nullable = false
[[object.function]]
name = "get_input_caps"
[object.function.return]
nullable = false
[[object.function]]
name = "get_format"
[object.function.return]
nullable = false
[[object.property]]
name = "restriction-caps"
# encodingprofile is immutable after constructed
ignore = true
[[object]]
name = "GstPbutils.EncodingContainerProfile"
status = "generate"
# Make it immutable, only able to be constructed for a builder
[[object.function]]
name = "new"
ignore = true
[[object.function]]
name = "add_profile"
ignore = true
[[object]]
name = "GstPbutils.EncodingTarget"
status = "generate"
[[object.function]]
name = "add_profile"
# can be provided on constructor and we better
# consider this immutable
ignore = true
[[object.function]]
name = "get_category"
[object.function.return]
nullable = false
[[object.function]]
name = "get_description"
[object.function.return]
nullable = false
[[object.function]]
name = "get_name"
[object.function.return]
nullable = false
[[object]]
name = "GstPbutils.EncodingAudioProfile"
status = "generate"
trait = false
# Ignore all setters, making it immutable. A builder will be provided.
[[object.function]]
name = "new"
ignore = true
[[object.function]]
pattern = "set_.*"
ignore = true
[[object]]
name = "GstPbutils.EncodingVideoProfile"
status = "generate"
trait = false
# Ignore all setters, making it immutable. A builder will be provided.
[[object.function]]
name = "new"
ignore = true
[[object.function]]
pattern = "set_.*"
ignore = true

Gir_GstPlayer.toml

@@ -1,14 +1,13 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstPlayer"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.12"
target_path = "gstreamer-player"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"GLib",
@@ -19,11 +18,9 @@ external_libraries = [
generate = [
"GstPlayer.PlayerColorBalanceType",
"GstPlayer.PlayerError",
"GstPlayer.PlayerMediaInfo",
"GstPlayer.PlayerSignalDispatcher",
"GstPlayer.PlayerSnapshotFormat",
"GstPlayer.PlayerState",
"GstPlayer.PlayerStreamInfo",
"GstPlayer.PlayerVideoRenderer",
"GstPlayer.PlayerVisualization",
]
@@ -32,8 +29,8 @@ manual = [
"GLib.Error",
"GLib.MainContext",
"GObject.Object",
"Gst.Element",
"Gst.Object",
"Gst.Element",
"GstVideo.VideoMultiviewFlags",
"GstVideo.VideoMultiviewFramePacking",
]
@@ -44,29 +41,25 @@ status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
name = "Gst.TagList"
status = "manual"
conversion_type = "Option"
ref_mode = "ref"
[[object]]
name = "Gst.Sample"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.TagList"
status = "manual"
ref_mode = "ref"
ref_mode = "ref-mut"
[[object]]
name = "GstPlayer.Player"
status = "generate"
final_type = true
trait = false
[[object.function]]
name = "visualizations_free"
@@ -76,12 +69,16 @@ final_type = true
[[object.function]]
name = "set_config"
# Custom type
manual = true
ignore = true
[[object.function]]
name = "new"
ignore = true
[[object.function]]
name = "get_config"
# Custom type
manual = true
ignore = true
[[object.function]]
pattern = "config_.*"
@@ -89,10 +86,9 @@ final_type = true
ignore = true
[[object.function]]
name = "seek"
[[object.function.parameter]]
name = "position"
mandatory = true
name = "get_pipeline"
[object.function.return]
nullable = false
[[object.function]]
name = "set_audio_track"
@@ -111,6 +107,10 @@ final_type = true
[[object.function]]
name = "set_visualization"
[[object.function.parameter]]
name = "name"
nullable = true
[object.function.return]
bool_return_is_error = "Failed to set visualization"
@@ -122,7 +122,7 @@ final_type = true
name = "duration-changed"
concurrency = "send"
# Pass ClockTime instead of u64
manual = true
ignore = true
[[object.signal]]
name = "end-of-stream"
@@ -144,13 +144,13 @@ final_type = true
name = "position-updated"
concurrency = "send"
# Pass ClockTime instead of u64
manual = true
ignore = true
[[object.signal]]
name = "seek-done"
concurrency = "send"
# Pass ClockTime instead of u64
manual = true
ignore = true
[[object.signal]]
name = "state-changed"
@@ -172,24 +172,60 @@ final_type = true
name = "warning"
concurrency = "send"
[[object.property]]
name = "subtitle-video-offset"
version = "1.16"
[[object]]
name = "GstPlayer.PlayerStreamInfo"
status = "generate"
[[object.function]]
name = "get_video_snapshot"
# Use &StructureRef
manual = true
name = "get_stream_type"
[object.function.return]
nullable = false
[[object]]
name = "GstPlayer.PlayerAudioInfo"
status = "generate"
final_type = true
trait = false
[[object]]
name = "GstPlayer.PlayerVideoInfo"
status = "generate"
trait = false
[[object.function]]
name = "get_framerate"
# Fraction
ignore = true
[[object.function]]
name = "get_pixel_aspect_ratio"
# Fraction
ignore = true
[[object]]
name = "GstPlayer.PlayerSubtitleInfo"
status = "generate"
trait = false
[[object]]
name = "GstPlayer.PlayerMediaInfo"
status = "generate"
trait = false
[[object.function]]
name = "get_uri"
[object.function.return]
nullable = false
[[object]]
name = "GstPlayer.PlayerVideoOverlayVideoRenderer"
status = "generate"
trait = false
[[object]]
name = "GstPlayer.PlayerGMainContextSignalDispatcher"
status = "generate"
final_type = true
trait = false
[[object.function]]
name = "new"
@@ -197,44 +233,6 @@ final_type = true
ignore = true
[[object]]
name = "GstPlayer.PlayerSubtitleInfo"
status = "generate"
final_type = true
[[object]]
name = "GstPlayer.PlayerVideoInfo"
status = "generate"
final_type = true
[[object.function]]
name = "get_framerate"
# Fraction
manual = true
[[object.function]]
name = "get_pixel_aspect_ratio"
# Fraction
manual = true
[[object]]
name = "GstPlayer.PlayerVideoOverlayVideoRenderer"
status = "generate"
final_type = true
[[object.function]]
name = "new"
manual = true
[[object.function]]
name = "new_with_sink"
# with_handle_and_sink() also calls this.
rename = "with_sink"
manual = true
[[object.function]]
name = "get_window_handle"
manual = true
[[object.function]]
name = "set_window_handle"
manual = true
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"

View file

@@ -1,19 +1,19 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstRtsp"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.8"
target_path = "gstreamer-rtsp"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
doc_target_path = "docs/gstreamer-rtsp/docs.md"
external_libraries = [
"Gio",
"GLib",
"GObject",
"Gio",
"GstSdp"
]
@@ -22,71 +22,22 @@ generate = [
"GstRtsp.RTSPEvent",
"GstRtsp.RTSPFamily",
"GstRtsp.RTSPHeaderField",
"GstRtsp.RTSPLowerTrans",
"GstRtsp.RTSPMethod",
"GstRtsp.RTSPMsgType",
"GstRtsp.RTSPProfile",
"GstRtsp.RTSPRangeUnit",
"GstRtsp.RTSPResult",
"GstRtsp.RTSPState",
"GstRtsp.RTSPStatusCode",
"GstRtsp.RTSPTimeType",
"GstRtsp.RTSPTransMode"
]
manual = [
"GLib.MainContext",
]
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "Option"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstRtsp.RTSPAuthCredential"
status = "generate"
concurrency = "send"
[[object]]
name = "GstRtsp.RTSPAuthParam"
status = "generate"
concurrency = "send"
[[object]]
name = "GstRtsp.RTSPLowerTrans"
status = "generate"
[[object.member]]
name = "unknown"
ignore = true
[[object]]
name = "GstRtsp.RTSPMethod"
status = "generate"
[[object.member]]
name = "invalid"
ignore = true
[[object]]
name = "GstRtsp.RTSPProfile"
status = "generate"
[[object.member]]
name = "unknown"
ignore = true
[[object]]
name = "GstRtsp.RTSPTransMode"
status = "generate"
[[object.member]]
name = "unknown"
ignore = true
[[object]]
name = "GstRtsp.RTSPUrl"
status = "generate"
@@ -95,3 +46,23 @@ concurrency = "send"
[[object.function]]
name = "get_port"
ignore = true
[[object]]
name = "GstRtsp.RTSPAuthParam"
status = "generate"
concurrency="send"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref-mut"

View file

@@ -1,19 +1,19 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstRtspServer"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.8"
target_path = "gstreamer-rtsp-server"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
doc_target_path = "docs/gstreamer-rtsp-server/docs.md"
external_libraries = [
"Gio",
"GLib",
"GObject",
"Gio",
"Gst",
"GstNet",
"GstRtsp",
@@ -21,52 +21,61 @@ external_libraries = [
]
generate = [
"GstRtspServer.RTSPAddressFlags",
"GstRtspServer.RTSPAddressPoolResult",
"GstRtspServer.RTSPFilterResult",
"GstRtspServer.RTSPMediaFactoryURI",
"GstRtspServer.RTSPMediaStatus",
"GstRtspServer.RTSPOnvifClient",
"GstRtspServer.RTSPOnvifMedia",
"GstRtspServer.RTSPOnvifServer",
"GstRtspServer.RTSPMountPoints",
"GstRtspServer.RTSPPublishClockMode",
"GstRtspServer.RTSPSession",
"GstRtspServer.RTSPSuspendMode",
"GstRtspServer.RTSPThreadPool",
"GstRtspServer.RTSPThreadType",
"GstRtspServer.RTSPTransportMode",
"GstRtspServer.RTSPTransportMode"
]
manual = [
"Gio.Cancellable",
"Gio.Socket",
"Gio.SocketFamily",
"Gio.TlsAuthenticationMode",
"Gio.TlsCertificate",
"Gio.TlsCertificateFlags",
"Gio.TlsConnection",
"Gio.TlsDatabase",
"GLib.Error",
"GLib.IOCondition",
"GLib.MainContext",
"GLib.MainLoop",
"GLib.IOCondition",
"GLib.Source",
"GLib.MainLoop",
"GLib.ThreadPool",
"GLib.Error",
"GObject.Object",
"Gst.Bin",
"Gst.Clock",
"Gio.TlsCertificateFlags",
"Gio.TlsCertificate",
"Gio.TlsDatabase",
"Gio.TlsConnection",
"Gio.TlsAuthenticationMode",
"Gio.Socket",
"Gio.Cancellable",
"Gio.SocketFamily",
"Gst.Element",
"Gst.Pad",
"Gst.Pipeline",
"Gst.State",
"GstNet.NetTimeProvider",
"Gst.Clock",
"Gst.Pipeline",
"Gst.Pad",
"Gst.Bin",
"Gst.FlowReturn",
"GstRtsp.RTSPAuthMethod",
"GstRtsp.RTSPLowerTrans",
"GstRtsp.RTSPProfile",
"GstRtsp.RTSPRangeUnit",
"GstRtsp.RTSPUrl",
"GstRtsp.RTSPResult",
"GstRtsp.RTSPStatusCode",
"GstRtsp.RTSPUrl",
"GstRtsp.RTSPRangeUnit",
"GstRtsp.RTSPProfile",
"GstRtsp.RTSPLowerTrans"
]
[[object]]
name = "Gst.Message"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref-mut"
[[object]]
name = "Gst.Buffer"
status = "manual"
@@ -77,231 +86,36 @@ name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "Option"
[[object]]
name = "Gst.FlowReturn"
status = "manual"
must_use = true
[object.conversion_type]
variant = "Result"
ok_type = "gst::FlowSuccess"
err_type = "gst::FlowError"
[[object]]
name = "Gst.Message"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Object"
# For renaming the trait...
status = "manual"
trait_name = "GstObjectExt"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstRtspServer.RTSPAddress"
status = "generate"
concurrency = "send"
[[object]]
name = "GstRtspServer.RTSPAddressFlags"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object]]
name = "GstRtspServer.RTSPAddressPool"
status = "generate"
manual_traits = ["RTSPAddressPoolExtManual"]
[[object.function]]
name = "add_range"
[object.function.return]
bool_return_is_error = "Failed to add address range"
[[object.function]]
name = "acquire_address"
[object.function.return]
nullable_return_is_error = "Failed to acquire address"
[[object.function]]
name = "reserve_address"
manual = true
[[object]]
name="GstRtspServer.RTSPAuth"
status="generate"
manual_traits = ["RTSPAuthExtManual"]
[[object.function]]
name = "check"
[object.function.return]
bool_return_is_error = "Check failed"
[[object.function]]
name = "connect_accept_certificate"
# Use Result<(), LoggableError>
manual = true
[[object.function]]
name = "set_default_token"
# gir forgets mut
manual = true
[[object]]
name = "GstRtspServer.RTSPClient"
status = "generate"
manual_traits = ["RTSPClientExtManual"]
[[object.function]]
name = "attach"
manual = true
[[object.function]]
name = "set_send_func"
# user_data takes raw pointer
ignore = true
[[object]]
name = "GstRtspServer.RTSPContext"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstRtspServer.RTSPMedia"
status = "generate"
manual_traits = ["RTSPMediaExtManual"]
[[object.function]]
name = "suspend"
[object.function.return]
bool_return_is_error = "Failed to suspend media"
[[object.function]]
name = "unprepare"
[object.function.return]
bool_return_is_error = "Failed to unprepare media"
[[object.function]]
name = "prepare"
[object.function.return]
bool_return_is_error = "Failed to prepare media"
[[object.function]]
name = "unsuspend"
[object.function.return]
bool_return_is_error = "Failed to unsuspend media"
[[object.function]]
name = "take_pipeline"
manual = true
name = "GstRtspServer.RTSPToken"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstRtspServer.RTSPMediaFactory"
status = "generate"
manual_traits = ["RTSPMediaFactoryExtManual"]
[[object.function]]
name = "add_role_from_structure"
# Different structure mutability needed
manual = true
[[object.function]]
name = "construct"
[object.function.return]
nullable_return_is_error = "Failed to construct media"
[[object.function]]
name = "create_element"
[object.function.return]
nullable_return_is_error = "Failed to create media element"
[[object]]
name="GstRtspServer.RTSPMountPoints"
status="generate"
[[object.function]]
name = "make_path"
[object.function.return]
nullable_return_is_error = "Failed to make path"
[[object]]
name = "GstRtspServer.RTSPOnvifMediaFactory"
status = "generate"
manual_traits = ["RTSPMediaExtManual"]
[[object.function]]
name = "requires_backchannel"
# on extension trait
manual = true
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "GstRtspServer.RTSPServer"
status = "generate"
manual_traits = ["RTSPServerExtManual"]
[[object.function]]
name = "attach"
manual = true
[[object.function]]
name = "io_func"
[object.function.return]
bool_return_is_error = "Failed to connect the source"
[[object.function]]
name = "transfer_connection"
[object.function.return]
bool_return_is_error = "Failed to transfer to the connection"
[[object]]
name="GstRtspServer.RTSPSession"
status="generate"
[[object.function]]
name = "get_media"
# Map to dup_media if new enough as get_media is not thread-safe
manual = true
[[object.function]]
name = "dup_media"
ignore = true
[[object]]
name = "GstRtspServer.RTSPSessionMedia"
name = "GstRtspServer.RTSPClient"
status = "generate"
[[object.function]]
name = "set_state"
[object.function.return]
bool_return_is_error = "Failed to set state of session media"
[[object]]
name = "GstRtspServer.RTSPSessionPool"
status = "generate"
manual_traits = ["RTSPSessionPoolExtManual"]
[[object.function]]
name = "remove"
[object.function.return]
bool_return_is_error = "Failed to remove session from pool"
[[object.function]]
name = "create_watch"
# GSource return
manual = true
[[object.function]]
name = "create"
[object.function.return]
nullable_return_is_error = "Failed to create session pool"
name = "attach"
ignore = true
[[object]]
name = "GstRtspServer.RTSPStream"
@@ -310,7 +124,7 @@ status = "generate"
[[object.function]]
name = "query_position"
ignore = true
[[object.function]]
name = "query_stop"
ignore = true
@@ -335,20 +149,10 @@ status = "generate"
[object.function.return]
bool_return_is_error = "Failed to leave bin"
[[object.function]]
name = "set_blocked"
[object.function.return]
bool_return_is_error = "Failed to block/unblock the dataflow"
[[object.function]]
name = "unblock_linked"
[object.function.return]
bool_return_is_error = "Failed to unblock the dataflow"
[[object.function]]
name = "update_crypto"
[object.function.return]
bool_return_is_error = "Failed to update crypto"
[[object]]
name = "GstRtspServer.RTSPAddress"
status = "generate"
concurrency = "send"
[[object]]
name = "GstRtspServer.RTSPStreamTransport"
@@ -371,16 +175,79 @@ concurrency = "none"
bool_return_is_error = "Failed to set active"
[[object]]
name = "GstRtspServer.RTSPThread"
status = "manual"
ref_mode = "ref"
name = "GstRtspServer.RTSPAddressPool"
status = "generate"
[[object.function]]
name = "stop"
# Moved to RTSPThreadRef
name = "add_range"
[object.function.return]
bool_return_is_error = "Failed to add address range"
[[object.function]]
name = "reserve_address"
ignore = true
[[object]]
name = "GstRtspServer.RTSPToken"
status = "manual"
ref_mode = "ref"
name = "GstRtspServer.RTSPMedia"
status = "generate"
[[object.function]]
name = "suspend"
[object.function.return]
bool_return_is_error = "Failed to suspend media"
[[object.function]]
name = "unprepare"
[object.function.return]
bool_return_is_error = "Failed to unprepare media"
[[object.function]]
name = "unsuspend"
[object.function.return]
bool_return_is_error = "Failed to unsuspend media"
[[object]]
name = "GstRtspServer.RTSPMediaFactory"
status = "generate"
[[object.function]]
name = "add_role_from_structure"
# Different structure mutability needed
ignore = true
[[object]]
name = "GstRtspServer.RTSPSessionMedia"
status = "generate"
[[object.function]]
name = "set_state"
[object.function.return]
bool_return_is_error = "Failed to set state of session media"
[[object]]
name = "GstRtspServer.RTSPSessionPool"
status = "generate"
[[object.function]]
name = "remove"
[object.function.return]
bool_return_is_error = "Failed to remove session from pool"
[[object.function]]
name = "create_watch"
# GSource return
ignore = true
[[object]]
name="GstRtspServer.RTSPAuth"
status="generate"
[[object.function]]
name = "make_basic"
[object.function.return]
nullable = false
[[object.function]]
name = "set_default_token"
# gir forgets mut
ignore = true

79
Gir_GstSdp.toml Normal file
View file

@@ -0,0 +1,79 @@
[options]
girs_dir = "gir-files"
library = "GstSdp"
version = "1.0"
min_cfg_version = "1.8"
target_path = "gstreamer-sdp"
work_mode = "normal"
concurrency = "send"
generate_safety_asserts = true
single_version_file = true
doc_target_path = "docs/gstreamer-sdp/docs.md"
external_libraries = [
"GLib",
"GObject",
"Gst",
]
generate = [
"GstSdp.MIKEYCacheType",
"GstSdp.MIKEYEncAlg",
"GstSdp.MIKEYKVType",
"GstSdp.MIKEYKeyDataType",
"GstSdp.MIKEYMacAlg",
"GstSdp.MIKEYMapType",
"GstSdp.MIKEYPRFFunc",
"GstSdp.MIKEYPayloadType",
"GstSdp.MIKEYSecProto",
"GstSdp.MIKEYSecSRTP",
"GstSdp.MIKEYTSType",
"GstSdp.MIKEYType",
]
manual = [
"GLib.Bytes",
]
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstSdp.MIKEYMessage"
status = "generate"
[[object.function]]
name = "replace_payload"
ignore = true
[[object.function]]
name = "insert_payload"
ignore = true
[[object.function]]
name = "add_payload"
ignore = true
[[object.function]]
name = "get_payload"
ignore = true
[[object.function]]
name = "base64_encode"
ignore = true
[[object]]
name = "GstSdp.MIKEYPayload"
status = "generate"
[[object.function]]
name = "kemac_add_sub"
ignore = true
[[object.function]]
name = "kemac_get_sub"
ignore = true
[[object.function]]
name = "key_data_set_interval"
ignore = true

61
Gir_GstVideo.toml Normal file
View file

@@ -0,0 +1,61 @@
[options]
girs_dir = "gir-files"
library = "GstVideo"
version = "1.0"
min_cfg_version = "1.8"
target_path = "gstreamer-video"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
external_libraries = [
"GLib",
"GObject",
"Gst",
"GstBase",
]
generate = [
"GstVideo.VideoFormat",
"GstVideo.VideoFormatFlags",
"GstVideo.VideoTileMode",
"GstVideo.VideoInterlaceMode",
"GstVideo.VideoFlags",
"GstVideo.VideoChromaSite",
"GstVideo.VideoColorMatrix",
"GstVideo.VideoTransferFunction",
"GstVideo.VideoColorPrimaries",
"GstVideo.VideoMultiviewMode",
"GstVideo.VideoMultiviewFlags",
"GstVideo.VideoFieldOrder",
"GstVideo.VideoFrameFlags",
"GstVideo.VideoMultiviewFramePacking",
"GstVideo.VideoFilter",
]
manual = [
"GObject.Object",
"Gst.Object",
"Gst.Element",
"GstBase.BaseTransform",
"GstVideo.VideoInfo",
"GstVideo.VideoFormatInfo",
"GstVideo.VideoColorimetry",
"GstVideo.VideoColorRange",
"GstVideo.VideoFrame",
]
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstVideo.VideoOverlay"
status = "generate"
[[object.function]]
name = "set_render_rectangle"
[object.function.return]
bool_return_is_error = "Failed to set render rectangle"

View file

@@ -1,14 +1,13 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstWebRTC"
version = "1.0"
min_cfg_version = "1.14"
target_path = "gstreamer-webrtc"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"GLib",
@@ -18,90 +17,54 @@ external_libraries = [
]
generate = [
"GstWebRTC.WebRTCBundlePolicy",
"GstWebRTC.WebRTCDataChannelState",
"GstWebRTC.WebRTCDTLSSetup",
"GstWebRTC.WebRTCDTLSTransportState",
"GstWebRTC.WebRTCError",
"GstWebRTC.WebRTCFECType",
"GstWebRTC.WebRTCICECandidateStats",
"GstWebRTC.WebRTCICEComponent",
"GstWebRTC.WebRTCICEConnectionState",
"GstWebRTC.WebRTCICEGatheringState",
"GstWebRTC.WebRTCICEConnectionState",
"GstWebRTC.WebRTCICERole",
"GstWebRTC.WebRTCICEStream",
"GstWebRTC.WebRTCICETransportPolicy",
"GstWebRTC.WebRTCKind",
"GstWebRTC.WebRTCPeerConnectionState",
"GstWebRTC.WebRTCPriorityType",
"GstWebRTC.WebRTCRTPTransceiverDirection",
"GstWebRTC.WebRTCSCTPTransportState",
"GstWebRTC.WebRTCICEComponent",
"GstWebRTC.WebRTCSDPType",
"GstWebRTC.WebRTCDTLSSetup",
"GstWebRTC.WebRTCPeerConnectionState",
"GstWebRTC.WebRTCRTPTransceiverDirection",
"GstWebRTC.WebRTCSignalingState",
"GstWebRTC.WebRTCStatsType",
]
manual = [
"GLib.Bytes",
"GLib.Error",
"GObject.Object",
"Gst.Caps",
"Gst.Structure",
"GstSdp.SDPMessage",
]
[[object]]
name = "GstWebRTC.WebRTCDataChannel"
status = "generate"
final_type = true
[[object.function]]
name = "on_error"
# takes ownership of SDP message
manual = true
[[object]]
name = "GstWebRTC.WebRTCDTLSTransport"
status = "generate"
final_type = true
[[object]]
name = "GstWebRTC.WebRTCICE"
status = "generate"
[[object.function]]
name = "add_candidate"
# ABI breakage in 1.24 needs working around
manual = true
trait = false
[[object]]
name = "GstWebRTC.WebRTCICETransport"
status = "generate"
final_type = true
trait = false
[[object]]
name = "GstWebRTC.WebRTCRTPReceiver"
status = "generate"
final_type = true
trait = false
[[object]]
name = "GstWebRTC.WebRTCRTPSender"
status = "generate"
final_type = true
trait = false
[[object]]
name = "GstWebRTC.WebRTCRTPTransceiver"
status = "generate"
final_type = true
[[object]]
name = "GstWebRTC.WebRTCSCTPTransport"
status = "generate"
version = "1.20"
trait = false
[[object]]
name = "GstWebRTC.WebRTCSessionDescription"
status = "generate"
final_type = true
trait = false
[[object.function]]
name = "new"

143
README.md
View file

@@ -1,14 +1,22 @@
# gstreamer-rs [![crates.io](https://img.shields.io/crates/v/gstreamer.svg)](https://crates.io/crates/gstreamer) [![pipeline status](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/badges/main/pipeline.svg)](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/commits/main)
# NOTE: The canonical repository for gstreamer-rs has moved to [freedesktop.org GitLab](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs)!
# gstreamer-rs [![crates.io](https://img.shields.io/crates/v/gstreamer.svg)](https://crates.io/crates/gstreamer) [![pipeline status](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/badges/master/pipeline.svg)](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/commits/master)
[GStreamer](https://gstreamer.freedesktop.org/) bindings for Rust.
Documentation can be found [here](https://gstreamer.pages.freedesktop.org/gstreamer-rs/stable/latest/docs/gstreamer/).
Documentation can be found [here](https://slomo.pages.freedesktop.org/rustdocs/gstreamer/gstreamer/).
These bindings are providing a safe API that can be used to interface with
GStreamer, e.g. for writing GStreamer-based applications and GStreamer plugins.
GStreamer, e.g. for writing GStreamer-based applications.
The bindings are mostly autogenerated with [gir](https://github.com/gtk-rs/gir/)
For background and motivation, see the [announcement blogpost](https://coaxion.net/blog/2017/07/writing-gstreamer-applications-in-rust/).
The bindings (since 0.8.0) are autogenerated with [gir](https://github.com/gtk-rs/gir/)
based on the [GObject-Introspection](https://wiki.gnome.org/Projects/GObjectIntrospection/)
API metadata provided by the GStreamer project.
API metadata provided by the GStreamer project. Older versions before 0.8.0 were manually
written and the repository can be found [here](https://github.com/arturoc/gstreamer1.0-rs).
The API of the two is incompatible.
A crate for writing GStreamer plugins in Rust can be found here: https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs
## Table of Contents
1. [Installation](#installation)
@@ -24,7 +32,7 @@ API metadata provided by the GStreamer project.
## Installation
To build the GStreamer bindings or anything depending on them, you need to
have at least GStreamer 1.14 and gst-plugins-base 1.14 installed. In addition,
have at least GStreamer 1.8 and gst-plugins-base 1.8 installed. In addition,
some of the examples/tutorials require various GStreamer plugins to be
available, which can be found in gst-plugins-base, gst-plugins-good,
gst-plugins-bad, gst-plugins-ugly and/or gst-libav.
@@ -38,20 +46,23 @@ package manager, or build them from source.
On Debian/Ubuntu they can be installed with
```console
```
$ apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \
gstreamer1.0-plugins-base gstreamer1.0-plugins-good \
gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly \
gstreamer1.0-libav libgstrtspserver-1.0-dev libges-1.0-dev
gstreamer1.0-libav libgstrtspserver-1.0-dev
```
The minimum required version of the above libraries is >= 1.14. If you
The minimum required version of the above libraries is >= 1.8. If you
build the gstreamer-player sub-crate, or any of the examples that
depend on gstreamer-player, you must ensure that in addition to the above
packages, `libgstreamer-plugins-bad1.0-dev` is installed. See the `Cargo.toml`
files for the full details.
depend on gstreamer-player, you must ensure that in addition to the
above packages, `libgstreamer-plugins-bad1.0-dev` is installed and
that the version is >= 1.12. See the `Cargo.toml` files for the full
details.
```console
```
# Only if you wish to install gstreamer-player, make sure the version
# of this package is >= 1.12.
$ apt-get install libgstreamer-plugins-bad1.0-dev
```
@@ -66,40 +77,34 @@ You can install GStreamer and the plugins via [Homebrew](https://brew.sh/) or
by installing the [binaries](https://gstreamer.freedesktop.org/data/pkg/osx/)
provided by the GStreamer project.
We recommend using the official GStreamer binaries over Homebrew, especially
as GStreamer in Homebrew is [currently broken](https://github.com/orgs/Homebrew/discussions/3740#discussioncomment-3804964).
#### GStreamer Binaries
You need to download the *two* `.pkg` files from the GStreamer website and
install them, e.g. `gstreamer-1.0-1.20.4-universal.pkg` and
`gstreamer-1.0-devel-1.20.4-universal.pkg`.
After installation, you also need to set the `PATH` environment variable as
follows
```console
$ export PATH="/Library/Frameworks/GStreamer.framework/Versions/1.0/bin${PATH:+:$PATH}"
```
Also note that the `pkg-config` from GStreamer should be the first one in
the `PATH` as other versions have all kinds of quirks that will cause
problems.
#### Homebrew
Homebrew only installs various plugins if explicitly enabled, so some extra
`--with-*` flags may be required.
```console
```
$ brew install gstreamer gst-plugins-base gst-plugins-good \
gst-plugins-bad gst-plugins-ugly gst-libav gst-rtsp-server \
gst-editing-services --with-orc --with-libogg --with-opus \
--with-pango --with-theora --with-libvorbis --with-libvpx \
--enable-gtk3
--with-orc --with-libogg --with-opus --with-pango --with-theora \
--with-libvorbis
```
Make sure the version of these libraries is >= 1.14.
If you wish to install the gstreamer-player sub-crate, make sure the
version of these libraries is >= 1.12. Otherwise, a version >= 1.8 is
sufficient.
#### GStreamer Binaries
You need to download the *two* `.pkg` files from the GStreamer website and
install them, e.g. `gstreamer-1.0-1.12.3-x86_64.pkg` and
`gstreamer-1.0-devel-1.12.3-x86_64.pkg`.
After installation, you also need to install `pkg-config` (e.g. via Homebrew)
and set the `PKG_CONFIG_PATH` environment variable
```
$ export PKG_CONFIG_PATH="/Library/Frameworks/GStreamer.framework/Versions/Current/lib/pkgconfig${PKG_CONFIG_PATH:+:$PKG_CONFIG_PATH}"
```
<a name="installation-windows"/>
@@ -110,55 +115,39 @@ with `pacman` or by installing the
[binaries](https://gstreamer.freedesktop.org/data/pkg/windows/) provided by
the GStreamer project.
We recommend using the official GStreamer binaries over MSYS2.
#### GStreamer Binaries
You need to download the *two* `.msi` files for your platform from the
GStreamer website and install them, e.g. `gstreamer-1.0-x86_64-1.20.4.msi` and
`gstreamer-1.0-devel-x86_64-1.20.4.msi`. Make sure to select the version that
matches your Rust toolchain, i.e. MinGW or MSVC.
After installation, set the `PATH` environment variable as follows:
```console
# For a UNIX-style shell:
$ export PATH="c:/gstreamer/1.0/msvc_x86_64/bin${PATH:+:$PATH}"
# For cmd.exe:
$ set PATH=C:\gstreamer\1.0\msvc_x86_64\bin;%PATH%
```
Make sure to update the path to where you have actually installed GStreamer
and for the corresponding toolchain.
Also note that the `pkg-config.exe` from GStreamer should be the first one in
the `PATH` as other versions have all kinds of quirks that will cause
problems.
#### MSYS2 / pacman
```console
$ pacman -S glib2-devel pkg-config \
mingw-w64-x86_64-gstreamer mingw-w64-x86_64-gst-plugins-base \
```
$ pacman -S pkg-config mingw-w64-x86_64-gstreamer mingw-w64-x86_64-gst-plugins-base \
mingw-w64-x86_64-gst-plugins-good mingw-w64-x86_64-gst-plugins-bad \
mingw-w64-x86_64-gst-plugins-ugly mingw-w64-x86_64-gst-libav \
mingw-w64-x86_64-gst-rtsp-server
```
Make sure the version of these libraries is >= 1.14.
If you wish to install the gstreamer-player sub-crate, make sure the
version of these libraries is >= 1.12. Otherwise, a version >= 1.8 is
sufficient.
Note that the version of `pkg-config` included in `MSYS2` is
[known to have problems](https://github.com/rust-lang/pkg-config-rs/issues/51#issuecomment-346300858)
compiling GStreamer, so you may need to install another version. One option
would be [`pkg-config-lite`](https://sourceforge.net/projects/pkgconfiglite/).
#### GStreamer Binaries
You need to download the *two* `.msi` files for your platform from the
GStreamer website and install them, e.g. `gstreamer-1.0-x86_64-1.12.3.msi` and
`gstreamer-1.0-devel-x86_64-1.12.3.msi`.
After installation, you also need to install `pkg-config` (e.g. via MSYS2 or
from [here](https://sourceforge.net/projects/pkgconfiglite/))
and set the `PKG_CONFIG_PATH` environment variable
```
$ export PKG_CONFIG_PATH="c:\\gstreamer\\1.0\\x86_64\\lib\\pkgconfig${PKG_CONFIG_PATH:+:$PKG_CONFIG_PATH}"
```
<a name="getting-started"/>
## Getting Started
The API reference can be found
[here](https://gstreamer.pages.freedesktop.org/gstreamer-rs/stable/latest/docs/gstreamer/), however it is
[here](https://slomo.pages.freedesktop.org/rustdocs/gstreamer/gstreamer/), however it is
only the Rust API reference and does not explain any of the concepts.
For getting started with GStreamer development, the best would be to follow
@@ -174,18 +163,14 @@ In addition there are
[tutorials](https://gstreamer.freedesktop.org/documentation/tutorials/) on the
GStreamer website. Many of them were ported to Rust already and the code can
be found in the
[tutorials](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/tree/main/tutorials)
[tutorials](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/tree/master/tutorials)
directory.
Some further examples for various aspects of GStreamer and how to use it from
Rust can be found in the
[examples](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/tree/main/examples)
[examples](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/tree/master/examples)
directory.
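
For a first taste of the API, a minimal application built on these bindings
might look like the following sketch. This assumes the 0.12-era API surface
(`gst::parse_launch`, a `StateChangeReturn` from `set_state`); the pipeline
description itself is just an example.

```rust
extern crate gstreamer as gst;
use gst::prelude::*;

fn main() {
    // GStreamer must be initialized before any other call.
    gst::init().unwrap();

    // Build a pipeline from a textual description, as gst-launch-1.0 would.
    let pipeline =
        gst::parse_launch("audiotestsrc num-buffers=100 ! autoaudiosink").unwrap();
    let _ = pipeline.set_state(gst::State::Playing);

    // Block until an error or end-of-stream message arrives on the bus.
    let bus = pipeline.get_bus().unwrap();
    let _msg = bus.timed_pop_filtered(
        gst::CLOCK_TIME_NONE,
        &[gst::MessageType::Eos, gst::MessageType::Error],
    );

    let _ = pipeline.set_state(gst::State::Null);
}
```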
Various GStreamer plugins written in Rust can be found in the
[gst-plugins-rs](https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs)
repository.
<a name="license"/>
## LICENSE

View file

@@ -1,8 +0,0 @@
export RUSTUP_HOME='/usr/local/rustup'
export PATH=$PATH:/usr/local/cargo/bin
export PKG_CONFIG_PATH=/usr/local/lib/x86_64-linux-gnu/pkgconfig
export GST_PLUGIN_SYSTEM_PATH=/usr/local/lib/x86_64-linux-gnu/gstreamer-1.0
export GST_PLUGIN_SCANNER=/usr/local/libexec/gstreamer-1.0/gst-plugin-scanner
export PATH=$PATH:/usr/local/bin
export LD_LIBRARY_PATH=/usr/local/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH

View file

@@ -1,42 +0,0 @@
from itertools import chain
import os
import sys
from pathlib import Path as P
from subprocess import check_call as exec
NATIVE_CRATES = ["gstreamer-utils"]
def git(*args):
exec(["git"] + list(args))
def check_no_git_diff():
git("diff", "--exit-code")
check_no_git_diff()
git("clone", "--depth", "1", "https://github.com/gtk-rs/checker")
check_no_git_diff()
rootdir = P(".")
checker_dir = P("checker")
with (checker_dir / "Cargo.toml").open("a") as f:
f.write("[workspace]\n")
check_no_git_diff()
exec(['cargo', 'build', '--locked', '--color=always', '--release'], cwd=checker_dir)
check_no_git_diff()
exec('cargo run --color=always --release -- ../gstreamer* ../gstreamer-gl/{egl,wayland,x11}', cwd=checker_dir, shell=True)
gl_dir = rootdir / 'gstreamer-gl'
for crate in chain(rootdir.glob('gstreamer*'), [gl_dir / 'egl', gl_dir / 'wayland', gl_dir / 'x11']):
# Ignore "native" crates
if crate.name in NATIVE_CRATES:
continue
print(f'--> Checking doc aliases in {crate.absolute()}')
exec(['python3', 'doc_aliases.py', crate.absolute()], cwd=checker_dir)
print(f'--> {crate.absolute()}')
exec(['./checker/check_init_asserts', crate.absolute()])
check_no_git_diff()

View file

@@ -1,7 +0,0 @@
variables:
GST_RS_IMG_TAG: "2024-05-10.0"
GST_RS_STABLE: "1.78.0"
GST_RS_MSRV: "1.70.0"
# The branch we use to build GStreamer from in the docker images
# Ex. main, 1.24, my-test-branch
GST_UPSTREAM_BRANCH: 'main'

View file

@@ -1,11 +0,0 @@
set -e
RELEASE=1.4.1
git clone https://code.videolan.org/videolan/dav1d.git --branch $RELEASE
cd dav1d
meson build -D prefix=/usr/local
ninja -C build
ninja -C build install
cd ..
rm -rf dav1d

View file

@@ -1,49 +0,0 @@
#! /bin/bash
set -e
DEFAULT_BRANCH="$GST_UPSTREAM_BRANCH"
pip3 install meson==1.1.1 --break-system-packages
# gstreamer-rs already has a 'gstreamer' directory so don't clone there
pushd .
cd ..
git clone https://gitlab.freedesktop.org/gstreamer/gstreamer.git \
--depth 1 \
--branch "$DEFAULT_BRANCH"
cd gstreamer
# plugins required by tests
PLUGINS="-D gst-plugins-base:ogg=enabled \
-D gst-plugins-base:vorbis=enabled \
-D gst-plugins-base:theora=enabled \
-D gst-plugins-good:matroska=enabled \
-D gst-plugins-good:vpx=enabled \
-D gst-plugins-bad:opus=enabled \
-D gst-plugins-ugly:x264=enabled"
echo "subproject('gtk')" >> meson.build
meson setup build \
-D prefix=/usr/local \
-D gpl=enabled \
-D ugly=enabled \
-D examples=disabled \
-D gtk_doc=disabled \
-D introspection=disabled \
-D libav=disabled \
-D python=disabled \
-D vaapi=disabled \
$PLUGINS
meson compile -C build
meson install -C build
ldconfig
cd ..
rm -rf gstreamer/
# Check what plugins we installed
gst-inspect-1.0
popd

View file

@@ -1,51 +0,0 @@
#! /bin/bash
source ./ci/env.sh
set -e
export CARGO_HOME='/usr/local/cargo'
RUSTUP_VERSION=1.27.1
RUST_VERSION=$1
RUST_IMAGE_FULL=$2
RUST_ARCH="x86_64-unknown-linux-gnu"
RUSTUP_URL=https://static.rust-lang.org/rustup/archive/$RUSTUP_VERSION/$RUST_ARCH/rustup-init
wget $RUSTUP_URL
chmod +x rustup-init;
./rustup-init -y --no-modify-path --profile minimal --default-toolchain $RUST_VERSION;
rm rustup-init;
chmod -R a+w $RUSTUP_HOME $CARGO_HOME
rustup --version
cargo --version
rustc --version
if [ "$RUST_IMAGE_FULL" = "1" ]; then
rustup component add clippy-preview
rustup component add rustfmt
cargo install --locked --force cargo-deny
cargo install --locked --force cargo-outdated
cargo install --locked --force typos-cli --version "1.19.0"
# Coverage tools
rustup component add llvm-tools-preview
cargo install --locked --force grcov
fi
if [ "$RUST_VERSION" = "nightly" ]; then
# FIXME: Don't build cargo-c with --locked for now because otherwise a
# version of ahash is used that doesn't build on nightly anymore
cargo install cargo-c --version 0.9.22+cargo-0.72
else
cargo install --locked cargo-c --version 0.9.22+cargo-0.72
fi
if [ "$RUST_VERSION" = "nightly" ]; then
rustup component add rustfmt --toolchain nightly
# Documentation tools
cargo install --locked --force rustdoc-stripper
fi

View file

@@ -1,30 +0,0 @@
#! /bin/bash
set -ex
rustc --version
cargo --version
for crate in gstreamer* gstreamer-gl/{egl,wayland,x11}; do
if [ -e "$crate/Cargo.toml" ]; then
if [ -n "$ALL_FEATURES" ]; then
FEATURES="--all-features"
else
FEATURES=""
fi
echo "Building and testing $crate with $FEATURES"
cargo build --locked --color=always --manifest-path "$crate/Cargo.toml" $FEATURES
G_DEBUG=fatal_warnings cargo test --color=always --manifest-path "$crate/Cargo.toml" $FEATURES
fi
done
if [ -n "$EXAMPLES_TUTORIALS" ]; then
# Keep in sync with examples/Cargo.toml
# List all features except windows/win32
EXAMPLES_FEATURES="--features=rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gl,gst-gl-x11,gst-gl-egl,allocators,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18"
cargo build --locked --color=always --manifest-path examples/Cargo.toml --bins --examples "$EXAMPLES_FEATURES"
cargo build --locked --color=always --manifest-path tutorials/Cargo.toml --bins --examples --all-features
fi

View file

@@ -1,38 +0,0 @@
#! /bin/bash
set -ex
rustc --version
cargo --version
cargo clippy --version
# Keep features in sync with run-cargo-test.sh
get_features() {
crate=$1
case "$crate" in
gstreamer-audio|gstreamer-editing-services|gstreamer-gl|gstreamer-pbutils|gstreamer-rtp|gstreamer-rtsp|gstreamer-video|gstreamer)
echo "--features=serde,v1_26"
;;
*)
echo "--features=v1_26"
;;
esac
}
for crate in gstreamer* gstreamer-gl/{egl,wayland,x11}; do
if [ -e "$crate/Cargo.toml" ]; then
FEATURES=$(get_features "$crate")
echo "Running clippy on $crate with $FEATURES"
cargo clippy --locked --color=always --manifest-path "$crate/Cargo.toml" $FEATURES --all-targets -- $CLIPPY_LINTS
fi
done
# Keep in sync with examples/Cargo.toml
# List all features except windows/win32
EXAMPLES_FEATURES="--features=rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gl,gst-gl-x11,gst-gl-egl,allocators,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18"
# And also run over all the examples/tutorials
cargo clippy --locked --color=always --manifest-path examples/Cargo.toml --all-targets "$EXAMPLES_FEATURES" -- $CLIPPY_LINTS
cargo clippy --locked --color=always --manifest-path tutorials/Cargo.toml --all-targets --all-features -- $CLIPPY_LINTS

View file

@@ -1,43 +0,0 @@
#! /bin/bash
set -ex
rustc --version
cargo --version
for crate in gstreamer*/sys gstreamer-gl/*/sys; do
if [ -e "$crate/Cargo.toml" ]; then
echo "Building $crate with --all-features"
cargo build --locked --color=always --manifest-path "$crate/Cargo.toml" --all-features
fi
done
for crate in gstreamer/sys \
gstreamer-allocators/sys \
gstreamer-analytics/sys \
gstreamer-app/sys \
gstreamer-audio/sys \
gstreamer-base/sys \
gstreamer-check/sys \
gstreamer-controller/sys \
gstreamer-editing-services/sys \
gstreamer-gl/sys \
gstreamer-gl/egl/sys \
gstreamer-gl/wayland/sys \
gstreamer-gl/x11/sys \
gstreamer-mpegts/sys \
gstreamer-net/sys \
gstreamer-pbutils/sys \
gstreamer-play/sys \
gstreamer-player/sys \
gstreamer-rtp/sys \
gstreamer-rtsp-server/sys \
gstreamer-rtsp/sys \
gstreamer-sdp/sys \
gstreamer-tag/sys \
gstreamer-validate/sys \
gstreamer-video/sys \
gstreamer-webrtc/sys; do
echo "Testing $crate with --all-features)"
cargo test --locked --color=always --manifest-path $crate/Cargo.toml --all-features
done

View file

@@ -1,86 +0,0 @@
# List of all the crates we want to build
# We need to do this manually to avoid trying
# to build egl,wayland,x11 etc, which can't
# work on windows
[string[]] $crates = @(
'gstreamer',
# Unix specific atm
# 'gstreamer-allocators'
'gstreamer-app',
'gstreamer-audio',
'gstreamer-base',
'gstreamer-check',
'gstreamer-controller',
'gstreamer-editing-services',
'gstreamer-gl',
# 'gstreamer-gl/egl',
# 'gstreamer-gl/wayland',
# 'gstreamer-gl/x11',
'gstreamer-mpegts',
'gstreamer-mpegts/sys',
'gstreamer-net',
'gstreamer-pbutils',
'gstreamer-player',
'gstreamer-rtp',
'gstreamer-rtsp',
'gstreamer-rtsp-server',
'gstreamer-sdp',
'gstreamer-tag',
'gstreamer-tag/sys',
'gstreamer-video',
'gstreamer-webrtc',
'tutorials',
'examples'
)
# "" is the default build, no flags appended
[string[]] $features_matrix = @(
# "--no-default-features",
# "--features=v1_18",
# "--features=v1_20",
"",
"--all-features"
)
foreach($features in $features_matrix) {
foreach($crate in $crates)
{
Write-Host "Building crate: $crate"
Write-Host "Features: $features"
$env:LocalFeatures = $features
# Don't append feature flags if the string is null/empty
# Or when we want to build without default features
if ($env:LocalFeatures -and ($env:LocalFeatures -ne '--no-default-features')) {
if ($crate -eq 'examples') {
# FIXME: We can do --all-features for examples once we have gtk3 installed in the image
$env:LocalFeatures = "--features=rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18,windows"
}
if ($crate -eq 'tutorials') {
$env:LocalFeatures = ''
}
}
Write-Host "with features: $env:LocalFeatures"
cargo build --color=always --manifest-path $crate/Cargo.toml --all-targets $env:LocalFeatures
if (!$?) {
Write-Host "Failed to build crate: $crate"
Exit 1
}
if (($crate -eq "gstreamer-tag/sys") -or ($crate -eq "gstreamer-mpegts/sys")) {
Write-Host "Skipping tests for $crate"
continue
}
$env:G_DEBUG="fatal_warnings"
cargo test --no-fail-fast --color=always --manifest-path $crate/Cargo.toml $env:LocalFeatures
if (!$?) {
Write-Host "Tests failed to for crate: $crate"
Exit 1
}
}
}

View file

@@ -1,22 +0,0 @@
# escape=`
FROM "registry.freedesktop.org/gstreamer/gstreamer/amd64/windows:2023-07-17.0-main"
SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; $ProgressPreference = 'SilentlyContinue';"]
ARG DEFAULT_BRANCH="1.24"
ARG RUST_VERSION="invalid"
RUN choco install -y pkgconfiglite nasm llvm openssl
# https://stackoverflow.com/a/50716450
RUN setx PATH '%PATH%;C:\Program Files\NASM;C:\gst-install\bin'
ENV PKG_CONFIG_PATH="C:\gst-install\lib\pkgconfig"
COPY install_gst.ps1 install_dav1d.ps1 C:\
RUN C:\install_gst.ps1
RUN C:\install_dav1d.ps1
RUN Invoke-WebRequest -Uri https://win.rustup.rs/x86_64 -OutFile C:\rustup-init.exe
RUN C:\rustup-init.exe -y --profile minimal --default-toolchain $env:RUST_VERSION
RUN cargo install --locked cargo-c --version 0.9.22+cargo-0.72

View file

@@ -1,60 +0,0 @@
# Copied from mesa, big kudos
#
# https://gitlab.freedesktop.org/mesa/mesa/-/blob/master/.gitlab-ci/windows/mesa_container.ps1
# https://gitlab.freedesktop.org/mesa/mesa/-/blob/34e3e164936d1d3cef267da7780e87f062fedf39/.gitlab-ci/windows/mesa_container.ps1
# Implements the equivalent of ci-templates container-ifnot-exists, using
# Docker directly as we don't have buildah/podman/skopeo available under
# Windows, nor can we execute Docker-in-Docker
$registry_uri = $args[0]
$registry_username = $args[1]
$registry_password = $args[2]
$registry_user_image = $args[3]
$registry_central_image = $args[4]
$dockerfile = $args[5]
docker --config "windows-docker.conf" login -u "$registry_username" -p "$registry_password" "$registry_uri"
if (!$?) {
Write-Host "docker login failed to $registry_uri"
Exit 1
}
# if the image already exists, don't rebuild it
docker --config "windows-docker.conf" pull "$registry_user_image"
if ($?) {
Write-Host "User image $registry_user_image already exists; not rebuilding"
docker --config "windows-docker.conf" logout "$registry_uri"
Exit 0
}
# if the image already exists upstream, copy it
docker --config "windows-docker.conf" pull "$registry_central_image"
if ($?) {
Write-Host "Copying central image $registry_central_image to user image $registry_user_image"
docker --config "windows-docker.conf" tag "$registry_central_image" "$registry_user_image"
docker --config "windows-docker.conf" push "$registry_user_image"
$pushstatus = $?
docker --config "windows-docker.conf" logout "$registry_uri"
if (!$pushstatus) {
Write-Host "Pushing image to $registry_user_image failed"
Exit 1
}
Exit 0
}
Write-Host "No image found at $registry_user_image or $registry_central_image; rebuilding"
docker --config "windows-docker.conf" build $DOCKER_BUILD_ARGS --no-cache -t "$registry_user_image" -f "$dockerfile" "./ci/windows-docker"
if (!$?) {
Write-Host "Container build failed"
docker --config "windows-docker.conf" logout "$registry_uri"
Exit 1
}
Get-Date
docker --config "windows-docker.conf" push "$registry_user_image"
$pushstatus = $?
docker --config "windows-docker.conf" logout "$registry_uri"
if (!$pushstatus) {
Write-Host "Pushing image to $registry_user_image failed"
Exit 1
}

View file

@@ -1,28 +0,0 @@
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;
# Download gstreamer and all its subprojects
git clone -b 1.4.1 --depth 1 https://code.videolan.org/videolan/dav1d.git C:\dav1d
if (!$?) {
Write-Host "Failed to clone dav1d"
Exit 1
}
Set-Location C:\dav1d
# This is fine, we are not going to use the GtkMedia* apis
$env:MESON_ARGS = "--prefix=C:\gst-install\"
Write-Output "Building dav1d"
cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && meson _build $env:MESON_ARGS && meson compile -C _build && ninja -C _build install"
if (!$?) {
Write-Host "Failed to build and install dav1d"
Exit 1
}
cd C:\
cmd /c rmdir /s /q C:\dav1d
if (!$?) {
Write-Host "Failed to remove dav1d checkout"
Exit 1
}

View file

@@ -1,71 +0,0 @@
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;
# Download gstreamer and all its subprojects
git clone -b $env:DEFAULT_BRANCH --depth 1 https://gitlab.freedesktop.org/gstreamer/gstreamer.git C:\gstreamer
if (!$?) {
Write-Host "Failed to clone gstreamer"
Exit 1
}
Set-Location C:\gstreamer
# Copy the cache we already have in the image to avoid massive redownloads
Move-Item C:/subprojects/* C:\gstreamer\subprojects
# Update the subprojects cache
Write-Output "Running meson subproject reset"
meson subprojects update --reset
if (!$?) {
Write-Host "Failed to update gstreamer subprojects"
Exit 1
}
$MESON_ARGS = @(`
"--prefix=C:\gst-install", `
"-Dglib:installed_tests=false", `
"-Dlibnice:tests=disabled", `
"-Dlibnice:examples=disabled", `
"-Dffmpeg:tests=disabled", `
"-Dopenh264:tests=disabled", `
"-Dpygobject:tests=false", `
"-Dgpl=enabled", `
"-Dugly=enabled", `
"-Dbad=enabled", `
"-Dges=enabled", `
"-Drtsp_server=enabled", `
"-Ddevtools=enabled", `
"-Dsharp=disabled", `
"-Dpython=disabled", `
"-Dlibav=disabled", `
"-Dvaapi=disabled", `
"-Dgst-plugins-base:pango=enabled", `
"-Dgst-plugins-good:cairo=enabled", `
"-Dgst-plugins-good:lame=disabled"
)
$PSDefaultParameterValues['Out-File:Encoding'] = 'utf8'
echo "subproject('gtk')" >> meson.build
Write-Output "Building gstreamer"
meson setup --vsenv $MESON_ARGS _build
if (!$?) {
type "_build\meson-logs\meson-log.txt"
Write-Host "Failed to run meson setup, see log above"
Exit 1
}
Write-Output "Compiling gstreamer"
meson compile -C _build
if (!$?) {
Write-Host "Failed to run meson compile"
Exit 1
}
# meson install does a spurious rebuild sometimes that then fails
meson install --no-rebuild -C _build
if (!$?) {
Write-Host "Failed to run meson install"
Exit 1
}
cd c:\
Remove-Item -LiteralPath "C:\gstreamer" -Force -Recurse

View file

@@ -1,37 +0,0 @@
exclude = [
"examples",
"tutorials",
]
[advisories]
db-path = "~/.cargo/advisory-db"
db-urls = ["https://github.com/rustsec/advisory-db"]
vulnerability = "deny"
unmaintained = "warn"
notice = "warn"
ignore = []
[licenses]
unlicensed = "deny"
default = "deny"
copyleft = "deny"
allow-osi-fsf-free = "either"
confidence-threshold = 0.8
[bans]
multiple-versions = "deny"
wildcards = "allow"
highlight = "all"
# proc-macro-crate depends on an older version of toml_edit
# https://github.com/bkchr/proc-macro-crate/pull/50
[[bans.skip]]
name = "toml_edit"
version = "0.21"
[sources]
unknown-registry = "deny"
unknown-git = "deny"
allow-git = [
"https://github.com/gtk-rs/gtk-rs-core",
]

679
docs/gstreamer-app/docs.md Normal file
View file

@@ -0,0 +1,679 @@
<!-- file * -->
<!-- struct AppSink -->
Appsink is a sink plugin that supports many different methods for making
the application get a handle on the GStreamer data in a pipeline. Unlike
most GStreamer elements, Appsink provides external API functions.
appsink can be used by linking to the gstappsink.h header file to access the
methods or by using the appsink action signals and properties.
The normal way of retrieving samples from appsink is by using the
`AppSink::pull_sample` and `AppSink::pull_preroll` methods.
These methods block until a sample becomes available in the sink or when the
sink is shut down or reaches EOS. There are also timed variants of these
methods, `AppSink::try_pull_sample` and `AppSink::try_pull_preroll`,
which accept a timeout parameter to limit the amount of time to wait.
Appsink will internally use a queue to collect buffers from the streaming
thread. If the application is not pulling samples fast enough, this queue
will consume a lot of memory over time. The "max-buffers" property can be
used to limit the queue size. The "drop" property controls whether the
streaming thread blocks or if older buffers are dropped when the maximum
queue size is reached. Note that blocking the streaming thread can negatively
affect real-time performance and should be avoided.
If a blocking behaviour is not desirable, setting the "emit-signals" property
to `true` will make appsink emit the "new-sample" and "new-preroll" signals
when a sample can be pulled without blocking.
The "caps" property on appsink can be used to control the formats that
appsink can receive. This property can contain non-fixed caps, the format of
the pulled samples can be obtained by getting the sample caps.
If one of the pull-preroll or pull-sample methods returns `None`, the appsink
is stopped or in the EOS state. You can check for the EOS state with the
"eos" property or with the `AppSink::is_eos` method.
The eos signal can also be used to be informed when the EOS state is reached
to avoid polling.
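
As a rough illustration of this pull-based flow in the Rust bindings, consider
the following sketch. It assumes the 0.12-era API, where `pull_sample()`
returns an `Option` and `set_state()` returns a `StateChangeReturn`; the
element name `sink` is arbitrary.

```rust
extern crate gstreamer as gst;
extern crate gstreamer_app as gst_app;
use gst::prelude::*;

fn main() {
    gst::init().unwrap();

    // A test source feeding an appsink named "sink".
    let pipeline = gst::parse_launch("videotestsrc num-buffers=10 ! appsink name=sink")
        .unwrap()
        .dynamic_cast::<gst::Pipeline>()
        .unwrap();
    let appsink = pipeline
        .get_by_name("sink")
        .unwrap()
        .dynamic_cast::<gst_app::AppSink>()
        .unwrap();

    let _ = pipeline.set_state(gst::State::Playing);

    // pull_sample() blocks; None signals shutdown or EOS.
    while let Some(sample) = appsink.pull_sample() {
        println!("pulled sample with caps {:?}", sample.get_caps());
    }

    let _ = pipeline.set_state(gst::State::Null);
}
```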
# Implements
[`gst_base::BaseSinkExt`](../gst_base/trait.BaseSinkExt.html), [`gst::ElementExt`](../gst/trait.ElementExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html), [`gst::URIHandlerExt`](../gst/trait.URIHandlerExt.html)
<!-- impl AppSink::fn get_buffer_list_support -->
Check if `self` supports buffer lists.
Feature: `v1_12`
# Returns
`true` if `self` supports buffer lists.
<!-- impl AppSink::fn get_caps -->
Get the configured caps on `self`.
# Returns
the `gst::Caps` accepted by the sink. `gst_caps_unref` after usage.
<!-- impl AppSink::fn get_drop -->
Check if `self` will drop old buffers when the maximum amount of queued
buffers is reached.
# Returns
`true` if `self` is dropping old buffers when the queue is
filled.
<!-- impl AppSink::fn get_emit_signals -->
Check if appsink will emit the "new-preroll" and "new-sample" signals.
# Returns
`true` if `self` is emitting the "new-preroll" and "new-sample"
signals.
<!-- impl AppSink::fn get_max_buffers -->
Get the maximum amount of buffers that can be queued in `self`.
# Returns
The maximum amount of buffers that can be queued.
<!-- impl AppSink::fn get_wait_on_eos -->
Check if `self` will wait for all buffers to be consumed when an EOS is
received.
# Returns
`true` if `self` will wait for all buffers to be consumed when an
EOS is received.
<!-- impl AppSink::fn is_eos -->
Check if `self` is EOS, which is when no more samples can be pulled because
an EOS event was received.
This function also returns `true` when the appsink is not in the PAUSED or
PLAYING state.
# Returns
`true` if no more samples can be pulled and the appsink is EOS.
<!-- impl AppSink::fn pull_preroll -->
Get the last preroll sample in `self`. This was the sample that caused the
appsink to preroll in the PAUSED state.
This function is typically used when dealing with a pipeline in the PAUSED
state. Calling this function after doing a seek will give the sample right
after the seek position.
Calling this function will clear the internal reference to the preroll
buffer.
Note that the preroll sample will also be returned as the first sample
when calling `AppSink::pull_sample`.
If an EOS event was received before any buffers, this function returns
`None`. Use gst_app_sink_is_eos () to check for the EOS condition.
This function blocks until a preroll sample or EOS is received or the appsink
element is set to the READY/NULL state.
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS.
Call `gst_sample_unref` after usage.
<!-- impl AppSink::fn pull_sample -->
This function blocks until a sample or EOS becomes available or the appsink
element is set to the READY/NULL state.
This function will only return samples when the appsink is in the PLAYING
state. All rendered buffers will be put in a queue so that the application
can pull samples at its own rate. Note that when the application does not
pull samples fast enough, the queued buffers could consume a lot of memory,
especially when dealing with raw video frames.
If an EOS event was received before any buffers, this function returns
`None`. Use gst_app_sink_is_eos () to check for the EOS condition.
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS.
Call `gst_sample_unref` after usage.
<!-- impl AppSink::fn set_buffer_list_support -->
Instruct `self` to enable or disable buffer list support.
For backwards-compatibility reasons applications need to opt in
to indicate that they will be able to handle buffer lists.
Feature: `v1_12`
## `enable_lists`
enable or disable buffer list support
<!-- impl AppSink::fn set_callbacks -->
Set callbacks which will be executed for each new preroll, new sample and eos.
This is an alternative to using the signals, it has lower overhead and is thus
less expensive, but also less flexible.
If callbacks are installed, no signals will be emitted for performance
reasons.
## `callbacks`
the callbacks
## `user_data`
a user_data argument for the callbacks
## `notify`
a destroy notify function
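In the Rust bindings the callbacks/`user_data`/`notify` triple is wrapped in a
builder. A minimal sketch, assuming the 0.12-era `AppSinkCallbacks` API where
the `new_sample` closure returns a plain `gst::FlowReturn`:

```rust
extern crate gstreamer as gst;
extern crate gstreamer_app as gst_app;

// Install callbacks instead of connecting to signals; as noted above this
// has lower overhead and suppresses signal emission.
fn install_callbacks(appsink: &gst_app::AppSink) {
    appsink.set_callbacks(
        gst_app::AppSinkCallbacks::new()
            .new_sample(|appsink| {
                // Pull the sample that triggered this callback.
                match appsink.pull_sample() {
                    Some(_sample) => gst::FlowReturn::Ok,
                    None => gst::FlowReturn::Eos,
                }
            })
            .build(),
    );
}
```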
<!-- impl AppSink::fn set_caps -->
Set the capabilities on the appsink element. This function takes
a copy of the caps structure. After calling this method, the sink will only
accept caps that match `caps`. If `caps` is non-fixed, or incomplete,
you must check the caps on the samples to get the actual used caps.
## `caps`
caps to set
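A small sketch of the corresponding setter in the bindings, assuming the
0.12-era signature taking `&gst::Caps`; the caps fields here are arbitrary
examples, not requirements of the API:

```rust
extern crate gstreamer as gst;
extern crate gstreamer_app as gst_app;

// Only accept S16LE stereo audio from now on; samples must still be checked
// for their actual caps if the filter caps are non-fixed.
fn restrict_caps(appsink: &gst_app::AppSink) {
    let caps = gst::Caps::new_simple(
        "audio/x-raw",
        &[("format", &"S16LE"), ("channels", &2i32)],
    );
    appsink.set_caps(&caps);
}
```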
<!-- impl AppSink::fn set_drop -->
Instruct `self` to drop old buffers when the maximum amount of queued
buffers is reached.
## `drop`
the new state
<!-- impl AppSink::fn set_emit_signals -->
Make appsink emit the "new-preroll" and "new-sample" signals. This option is
by default disabled because signal emission is expensive and unneeded when
the application prefers to operate in pull mode.
## `emit`
the new state
<!-- impl AppSink::fn set_max_buffers -->
Set the maximum amount of buffers that can be queued in `self`. After this
amount of buffers are queued in appsink, any more buffers will block upstream
elements until a sample is pulled from `self`.
## `max`
the maximum number of buffers to queue
<!-- impl AppSink::fn set_wait_on_eos -->
Instruct `self` to wait for all buffers to be consumed when an EOS is received.
## `wait`
the new state
<!-- impl AppSink::fn try_pull_preroll -->
Get the last preroll sample in `self`. This was the sample that caused the
appsink to preroll in the PAUSED state.
This function is typically used when dealing with a pipeline in the PAUSED
state. Calling this function after doing a seek will give the sample right
after the seek position.
Calling this function will clear the internal reference to the preroll
buffer.
Note that the preroll sample will also be returned as the first sample
when calling `AppSink::pull_sample`.
If an EOS event was received before any buffers or the timeout expires,
this function returns `None`. Use gst_app_sink_is_eos () to check for the EOS
condition.
This function blocks until a preroll sample or EOS is received, the appsink
element is set to the READY/NULL state, or the timeout expires.
Feature: `v1_10`
## `timeout`
the maximum amount of time to wait for the preroll sample
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS or the timeout expires.
Call `gst_sample_unref` after usage.
<!-- impl AppSink::fn try_pull_sample -->
This function blocks until a sample or EOS becomes available or the appsink
element is set to the READY/NULL state or the timeout expires.
This function will only return samples when the appsink is in the PLAYING
state. All rendered buffers will be put in a queue so that the application
can pull samples at its own rate. Note that when the application does not
pull samples fast enough, the queued buffers could consume a lot of memory,
especially when dealing with raw video frames.
If an EOS event was received before any buffers or the timeout expires,
this function returns `None`. Use gst_app_sink_is_eos () to check for the EOS
condition.
Feature: `v1_10`
## `timeout`
the maximum amount of time to wait for a sample
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS or the timeout expires.
Call `gst_sample_unref` after usage.
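A sketch of the timed variant (requires the `v1_10` feature; assuming the
0.12-era signature taking a `gst::ClockTime` and returning an `Option`):

```rust
extern crate gstreamer as gst;
extern crate gstreamer_app as gst_app;

// Wait at most one second for a sample instead of blocking indefinitely;
// None means stopped, EOS, or timeout.
fn poll_sample(appsink: &gst_app::AppSink) -> Option<gst::Sample> {
    appsink.try_pull_sample(gst::SECOND)
}
```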
<!-- trait AppSinkExt::fn connect_eos -->
Signal that the end-of-stream has been reached. This signal is emitted from
the streaming thread.
<!-- trait AppSinkExt::fn connect_new_preroll -->
Signal that a new preroll sample is available.
This signal is emitted from the streaming thread and only when the
"emit-signals" property is `true`.
The new preroll sample can be retrieved with the "pull-preroll" action
signal or `AppSink::pull_preroll` either from this signal callback
or from any other thread.
Note that this signal is only emitted when the "emit-signals" property is
set to `true`, which it is not by default for performance reasons.
<!-- trait AppSinkExt::fn connect_new_sample -->
Signal that a new sample is available.
This signal is emitted from the streaming thread and only when the
"emit-signals" property is `true`.
The new sample can be retrieved with the "pull-sample" action
signal or `AppSink::pull_sample` either from this signal callback
or from any other thread.
Note that this signal is only emitted when the "emit-signals" property is
set to `true`, which it is not by default for performance reasons.
<!-- trait AppSinkExt::fn connect_pull_preroll -->
Get the last preroll sample in `appsink`. This was the sample that caused the
appsink to preroll in the PAUSED state.
This function is typically used when dealing with a pipeline in the PAUSED
state. Calling this function after doing a seek will give the sample right
after the seek position.
Calling this function will clear the internal reference to the preroll
buffer.
Note that the preroll sample will also be returned as the first sample
when calling `AppSink::pull_sample` or the "pull-sample" action signal.
If an EOS event was received before any buffers, this function returns
`None`. Use gst_app_sink_is_eos () to check for the EOS condition.
This function blocks until a preroll sample or EOS is received or the appsink
element is set to the READY/NULL state.
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS.
<!-- trait AppSinkExt::fn connect_pull_sample -->
This function blocks until a sample or EOS becomes available or the appsink
element is set to the READY/NULL state.
This function will only return samples when the appsink is in the PLAYING
state. All rendered samples will be put in a queue so that the application
can pull samples at its own rate.
Note that when the application does not pull samples fast enough, the
queued samples could consume a lot of memory, especially when dealing with
raw video frames. It's possible to control the behaviour of the queue with
the "drop" and "max-buffers" properties.
If an EOS event was received before any buffers, this function returns
`None`. Use gst_app_sink_is_eos () to check for the EOS condition.
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS.
<!-- trait AppSinkExt::fn connect_try_pull_preroll -->
Get the last preroll sample in `appsink`. This was the sample that caused the
appsink to preroll in the PAUSED state.
This function is typically used when dealing with a pipeline in the PAUSED
state. Calling this function after doing a seek will give the sample right
after the seek position.
Calling this function will clear the internal reference to the preroll
buffer.
Note that the preroll sample will also be returned as the first sample
when calling `AppSink::pull_sample` or the "pull-sample" action signal.
If an EOS event was received before any buffers or the timeout expires,
this function returns `None`. Use gst_app_sink_is_eos () to check for the EOS
condition.
This function blocks until a preroll sample or EOS is received, the appsink
element is set to the READY/NULL state, or the timeout expires.
Feature: `v1_10`
## `timeout`
the maximum amount of time to wait for the preroll sample
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS or the timeout expires.
<!-- trait AppSinkExt::fn connect_try_pull_sample -->
This function blocks until a sample or EOS becomes available or the appsink
element is set to the READY/NULL state or the timeout expires.
This function will only return samples when the appsink is in the PLAYING
state. All rendered samples will be put in a queue so that the application
can pull samples at its own rate.
Note that when the application does not pull samples fast enough, the
queued samples could consume a lot of memory, especially when dealing with
raw video frames. It's possible to control the behaviour of the queue with
the "drop" and "max-buffers" properties.
If an EOS event was received before any buffers or the timeout expires,
this function returns `None`. Use `gst_app_sink_is_eos()` to check
for the EOS condition.
Feature: `v1_10`
## `timeout`
the maximum amount of time to wait for a sample
# Returns
a `gst::Sample`, or `None` when the appsink is stopped or at EOS or the timeout expires.
<!-- struct AppSrc -->
The appsrc element can be used by applications to insert data into a
GStreamer pipeline. Unlike most GStreamer elements, appsrc provides
external API functions.
appsrc can be used by linking with the libgstapp library to access the
methods directly or by using the appsrc action signals.
Before operating appsrc, the caps property must be set to fixed caps
describing the format of the data that will be pushed with appsrc. An
exception to this is when pushing buffers with unknown caps, in which case no
caps should be set. This is typically true of file-like sources that push raw
byte buffers. If you don't want to explicitly set the caps, you can use
`AppSrc::push_sample`. This method gets the caps associated with the
sample and sets them on the appsrc, replacing any previously set caps (if
different from the sample's caps).
The main way of handing data to the appsrc element is by calling the
`AppSrc::push_buffer` method or by emitting the push-buffer action signal.
This will put the buffer onto a queue from which appsrc will read in its
streaming thread. It is important to note that data transport will not happen
from the thread that performed the push-buffer call.
The "max-bytes" property controls how much data can be queued in appsrc
before appsrc considers the queue full. A filled internal queue will always
emit the "enough-data" signal, which signals the application that it should
stop pushing data into appsrc. The "block" property will cause appsrc to
block the push-buffer method until free space becomes available again.
When the internal queue is running out of data, the "need-data" signal is
emitted, which signals the application that it should start pushing more data
into appsrc.
In addition to the "need-data" and "enough-data" signals, appsrc can emit the
"seek-data" signal when the "stream-mode" property is set to "seekable" or
"random-access". The signal argument will contain the new desired position in
the stream expressed in the unit set with the "format" property. After
receiving the seek-data signal, the application should push-buffers from the
new position.
These signals allow the application to operate the appsrc in two different
ways:
The push mode, in which the application repeatedly calls the push-buffer/push-sample
method with a new buffer/sample. Optionally, the queue size in the appsrc
can be controlled with the enough-data and need-data signals by respectively
stopping/starting the push-buffer/push-sample calls. This is a typical
mode of operation for the stream-type "stream" and "seekable". Use this
mode when implementing various network protocols or hardware devices.
The pull mode, in which the need-data signal triggers the next push-buffer call.
This mode is typically used in the "random-access" stream-type. Use this
mode for file access or other randomly accessible sources. In this mode, a
buffer of exactly the amount of bytes given by the need-data signal should be
pushed into appsrc.
In all modes, the size property on appsrc should contain the total stream
size in bytes. Setting this property is mandatory in the random-access mode.
For the stream and seekable modes, setting this property is optional but
recommended.
When the application has finished pushing data into appsrc, it should call
`AppSrc::end_of_stream` or emit the end-of-stream action signal. After
this call, no more buffers can be pushed into appsrc until a flushing seek
occurs or the state of the appsrc has gone through READY.
# Implements
[`gst_base::BaseSrcExt`](../gst_base/trait.BaseSrcExt.html), [`gst::ElementExt`](../gst/trait.ElementExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html), [`gst::URIHandlerExt`](../gst/trait.URIHandlerExt.html)
<!-- impl AppSrc::fn end_of_stream -->
Indicates to the appsrc element that the last buffer queued in the
element is the last buffer of the stream.
# Returns
`gst::FlowReturn::Ok` when the EOS was successfully queued.
`gst::FlowReturn::Flushing` when `self` is not PAUSED or PLAYING.
<!-- impl AppSrc::fn get_caps -->
Get the configured caps on `self`.
# Returns
the `gst::Caps` produced by the source. Call `gst_caps_unref()` after usage.
<!-- impl AppSrc::fn get_current_level_bytes -->
Get the number of currently queued bytes inside `self`.
# Returns
The number of currently queued bytes.
<!-- impl AppSrc::fn get_duration -->
Get the duration of the stream in nanoseconds. A value of GST_CLOCK_TIME_NONE means that the duration is
not known.
Feature: `v1_10`
# Returns
the duration of the stream previously set with `AppSrc::set_duration`.
<!-- impl AppSrc::fn get_emit_signals -->
Check if appsrc will emit the "need-data", "enough-data" and "seek-data"
signals.
# Returns
`true` if `self` is emitting the "need-data", "enough-data" and "seek-data"
signals.
<!-- impl AppSrc::fn get_latency -->
Retrieve the min and max latencies in `min` and `max` respectively.
## `min`
the min latency
## `max`
the max latency
<!-- impl AppSrc::fn get_max_bytes -->
Get the maximum amount of bytes that can be queued in `self`.
# Returns
The maximum amount of bytes that can be queued.
<!-- impl AppSrc::fn get_size -->
Get the size of the stream in bytes. A value of -1 means that the size is
not known.
# Returns
the size of the stream previously set with `AppSrc::set_size`.
<!-- impl AppSrc::fn get_stream_type -->
Get the stream type. Control the stream type of `self`
with `AppSrc::set_stream_type`.
# Returns
the stream type.
<!-- impl AppSrc::fn push_buffer -->
Adds a buffer to the queue of buffers that the appsrc element will
push to its source pad. This function takes ownership of the buffer.
When the block property is `true`, this function can block until free
space becomes available in the queue.
## `buffer`
a `gst::Buffer` to push
# Returns
`gst::FlowReturn::Ok` when the buffer was successfully queued.
`gst::FlowReturn::Flushing` when `self` is not PAUSED or PLAYING.
`gst::FlowReturn::Eos` when EOS occurred.
<!-- impl AppSrc::fn push_buffer_list -->
Adds a buffer list to the queue of buffers and buffer lists that the
appsrc element will push to its source pad. This function takes ownership
of `buffer_list`.
When the block property is `true`, this function can block until free
space becomes available in the queue.
Feature: `v1_14`
## `buffer_list`
a `gst::BufferList` to push
# Returns
`gst::FlowReturn::Ok` when the buffer list was successfully queued.
`gst::FlowReturn::Flushing` when `self` is not PAUSED or PLAYING.
`gst::FlowReturn::Eos` when EOS occurred.
<!-- impl AppSrc::fn push_sample -->
Extracts a buffer from the provided sample and adds it to the queue of
buffers that the appsrc element will push to its source pad. Any
previous caps that were set on appsrc will be replaced by the caps
associated with the sample if not equal.
When the block property is `true`, this function can block until free
space becomes available in the queue.
## `sample`
a `gst::Sample` from which buffer and caps may be
extracted
# Returns
`gst::FlowReturn::Ok` when the buffer was successfully queued.
`gst::FlowReturn::Flushing` when `self` is not PAUSED or PLAYING.
`gst::FlowReturn::Eos` when EOS occurred.
<!-- impl AppSrc::fn set_callbacks -->
Set callbacks which will be executed when data is needed, enough data has
been collected or when a seek should be performed.
This is an alternative to using the signals; it has lower overhead and is thus
less expensive, but also less flexible.
If callbacks are installed, no signals will be emitted for performance
reasons.
## `callbacks`
the callbacks
## `user_data`
a user_data argument for the callbacks
## `notify`
a destroy notify function
<!-- impl AppSrc::fn set_caps -->
Set the capabilities on the appsrc element. This function takes
a copy of the caps structure. After calling this method, the source will
only produce caps that match `caps`. `caps` must be fixed, and the caps on the
buffers must match the caps or be left unset.
## `caps`
caps to set
<!-- impl AppSrc::fn set_duration -->
Set the duration of the stream in nanoseconds. A value of GST_CLOCK_TIME_NONE means that the duration is
not known.
Feature: `v1_10`
## `duration`
the duration to set
<!-- impl AppSrc::fn set_emit_signals -->
Make appsrc emit the "need-data", "enough-data" and "seek-data" signals. This
option is disabled by default because signal emission is expensive and unneeded
when the application uses the callbacks set with `AppSrc::set_callbacks` instead.
## `emit`
the new state
<!-- impl AppSrc::fn set_latency -->
Configure the `min` and `max` latency in `self`. If `min` is set to -1, the
default latency calculations for pseudo-live sources will be used.
## `min`
the min latency
## `max`
the max latency
<!-- impl AppSrc::fn set_max_bytes -->
Set the maximum amount of bytes that can be queued in `self`.
After the maximum amount of bytes are queued, `self` will emit the
"enough-data" signal.
## `max`
the maximum number of bytes to queue
<!-- impl AppSrc::fn set_size -->
Set the size of the stream in bytes. A value of -1 means that the size is
not known.
## `size`
the size to set
<!-- impl AppSrc::fn set_stream_type -->
Set the stream type on `self`. For seekable streams, the "seek-data" signal must
be connected to.
## `type_`
the new stream type
<!-- trait AppSrcExt::fn connect_end_of_stream -->
Notify `appsrc` that no more buffers are available.
<!-- trait AppSrcExt::fn connect_enough_data -->
Signal that the source has enough data. It is recommended that the
application stops calling push-buffer until the need-data signal is
emitted again to avoid excessive buffer queueing.
<!-- trait AppSrcExt::fn connect_need_data -->
Signal that the source needs more data. In the callback or from another
thread you should call push-buffer or end-of-stream.
`length` is just a hint and when it is set to -1, any number of bytes can be
pushed into `appsrc`.
You can call push-buffer multiple times until the enough-data signal is
fired.
## `length`
the amount of bytes needed.
<!-- trait AppSrcExt::fn connect_push_buffer -->
Adds a buffer to the queue of buffers that the appsrc element will
push to its source pad. This function does not take ownership of the
buffer so the buffer needs to be unreffed after calling this function.
When the block property is `true`, this function can block until free space
becomes available in the queue.
## `buffer`
a buffer to push
<!-- trait AppSrcExt::fn connect_push_buffer_list -->
Adds a buffer list to the queue of buffers and buffer lists that the
appsrc element will push to its source pad. This function does not take
ownership of the buffer list so the buffer list needs to be unreffed
after calling this function.
When the block property is `true`, this function can block until free space
becomes available in the queue.
Feature: `v1_14`
## `buffer_list`
a buffer list to push
<!-- trait AppSrcExt::fn connect_push_sample -->
Extracts a buffer from the provided sample and adds it
to the queue of buffers that the appsrc element will
push to its source pad. This function sets the appsrc caps based on the caps
in the sample and resets them if they change.
Only the caps and the buffer of the provided sample are used, not,
for example, the segment in the sample.
This function does not take ownership of the
sample so the sample needs to be unreffed after calling this function.
When the block property is `true`, this function can block until free space
becomes available in the queue.
## `sample`
a sample from which to extract the buffer to push
<!-- trait AppSrcExt::fn connect_seek_data -->
Seek to the given offset. The next push-buffer should produce buffers from
the new `offset`.
This callback is only called for seekable stream types.
## `offset`
the offset to seek to
# Returns
`true` if the seek succeeded.
<!-- enum AppStreamType -->
The stream type.
<!-- enum AppStreamType::variant Stream -->
No seeking is supported in the stream, such as a
live stream.
<!-- enum AppStreamType::variant Seekable -->
The stream is seekable but seeking might not
be very fast, such as data from a webserver.
<!-- enum AppStreamType::variant RandomAccess -->
The stream is seekable and seeking is fast,
such as in a local file.

@ -0,0 +1,448 @@
<!-- file * -->
<!-- enum AudioChannelPosition -->
Audio channel positions.
These are the channels defined in SMPTE 2036-2-2008
Table 1 for 22.2 audio systems with the Surround and Wide channels from
DTS Coherent Acoustics (v.1.3.1) and 10.2 and 7.1 layouts. In the caps the
actual channel layout is expressed with a channel count and a channel mask,
which describes the existing channels. The positions in the bit mask correspond
to the enum values.
For negotiation it is allowed to have more bits set in the channel mask than
the number of channels to specify the allowed channel positions but this is
not allowed in negotiated caps. It is not allowed in any situation other
than the one mentioned below to have less bits set in the channel mask than
the number of channels.
`AudioChannelPosition::Mono` can only be used with a single mono channel that
has no direction information and would be mixed into all directional channels.
This is expressed in caps by having a single channel and no channel mask.
`AudioChannelPosition::None` can only be used if all channels have this position.
This is expressed in caps by having a channel mask with no bits set.
As another special case it is allowed to have two channels without a channel mask.
This implicitly means that this is a stereo stream with a front left and front right
channel.
<!-- enum AudioChannelPosition::variant None -->
used for position-less channels, e.g.
from a sound card that records 1024 channels; mutually exclusive with
any other channel position
<!-- enum AudioChannelPosition::variant Mono -->
Mono without direction;
can only be used with 1 channel
<!-- enum AudioChannelPosition::variant Invalid -->
invalid position
<!-- enum AudioChannelPosition::variant FrontLeft -->
Front left
<!-- enum AudioChannelPosition::variant FrontRight -->
Front right
<!-- enum AudioChannelPosition::variant FrontCenter -->
Front center
<!-- enum AudioChannelPosition::variant Lfe1 -->
Low-frequency effects 1 (subwoofer)
<!-- enum AudioChannelPosition::variant RearLeft -->
Rear left
<!-- enum AudioChannelPosition::variant RearRight -->
Rear right
<!-- enum AudioChannelPosition::variant FrontLeftOfCenter -->
Front left of center
<!-- enum AudioChannelPosition::variant FrontRightOfCenter -->
Front right of center
<!-- enum AudioChannelPosition::variant RearCenter -->
Rear center
<!-- enum AudioChannelPosition::variant Lfe2 -->
Low-frequency effects 2 (subwoofer)
<!-- enum AudioChannelPosition::variant SideLeft -->
Side left
<!-- enum AudioChannelPosition::variant SideRight -->
Side right
<!-- enum AudioChannelPosition::variant TopFrontLeft -->
Top front left
<!-- enum AudioChannelPosition::variant TopFrontRight -->
Top front right
<!-- enum AudioChannelPosition::variant TopFrontCenter -->
Top front center
<!-- enum AudioChannelPosition::variant TopCenter -->
Top center
<!-- enum AudioChannelPosition::variant TopRearLeft -->
Top rear left
<!-- enum AudioChannelPosition::variant TopRearRight -->
Top rear right
<!-- enum AudioChannelPosition::variant TopSideLeft -->
Top side left
<!-- enum AudioChannelPosition::variant TopSideRight -->
Top side right
<!-- enum AudioChannelPosition::variant TopRearCenter -->
Top rear center
<!-- enum AudioChannelPosition::variant BottomFrontCenter -->
Bottom front center
<!-- enum AudioChannelPosition::variant BottomFrontLeft -->
Bottom front left
<!-- enum AudioChannelPosition::variant BottomFrontRight -->
Bottom front right
<!-- enum AudioChannelPosition::variant WideLeft -->
Wide left (between front left and side left)
<!-- enum AudioChannelPosition::variant WideRight -->
Wide right (between front right and side right)
<!-- enum AudioChannelPosition::variant SurroundLeft -->
Surround left (between rear left and side left)
<!-- enum AudioChannelPosition::variant SurroundRight -->
Surround right (between rear right and side right)
<!-- enum AudioFormat -->
Enum value describing the most common audio formats.
<!-- enum AudioFormat::variant Unknown -->
unknown or unset audio format
<!-- enum AudioFormat::variant Encoded -->
encoded audio format
<!-- enum AudioFormat::variant S8 -->
8 bits in 8 bits, signed
<!-- enum AudioFormat::variant U8 -->
8 bits in 8 bits, unsigned
<!-- enum AudioFormat::variant S16le -->
16 bits in 16 bits, signed, little endian
<!-- enum AudioFormat::variant S16be -->
16 bits in 16 bits, signed, big endian
<!-- enum AudioFormat::variant U16le -->
16 bits in 16 bits, unsigned, little endian
<!-- enum AudioFormat::variant U16be -->
16 bits in 16 bits, unsigned, big endian
<!-- enum AudioFormat::variant S2432le -->
24 bits in 32 bits, signed, little endian
<!-- enum AudioFormat::variant S2432be -->
24 bits in 32 bits, signed, big endian
<!-- enum AudioFormat::variant U2432le -->
24 bits in 32 bits, unsigned, little endian
<!-- enum AudioFormat::variant U2432be -->
24 bits in 32 bits, unsigned, big endian
<!-- enum AudioFormat::variant S32le -->
32 bits in 32 bits, signed, little endian
<!-- enum AudioFormat::variant S32be -->
32 bits in 32 bits, signed, big endian
<!-- enum AudioFormat::variant U32le -->
32 bits in 32 bits, unsigned, little endian
<!-- enum AudioFormat::variant U32be -->
32 bits in 32 bits, unsigned, big endian
<!-- enum AudioFormat::variant S24le -->
24 bits in 24 bits, signed, little endian
<!-- enum AudioFormat::variant S24be -->
24 bits in 24 bits, signed, big endian
<!-- enum AudioFormat::variant U24le -->
24 bits in 24 bits, unsigned, little endian
<!-- enum AudioFormat::variant U24be -->
24 bits in 24 bits, unsigned, big endian
<!-- enum AudioFormat::variant S20le -->
20 bits in 24 bits, signed, little endian
<!-- enum AudioFormat::variant S20be -->
20 bits in 24 bits, signed, big endian
<!-- enum AudioFormat::variant U20le -->
20 bits in 24 bits, unsigned, little endian
<!-- enum AudioFormat::variant U20be -->
20 bits in 24 bits, unsigned, big endian
<!-- enum AudioFormat::variant S18le -->
18 bits in 24 bits, signed, little endian
<!-- enum AudioFormat::variant S18be -->
18 bits in 24 bits, signed, big endian
<!-- enum AudioFormat::variant U18le -->
18 bits in 24 bits, unsigned, little endian
<!-- enum AudioFormat::variant U18be -->
18 bits in 24 bits, unsigned, big endian
<!-- enum AudioFormat::variant F32le -->
32-bit floating point samples, little endian
<!-- enum AudioFormat::variant F32be -->
32-bit floating point samples, big endian
<!-- enum AudioFormat::variant F64le -->
64-bit floating point samples, little endian
<!-- enum AudioFormat::variant F64be -->
64-bit floating point samples, big endian
<!-- enum AudioFormat::variant S16 -->
16 bits in 16 bits, signed, native endianness
<!-- enum AudioFormat::variant U16 -->
16 bits in 16 bits, unsigned, native endianness
<!-- enum AudioFormat::variant S2432 -->
24 bits in 32 bits, signed, native endianness
<!-- enum AudioFormat::variant U2432 -->
24 bits in 32 bits, unsigned, native endianness
<!-- enum AudioFormat::variant S32 -->
32 bits in 32 bits, signed, native endianness
<!-- enum AudioFormat::variant U32 -->
32 bits in 32 bits, unsigned, native endianness
<!-- enum AudioFormat::variant S24 -->
24 bits in 24 bits, signed, native endianness
<!-- enum AudioFormat::variant U24 -->
24 bits in 24 bits, unsigned, native endianness
<!-- enum AudioFormat::variant S20 -->
20 bits in 24 bits, signed, native endianness
<!-- enum AudioFormat::variant U20 -->
20 bits in 24 bits, unsigned, native endianness
<!-- enum AudioFormat::variant S18 -->
18 bits in 24 bits, signed, native endianness
<!-- enum AudioFormat::variant U18 -->
18 bits in 24 bits, unsigned, native endianness
<!-- enum AudioFormat::variant F32 -->
32-bit floating point samples, native endianness
<!-- enum AudioFormat::variant F64 -->
64-bit floating point samples, native endianness
<!-- struct AudioFormatInfo -->
Information for an audio format.
<!-- struct AudioInfo -->
Information describing audio properties. This information can be filled
in from a `gst::Caps` with `AudioInfo::from_caps`.
Use the provided macros to access the info in this structure.
<!-- impl AudioInfo::fn new -->
Allocate a new `AudioInfo` that is also initialized with
`AudioInfo::init`.
# Returns
a new `AudioInfo`. Free with `AudioInfo::free`.
<!-- impl AudioInfo::fn convert -->
Converts among various `gst::Format` types. This function handles
GST_FORMAT_BYTES, GST_FORMAT_TIME, and GST_FORMAT_DEFAULT. For
raw audio, GST_FORMAT_DEFAULT corresponds to audio frames. This
function can be used to handle pad queries of the type GST_QUERY_CONVERT.
## `src_fmt`
`gst::Format` of the `src_val`
## `src_val`
value to convert
## `dest_fmt`
`gst::Format` of the `dest_val`
## `dest_val`
pointer to destination value
# Returns
`true` if the conversion was successful.
<!-- impl AudioInfo::fn copy -->
Copy a GstAudioInfo structure.
# Returns
a new `AudioInfo`. Free with `AudioInfo::free`.
<!-- impl AudioInfo::fn free -->
Free a GstAudioInfo structure previously allocated with `AudioInfo::new`
or `AudioInfo::copy`.
<!-- impl AudioInfo::fn from_caps -->
Parse `caps` and update `self`.
## `caps`
a `gst::Caps`
# Returns
`true` if `caps` could be parsed
<!-- impl AudioInfo::fn init -->
Initialize `self` with default values.
<!-- impl AudioInfo::fn is_equal -->
Compares two `AudioInfo` structures and returns whether they are equal.
## `other`
a `AudioInfo`
# Returns
`true` if `self` and `other` are equal, else `false`.
<!-- impl AudioInfo::fn set_format -->
Set the default info for the audio info of the given `format`, `rate` and `channels`.
Note: This initializes `self` first, no values are preserved.
## `format`
the format
## `rate`
the samplerate
## `channels`
the number of channels
## `position`
the channel positions
<!-- impl AudioInfo::fn to_caps -->
Convert the values of `self` into a `gst::Caps`.
# Returns
the new `gst::Caps` containing the
info of `self`.
<!-- enum AudioLayout -->
Layout of the audio samples for the different channels.
<!-- enum AudioLayout::variant Interleaved -->
interleaved audio
<!-- enum AudioLayout::variant NonInterleaved -->
non-interleaved audio
<!-- struct AudioStreamAlign -->
`AudioStreamAlign` provides a helper object that tracks audio
stream alignment and discontinuities, and detects discontinuities if
possible.
See `AudioStreamAlign::new` for a description of its parameters and
`AudioStreamAlign::process` for the details of the processing.
Feature: `v1_14`
<!-- impl AudioStreamAlign::fn new -->
Allocate a new `AudioStreamAlign` with the given configuration. All
processing happens according to sample rate `rate`, until
`AudioStreamAlign::set_rate` is called with a new `rate`.
A negative rate can be used for reverse playback.
`alignment_threshold` gives the tolerance in nanoseconds after which a
timestamp difference is considered a discontinuity. Once detected,
`discont_wait` nanoseconds have to pass without going below the threshold
again until the output buffer is marked as a discontinuity. These can later
be re-configured with `AudioStreamAlign::set_alignment_threshold` and
`AudioStreamAlign::set_discont_wait`.
Feature: `v1_14`
## `rate`
a sample rate
## `alignment_threshold`
an alignment threshold in nanoseconds
## `discont_wait`
discont wait in nanoseconds
# Returns
a new `AudioStreamAlign`. Free with `AudioStreamAlign::free`.
<!-- impl AudioStreamAlign::fn copy -->
Copy a GstAudioStreamAlign structure.
Feature: `v1_14`
# Returns
a new `AudioStreamAlign`. Free with `AudioStreamAlign::free`.
<!-- impl AudioStreamAlign::fn free -->
Free a GstAudioStreamAlign structure previously allocated with `AudioStreamAlign::new`
or `AudioStreamAlign::copy`.
Feature: `v1_14`
<!-- impl AudioStreamAlign::fn get_samples_since_discont -->
Returns the number of samples that were processed since the last
discontinuity was detected.
Feature: `v1_14`
# Returns
The number of samples processed since the last discontinuity.
<!-- impl AudioStreamAlign::fn get_timestamp_at_discont -->
Timestamp that was passed when a discontinuity was detected, i.e. the first
timestamp after the discontinuity.
Feature: `v1_14`
# Returns
The last timestamp at which a discontinuity was detected.
<!-- impl AudioStreamAlign::fn mark_discont -->
Marks the next buffer as discontinuous and resets timestamp tracking.
Feature: `v1_14`
<!-- impl AudioStreamAlign::fn process -->
Processes data with `timestamp` and `n_samples`, and returns the output
timestamp, duration and sample position together with a boolean to signal
whether a discontinuity was detected or not. All non-discontinuous data
will have perfect timestamps and durations.
A discontinuity is detected once the actual timestamp and the timestamp
calculated from the sample count since the last discontinuity differ by
more than the alignment threshold for a duration longer than discont wait.
Note: In reverse playback, every buffer is considered discontinuous in the
context of buffer flags because the last sample of the previous buffer is
discontinuous with the first sample of the current one. However for this
function they are only considered discontinuous in reverse playback if the
first sample of the previous buffer is discontinuous with the last sample
of the current one.
Feature: `v1_14`
## `discont`
if this data is considered to be discontinuous
## `timestamp`
a `gst::ClockTime` of the start of the data
## `n_samples`
number of samples to process
## `out_timestamp`
output timestamp of the data
## `out_duration`
output duration of the data
## `out_sample_position`
output sample position of the start of the data
# Returns
`true` if a discontinuity was detected, `false` otherwise.
<!-- struct StreamVolume -->
This interface is implemented by elements that provide a stream volume. Examples for
such elements are `volume` and `playbin`.
Applications can use this interface to get or set the current stream volume. For this
the "volume" `gobject::Object` property can be used or the helper functions `StreamVolume::set_volume`
and `StreamVolume::get_volume`. This volume is always a linear factor, i.e. 0.0 is muted
and 1.0 is 100%. For showing the volume in a GUI it might make sense to convert it to
a different format by using `StreamVolume::convert_volume`. Volume sliders should usually
use a cubic volume.
Separate from the volume the stream can also be muted by the "mute" `gobject::Object` property or
`StreamVolume::set_mute` and `StreamVolume::get_mute`.
Elements that provide some kind of stream volume should implement the "volume" and
"mute" `gobject::Object` properties and handle setting and getting of them properly.
The volume property is defined to be a linear volume factor.
# Implements
[`StreamVolumeExt`](trait.StreamVolumeExt.html)
<!-- trait StreamVolumeExt -->
Trait containing all `StreamVolume` methods.
# Implementors
[`StreamVolume`](struct.StreamVolume.html)
<!-- impl StreamVolume::fn convert_volume -->
## `from`
`StreamVolumeFormat` to convert from
## `to`
`StreamVolumeFormat` to convert to
## `val`
Volume in `from` format that should be converted
# Returns
the converted volume
<!-- trait StreamVolumeExt::fn get_mute -->
# Returns
Returns `true` if the stream is muted
<!-- trait StreamVolumeExt::fn get_volume -->
## `format`
`StreamVolumeFormat` which should be returned
# Returns
The current stream volume as linear factor
<!-- trait StreamVolumeExt::fn set_mute -->
## `mute`
Mute state that should be set
<!-- trait StreamVolumeExt::fn set_volume -->
## `format`
`StreamVolumeFormat` of `val`
## `val`
Linear volume factor that should be set
<!-- enum StreamVolumeFormat -->
Different representations of a stream volume. `StreamVolume::convert_volume`
allows converting between the different representations.
Formulas to convert from a linear to a cubic or dB volume are
cbrt(val) and 20 * log10 (val).
<!-- enum StreamVolumeFormat::variant Linear -->
Linear scale factor, 1.0 = 100%
<!-- enum StreamVolumeFormat::variant Cubic -->
Cubic volume scale
<!-- enum StreamVolumeFormat::variant Db -->
Logarithmic volume scale (dB, amplitude not power)

docs/gstreamer-base/docs.md (new file, 1861 lines): diff suppressed because it is too large.

@ -0,0 +1,308 @@
<!-- file * -->
<!-- struct TestClock -->
GstTestClock is an implementation of `gst::Clock` which has different
behaviour compared to `gst::SystemClock`. Time for `gst::SystemClock` advances
according to the system time, while time for `TestClock` changes only
when `TestClock::set_time` or `TestClock::advance_time` are
called. `TestClock` provides unit tests with the possibility to
precisely advance the time in a deterministic manner, independent of the
system time or any other external factors.
## Advancing the time of a `TestClock`
```C
#include <gst/gst.h>
#include <gst/check/gsttestclock.h>
GstClock *clock;
GstTestClock *test_clock;
clock = gst_test_clock_new ();
test_clock = GST_TEST_CLOCK (clock);
GST_INFO ("Time: %" GST_TIME_FORMAT, GST_TIME_ARGS (gst_clock_get_time (clock)));
gst_test_clock_advance_time (test_clock, 1 * GST_SECOND);
GST_INFO ("Time: %" GST_TIME_FORMAT, GST_TIME_ARGS (gst_clock_get_time (clock)));
g_usleep (10 * G_USEC_PER_SEC);
GST_INFO ("Time: %" GST_TIME_FORMAT, GST_TIME_ARGS (gst_clock_get_time (clock)));
gst_test_clock_set_time (test_clock, 42 * GST_SECOND);
GST_INFO ("Time: %" GST_TIME_FORMAT, GST_TIME_ARGS (gst_clock_get_time (clock)));
...
```
`gst::Clock` allows for setting up single shot or periodic clock notifications
as well as waiting for these notifications synchronously (using
`gst::Clock::id_wait`) or asynchronously (using
`gst::Clock::id_wait_async`). This is used by many GStreamer elements,
among them `GstBaseSrc` and `GstBaseSink`.
`TestClock` keeps track of these clock notifications. By calling
`TestClock::wait_for_next_pending_id` or
`TestClock::wait_for_multiple_pending_ids` a unit test may wait for the
next one or several clock notifications to be requested. Additionally unit
tests may release blocked waits in a controlled fashion by calling
`TestClock::process_next_clock_id`. This way a unit test can control the
inaccuracy (jitter) of clock notifications, since the test can decide to
release blocked waits when the clock time has advanced exactly to, or past,
the requested clock notification time.
There are also interfaces for determining if a notification belongs to a
`TestClock` or not, as well as getting the number of requested clock
notifications so far.
N.B.: When a unit test waits for a certain amount of clock notifications to
be requested in `TestClock::wait_for_next_pending_id` or
`TestClock::wait_for_multiple_pending_ids` then these functions may block
for a long time. If they block forever then the expected clock notifications
were never requested from `TestClock`, and so the assumptions in the code
of the unit test are wrong. The unit test case runner in gstcheck is
expected to catch these cases either by the default test case timeout or the
one set for the unit test by calling `tcase_set_timeout()`.
The sample code below assumes that the element under test will delay a
buffer pushed on the source pad by some latency until it arrives on the sink
pad. Moreover it is assumed that the element will at some point call
`gst::Clock::id_wait` to synchronously wait for a specific time. The first
buffer sent will arrive exactly on time only delayed by the latency. The
second buffer will arrive a little late (7ms) due to simulated jitter in the
clock notification.
## Demonstration of how to work with clock notifications and `TestClock`
```C
#include <gst/gst.h>
#include <gst/check/gstcheck.h>
#include <gst/check/gsttestclock.h>
GstClockTime latency;
GstElement *element;
GstPad *srcpad;
GstClock *clock;
GstTestClock *test_clock;
GstBuffer *buf;
GstClockID pending_id;
GstClockID processed_id;
latency = 42 * GST_MSECOND;
element = create_element (latency, ...);
srcpad = get_source_pad (element);
clock = gst_test_clock_new ();
test_clock = GST_TEST_CLOCK (clock);
gst_element_set_clock (element, clock);
GST_INFO ("Set time, create and push the first buffer\n");
gst_test_clock_set_time (test_clock, 0);
buf = create_test_buffer (gst_clock_get_time (clock), ...);
g_assert_cmpint (gst_pad_push (srcpad, buf), ==, GST_FLOW_OK);
GST_INFO ("Block until element is waiting for a clock notification\n");
gst_test_clock_wait_for_next_pending_id (test_clock, &pending_id);
GST_INFO ("Advance to the requested time of the clock notification\n");
gst_test_clock_advance_time (test_clock, latency);
GST_INFO ("Release the next blocking wait and make sure it is the one from element\n");
processed_id = gst_test_clock_process_next_clock_id (test_clock);
g_assert (processed_id == pending_id);
g_assert_cmpint (GST_CLOCK_ENTRY_STATUS (processed_id), ==, GST_CLOCK_OK);
gst_clock_id_unref (pending_id);
gst_clock_id_unref (processed_id);
GST_INFO ("Validate that element produced an output buffer and check its timestamp\n");
g_assert_cmpint (get_number_of_output_buffer (...), ==, 1);
buf = get_buffer_pushed_by_element (element, ...);
g_assert_cmpint (GST_BUFFER_TIMESTAMP (buf), ==, latency);
gst_buffer_unref (buf);
GST_INFO ("Check that element does not wait for any clock notification\n");
g_assert (!gst_test_clock_peek_next_pending_id (test_clock, NULL));
GST_INFO ("Set time, create and push the second buffer\n");
gst_test_clock_advance_time (test_clock, 10 * GST_SECOND);
buf = create_test_buffer (gst_clock_get_time (clock), ...);
g_assert_cmpint (gst_pad_push (srcpad, buf), ==, GST_FLOW_OK);
GST_INFO ("Block until element is waiting for a new clock notification\n");
gst_test_clock_wait_for_next_pending_id (test_clock, &pending_id);
GST_INFO ("Advance past 7ms beyond the requested time of the clock notification\n");
gst_test_clock_advance_time (test_clock, latency + 7 * GST_MSECOND);
GST_INFO ("Release the next blocking wait and make sure it is the one from element\n");
processed_id = gst_test_clock_process_next_clock_id (test_clock);
g_assert (processed_id == pending_id);
g_assert_cmpint (GST_CLOCK_ENTRY_STATUS (processed_id), ==, GST_CLOCK_OK);
gst_clock_id_unref (pending_id);
gst_clock_id_unref (processed_id);
GST_INFO ("Validate that element produced an output buffer and check its timestamp\n");
g_assert_cmpint (get_number_of_output_buffer (...), ==, 1);
buf = get_buffer_pushed_by_element (element, ...);
g_assert_cmpint (GST_BUFFER_TIMESTAMP (buf), ==,
10 * GST_SECOND + latency + 7 * GST_MSECOND);
gst_buffer_unref (buf);
GST_INFO ("Check that element does not wait for any clock notification\n");
g_assert (!gst_test_clock_peek_next_pending_id (test_clock, NULL));
...
```
Since `TestClock` is only supposed to be used in unit tests it calls
`g_assert`, `g_assert_cmpint` or `g_assert_cmpuint` to validate all function
arguments. This will highlight any issues with the unit test code itself.
# Implements
[`gst::ClockExt`](../gst/trait.ClockExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl TestClock::fn new -->
Creates a new test clock with its time set to zero.
MT safe.
# Returns
a `TestClock` cast to `gst::Clock`.
<!-- impl TestClock::fn new_with_start_time -->
Creates a new test clock with its time set to the specified time.
MT safe.
## `start_time`
a `gst::ClockTime` set to the desired start time of the clock.
# Returns
a `TestClock` cast to `gst::Clock`.
<!-- impl TestClock::fn id_list_get_latest_time -->
Finds the latest time inside the list.
MT safe.
## `pending_list`
List
of pending `GstClockIDs`
<!-- impl TestClock::fn advance_time -->
Advances the time of `self` by the amount given by `delta`. The
time of `self` is monotonically increasing, therefore providing a
`delta` which is negative or zero is a programming error.
MT safe.
## `delta`
a positive `gst::ClockTimeDiff` to be added to the time of the clock
<!-- impl TestClock::fn crank -->
A "crank" consists of three steps:
1: Wait for a `gst::ClockID` to be registered with the `TestClock`.
2: Advance the `TestClock` to the time the `gst::ClockID` is waiting for.
3: Release the `gst::ClockID` wait.
A "crank" can be though of as the notion of
manually driving the clock forward to its next logical step.
# Returns
`true` if the crank was successful, `false` otherwise.
MT safe.
<!-- impl TestClock::fn get_next_entry_time -->
Retrieve the requested time for the next pending clock notification.
MT safe.
# Returns
a `gst::ClockTime` set to the time of the next pending clock
notification. If no clock notifications have been requested
`GST_CLOCK_TIME_NONE` will be returned.
<!-- impl TestClock::fn has_id -->
Checks whether `self` was requested to provide the clock notification
given by `id`.
MT safe.
## `id`
a `gst::ClockID` clock notification
# Returns
`true` if the clock has been asked to provide the given clock
notification, `false` otherwise.
<!-- impl TestClock::fn peek_id_count -->
Determine the number of pending clock notifications that have been
requested from `self`.
MT safe.
# Returns
the number of pending clock notifications.
<!-- impl TestClock::fn peek_next_pending_id -->
Determines if the `pending_id` is the next clock notification scheduled to
be triggered given the current time of `self`.
MT safe.
## `pending_id`
a `gst::ClockID` clock
notification to look for
# Returns
`true` if `pending_id` is the next clock notification to be
triggered, `false` otherwise.
<!-- impl TestClock::fn process_id_list -->
Processes and releases the pending IDs in the list.
MT safe.
## `pending_list`
List
of pending `GstClockIDs`
<!-- impl TestClock::fn process_next_clock_id -->
MT safe.
# Returns
a `gst::ClockID` containing the next pending clock
notification.
<!-- impl TestClock::fn set_time -->
Sets the time of `self` to the time given by `new_time`. The time of
`self` is monotonically increasing, therefore providing a `new_time`
which is earlier or equal to the time of the clock as given by
`gst::ClockExt::get_time` is a programming error.
MT safe.
## `new_time`
a `gst::ClockTime` later than that returned by `gst::ClockExt::get_time`
<!-- impl TestClock::fn wait_for_multiple_pending_ids -->
Blocks until at least `count` clock notifications have been requested from
`self`. There is no timeout for this wait, see the main description of
`TestClock`.
MT safe.
## `count`
the number of pending clock notifications to wait for
## `pending_list`
Address
of a `glib::List` pointer variable to store the list of pending `GstClockIDs`
that expired, or `None`
<!-- impl TestClock::fn wait_for_next_pending_id -->
Waits until a clock notification is requested from `self`. There is no
timeout for this wait, see the main description of `TestClock`. A reference
to the pending clock notification is stored in `pending_id`.
MT safe.
## `pending_id`
`gst::ClockID`
with information about the pending clock notification
<!-- impl TestClock::fn wait_for_pending_id_count -->
Blocks until at least `count` clock notifications have been requested from
`self`. There is no timeout for this wait, see the main description of
`TestClock`.
# Deprecated
use `TestClock::wait_for_multiple_pending_ids` instead.
## `count`
the number of pending clock notifications to wait for
<!-- trait TestClockExt::fn get_property_start-time -->
When a `TestClock` is constructed it will have a certain start time set.
If the clock was created using `TestClock::new_with_start_time` then
this property contains the value of the `start_time` argument. If
`TestClock::new` was called the clock started at time zero, and thus
this property contains the value 0.
<!-- trait TestClockExt::fn set_property_start-time -->
When a `TestClock` is constructed it will have a certain start time set.
If the clock was created using `TestClock::new_with_start_time` then
this property contains the value of the `start_time` argument. If
`TestClock::new` was called the clock started at time zero, and thus
this property contains the value 0.

File diff suppressed because it is too large.

docs/gstreamer-net/docs.md (new file, 140 lines):

@ -0,0 +1,140 @@
<!-- file * -->
<!-- struct NetClientClock -->
`NetClientClock` implements a custom `gst::Clock` that synchronizes its time
to a remote time provider such as `NetTimeProvider`. `NtpClock`
implements a `gst::Clock` that synchronizes its time to a remote NTPv4 server.
A new clock is created with `NetClientClock::new` or
`NtpClock::new`, which takes the address and port of the remote time
provider along with a name and an initial time.
This clock will poll the time provider and will update its calibration
parameters based on the local and remote observations.
The "round-trip" property limits the maximum round trip packets can take.
Various parameters of the clock can be configured with the parent `gst::Clock`
"timeout", "window-size" and "window-threshold" object properties.
A `NetClientClock` or `NtpClock` is typically set on a `gst::Pipeline` with
`gst::Pipeline::use_clock`.
If you set a `gst::Bus` on the clock via the "bus" object property, it will
send `gst::MessageType::Element` messages with an attached `gst::Structure` containing
statistics about clock accuracy and network traffic.
# Implements
[`gst::ClockExt`](../gst/trait.ClockExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl NetClientClock::fn new -->
Create a new `NetClientClock` that will report the time
provided by the `NetTimeProvider` on `remote_address` and
`remote_port`.
## `name`
a name for the clock
## `remote_address`
the address or hostname of the remote clock provider
## `remote_port`
the port of the remote clock provider
## `base_time`
initial time of the clock
# Returns
a new `gst::Clock` that receives a time from the remote
clock.
<!-- struct NetTimeProvider -->
This object exposes the time of a `gst::Clock` on the network.
A `NetTimeProvider` is created with `NetTimeProvider::new` which
takes a `gst::Clock`, an address and a port number as arguments.
After creating the object, a client clock such as `NetClientClock` can
query the exposed clock over the network for its values.
The `NetTimeProvider` typically wraps the clock used by a `gst::Pipeline`.
# Implements
[`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl NetTimeProvider::fn new -->
Allows network clients to get the current time of `clock`.
## `clock`
a `gst::Clock` to export over the network
## `address`
an address to bind on as a dotted quad
(xxx.xxx.xxx.xxx), IPv6 address, or `None` to bind to all addresses
## `port`
a port to bind on, or 0 to let the kernel choose
# Returns
the new `NetTimeProvider`, or `None` on error
<!-- struct NtpClock -->
# Implements
[`NetClientClockExt`](trait.NetClientClockExt.html), [`gst::ClockExt`](../gst/trait.ClockExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl NtpClock::fn new -->
Create a new `NtpClock` that will report the time provided by
the NTPv4 server on `remote_address` and `remote_port`.
## `name`
a name for the clock
## `remote_address`
the address or hostname of the remote clock provider
## `remote_port`
the port of the remote clock provider
## `base_time`
initial time of the clock
# Returns
a new `gst::Clock` that receives a time from the remote
clock.
<!-- struct PtpClock -->
GstPtpClock implements a PTP (IEEE1588:2008) ordinary clock in slave-only
mode, which allows a GStreamer pipeline to synchronize to a PTP network
clock in some specific domain.
The PTP subsystem can be initialized with `gst_ptp_init`, which then starts
a helper process to do the actual communication via the PTP ports. This is
required as PTP listens on ports < 1024 and thus requires special
privileges. Once this helper process is started, the main process will
synchronize to all PTP domains that are detected on the selected
interfaces.
`PtpClock::new` then allows creating a `gst::Clock` that provides the PTP
time from a master clock inside a specific PTP domain. This clock will only
return valid timestamps once the timestamps in the PTP domain are known. To
check this, you can use `gst::ClockExt::wait_for_sync`, the GstClock::synced
signal and `gst::ClockExt::is_synced`.
To gather statistics about the PTP clock synchronization,
`gst_ptp_statistics_callback_add` can be used. This gives the application
the possibility to collect all kinds of statistics from the clock
synchronization.
# Implements
[`gst::ClockExt`](../gst/trait.ClockExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl PtpClock::fn new -->
Creates a new PTP clock instance that exports the PTP time of the master
clock in `domain`. This clock can be slaved to other clocks as needed.
If `gst_ptp_init` was not called before, this will call `gst_ptp_init` with
default parameters.
This clock only returns valid timestamps after it received the first
times from the PTP master clock on the network. Once this happens the
GstPtpClock::internal-clock property will become non-NULL. You can
check this with `gst::ClockExt::wait_for_sync`, the GstClock::synced signal and
`gst::ClockExt::is_synced`.
## `name`
Name of the clock
## `domain`
PTP domain
# Returns
A new `gst::Clock`

@ -0,0 +1,747 @@
<!-- file * -->
<!-- struct Player -->
# Implements
[`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl Player::fn new -->
Creates a new `Player` instance that uses `signal_dispatcher` to dispatch
signals to some event loop system, or emits signals directly if `None` is
passed. See `PlayerGMainContextSignalDispatcher::new`.
Video is going to be rendered by `video_renderer`, or if `None` is provided
no special video setup will be done and some default handling will be
performed.
## `video_renderer`
GstPlayerVideoRenderer to use
## `signal_dispatcher`
GstPlayerSignalDispatcher to use
# Returns
a new `Player` instance
<!-- impl Player::fn config_get_position_update_interval -->
## `config`
a `Player` configuration
# Returns
current position update interval in milliseconds
Since 1.10
<!-- impl Player::fn config_get_seek_accurate -->
## `config`
a `Player` configuration
# Returns
`true` if accurate seeking is enabled
Since 1.12
<!-- impl Player::fn config_get_user_agent -->
Return the user agent which has been configured using
`Player::config_set_user_agent`, if any.
## `config`
a `Player` configuration
# Returns
the configured agent, or `None`
Since 1.10
<!-- impl Player::fn config_set_position_update_interval -->
Set the interval in milliseconds between two position-updated signals.
Pass 0 to stop updating the position.
Since 1.10
## `config`
a `Player` configuration
## `interval`
interval in ms
<!-- impl Player::fn config_set_seek_accurate -->
Enable or disable accurate seeking. When enabled, elements will try harder
to seek as accurately as possible to the requested seek position. Generally
it will be slower especially for formats that don't have any indexes or
timestamp markers in the stream.
If accurate seeking is disabled, elements will seek as close as possible to the
requested position without slowing down seeking too much.
Accurate seeking is disabled by default.
## `config`
a `Player` configuration
## `accurate`
accurate seek or not
<!-- impl Player::fn config_set_user_agent -->
Set the user agent to pass to the server if `player` needs to connect
to a server during playback. This is typically used when playing HTTP
or RTSP streams.
Since 1.10
## `config`
a `Player` configuration
## `agent`
the string to use as user agent
<!-- impl Player::fn get_audio_streams -->
## `info`
a `PlayerMediaInfo`
# Returns
A `glib::List` of
matching `PlayerAudioInfo`.
<!-- impl Player::fn get_subtitle_streams -->
## `info`
a `PlayerMediaInfo`
# Returns
A `glib::List` of
matching `PlayerSubtitleInfo`.
<!-- impl Player::fn get_video_streams -->
## `info`
a `PlayerMediaInfo`
# Returns
A `glib::List` of
matching `PlayerVideoInfo`.
<!-- impl Player::fn visualizations_free -->
Frees a `None`-terminated array of `PlayerVisualization`.
## `viss`
a `None`-terminated array of `PlayerVisualization` to free
<!-- impl Player::fn visualizations_get -->
# Returns
a `None`-terminated array containing all available
visualizations. Use `Player::visualizations_free` after
usage.
<!-- impl Player::fn get_audio_video_offset -->
Retrieve the current value of the audio-video-offset property.
# Returns
The current value of audio-video-offset in nanoseconds
Since 1.10
<!-- impl Player::fn get_color_balance -->
Retrieve the current value of the indicated `type_`.
## `type_`
`PlayerColorBalanceType`
# Returns
The current value of `type_`, in the range [0,1]. In case of
error, -1 is returned.
<!-- impl Player::fn get_config -->
Get a copy of the current configuration of the player. This configuration
can either be modified and used for the `Player::set_config` call
or it must be freed after usage.
# Returns
a copy of the current configuration of `self`. Use
`gst::Structure::free` after usage or `Player::set_config`.
Since 1.10
<!-- impl Player::fn get_current_audio_track -->
A function to get the current audio `PlayerAudioInfo` instance.
# Returns
current audio track.
The caller should free it with `gobject::ObjectExt::unref`
<!-- impl Player::fn get_current_subtitle_track -->
A function to get the current subtitle `PlayerSubtitleInfo` instance.
# Returns
current subtitle track.
The caller should free it with `gobject::ObjectExt::unref`
<!-- impl Player::fn get_current_video_track -->
A function to get the current video `PlayerVideoInfo` instance.
# Returns
current video track.
The caller should free it with `gobject::ObjectExt::unref`
<!-- impl Player::fn get_current_visualization -->
# Returns
Name of the currently enabled visualization.
`g_free` after usage.
<!-- impl Player::fn get_duration -->
Retrieves the duration of the media stream that `self` represents.
# Returns
the duration of the currently-playing media stream, in
nanoseconds.
<!-- impl Player::fn get_media_info -->
A function to get the current media info `PlayerMediaInfo` instance.
# Returns
media info instance.
The caller should free it with `gobject::ObjectExt::unref`
<!-- impl Player::fn get_multiview_flags -->
Retrieve the current value of the multiview-flags property.
# Returns
The current value of the multiview flags. Default: 0x00000000 "none"
<!-- impl Player::fn get_multiview_mode -->
Retrieve the current value of the multiview-mode property.
# Returns
The current value of the multiview mode. Default: -1 "none"
<!-- impl Player::fn get_mute -->
# Returns
`true` if the currently-playing stream is muted.
<!-- impl Player::fn get_pipeline -->
# Returns
The internal playbin instance
<!-- impl Player::fn get_position -->
# Returns
the absolute position time, in nanoseconds, of the
currently-playing stream.
<!-- impl Player::fn get_rate -->
# Returns
current playback rate
<!-- impl Player::fn get_subtitle_uri -->
Gets the URI of the current external subtitle.
# Returns
URI of the current external subtitle.
`g_free` after usage.
<!-- impl Player::fn get_uri -->
Gets the URI of the currently-playing stream.
# Returns
a string containing the URI of the
currently-playing stream. `g_free` after usage.
<!-- impl Player::fn get_video_snapshot -->
Get a snapshot of the currently selected video stream, if any. The format can be
selected with `format` and optional configuration is possible with `config`.
Currently supported settings are:
- width, height of type G_TYPE_INT
- pixel-aspect-ratio of type GST_TYPE_FRACTION
Except for the GST_PLAYER_THUMBNAIL_RAW_NATIVE format, if no config is set, the pixel-aspect-ratio will be 1/1
## `format`
output format of the video snapshot
## `config`
Additional configuration
# Returns
Current video snapshot sample or `None` on failure
Since 1.12
<!-- impl Player::fn get_volume -->
Returns the current volume level, as a percentage between 0 and 1.
# Returns
the volume as percentage between 0 and 1.
<!-- impl Player::fn has_color_balance -->
Checks whether `self` has color balance support available.
# Returns
`true` if `self` has color balance support. Otherwise,
`false`.
<!-- impl Player::fn pause -->
Pauses the current stream.
<!-- impl Player::fn play -->
Request to play the loaded stream.
<!-- impl Player::fn seek -->
Seeks the currently-playing stream to the absolute `position` time
in nanoseconds.
## `position`
position to seek in nanoseconds
<!-- impl Player::fn set_audio_track -->
Sets the audio track `stream_index`.
## `stream_index`
stream index
# Returns
`true` or `false`
<!-- impl Player::fn set_audio_track_enabled -->
Enable or disable the current audio track.
## `enabled`
TRUE or FALSE
<!-- impl Player::fn set_audio_video_offset -->
Sets the audio-video-offset property to the value of `offset`.
Since 1.10
## `offset`
`gint64` in nanoseconds
<!-- impl Player::fn set_color_balance -->
Sets the current value of the indicated channel `type_` to the passed
value.
## `type_`
`PlayerColorBalanceType`
## `value`
The new value for the `type_`, ranged [0,1]
<!-- impl Player::fn set_config -->
Set the configuration of the player. If the player is already configured, and
the configuration hasn't changed, this function will return `true`. If the
player is not in the GST_PLAYER_STATE_STOPPED state, this method will return `false`
and the active configuration will remain unchanged.
`config` is a `gst::Structure` that contains the configuration parameters for
the player.
This function takes ownership of `config`.
## `config`
a `gst::Structure`
# Returns
`true` when the configuration could be set.
Since 1.10
<!-- impl Player::fn set_multiview_flags -->
Sets the multiview flags to the passed
value.
## `flags`
The new value for the multiview flags
<!-- impl Player::fn set_multiview_mode -->
Sets the multiview mode to the passed
value.
## `mode`
The new value for the multiview mode
<!-- impl Player::fn set_mute -->
Sets whether the currently-playing stream should be muted.
## `val`
Mute state that should be set
<!-- impl Player::fn set_rate -->
Plays back at the specified `rate`.
## `rate`
playback rate
<!-- impl Player::fn set_subtitle_track -->
Sets the subtitle track `stream_index`.
## `stream_index`
stream index
# Returns
`true` or `false`
<!-- impl Player::fn set_subtitle_track_enabled -->
Enable or disable the current subtitle track.
## `enabled`
TRUE or FALSE
<!-- impl Player::fn set_subtitle_uri -->
Sets the external subtitle URI. This should be combined with a call to
gst_player_set_subtitle_track_enabled(`self`, TRUE) so the subtitles are actually
rendered.
## `uri`
subtitle URI
<!-- impl Player::fn set_uri -->
Sets the next URI to play.
## `uri`
next URI to play.
<!-- impl Player::fn set_video_track -->
Sets the video track `stream_index`.
## `stream_index`
stream index
# Returns
`true` or `false`
<!-- impl Player::fn set_video_track_enabled -->
Enable or disable the current video track.
## `enabled`
TRUE or FALSE
<!-- impl Player::fn set_visualization -->
## `name`
visualization element obtained from
`Player::visualizations_get()`
# Returns
`true` if the visualization was set correctly. Otherwise,
`false`.
<!-- impl Player::fn set_visualization_enabled -->
Enable or disable the visualization.
## `enabled`
TRUE or FALSE
<!-- impl Player::fn set_volume -->
Sets the volume level of the stream as a percentage between 0 and 1.
## `val`
the new volume level, as a percentage between 0 and 1
<!-- impl Player::fn stop -->
Stops playing the current stream and resets to the first position
in the stream.
<!-- struct PlayerAudioInfo -->
`PlayerStreamInfo` specific to audio streams.
# Implements
[`PlayerStreamInfoExt`](trait.PlayerStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl PlayerAudioInfo::fn get_bitrate -->
# Returns
the audio bitrate in `PlayerAudioInfo`.
<!-- impl PlayerAudioInfo::fn get_channels -->
# Returns
the number of audio channels in `PlayerAudioInfo`.
<!-- impl PlayerAudioInfo::fn get_language -->
# Returns
the language of the stream, or `None` if unknown.
<!-- impl PlayerAudioInfo::fn get_max_bitrate -->
# Returns
the audio maximum bitrate in `PlayerAudioInfo`.
<!-- impl PlayerAudioInfo::fn get_sample_rate -->
# Returns
the audio sample rate in `PlayerAudioInfo`.
<!-- enum PlayerColorBalanceType -->
<!-- enum PlayerColorBalanceType::variant Hue -->
hue or color balance.
<!-- enum PlayerColorBalanceType::variant Brightness -->
brightness or black level.
<!-- enum PlayerColorBalanceType::variant Saturation -->
color saturation or chroma
gain.
<!-- enum PlayerColorBalanceType::variant Contrast -->
contrast or luma gain.
<!-- enum PlayerError -->
<!-- enum PlayerError::variant Failed -->
generic error.
<!-- struct PlayerGMainContextSignalDispatcher -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html), [`PlayerSignalDispatcherExt`](trait.PlayerSignalDispatcherExt.html)
<!-- impl PlayerGMainContextSignalDispatcher::fn new -->
Creates a new GstPlayerSignalDispatcher that uses `application_context`,
or the thread default one if `None` is used. See `gst_player_new_full`.
## `application_context`
GMainContext to use or `None`
# Returns
the new GstPlayerSignalDispatcher
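A sketch of the typical GLib main-loop setup in C; passing NULL selects the thread-default GMainContext:

```C
/* dispatch player signals via the thread-default GMainContext */
GstPlayerSignalDispatcher *dispatcher =
    gst_player_g_main_context_signal_dispatcher_new (NULL);
GstPlayer *player = gst_player_new (NULL, dispatcher);
```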
<!-- struct PlayerMediaInfo -->
Structure containing the media information of a URI.
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl PlayerMediaInfo::fn get_audio_streams -->
# Returns
A `glib::List` of
matching `PlayerAudioInfo`.
<!-- impl PlayerMediaInfo::fn get_container_format -->
# Returns
the container format.
<!-- impl PlayerMediaInfo::fn get_duration -->
# Returns
duration of the media.
<!-- impl PlayerMediaInfo::fn get_image_sample -->
Function to get the image (or preview-image) stored in the tag list.
Applications can use the gst_sample_*() API to get the caps, buffer, etc.
# Returns
GstSample or NULL.
<!-- impl PlayerMediaInfo::fn get_number_of_audio_streams -->
# Returns
number of audio streams.
<!-- impl PlayerMediaInfo::fn get_number_of_streams -->
# Returns
number of total streams.
<!-- impl PlayerMediaInfo::fn get_number_of_subtitle_streams -->
# Returns
number of subtitle streams.
<!-- impl PlayerMediaInfo::fn get_number_of_video_streams -->
# Returns
number of video streams.
<!-- impl PlayerMediaInfo::fn get_stream_list -->
# Returns
A `glib::List` of
matching `PlayerStreamInfo`.
<!-- impl PlayerMediaInfo::fn get_subtitle_streams -->
# Returns
A `glib::List` of
matching `PlayerSubtitleInfo`.
<!-- impl PlayerMediaInfo::fn get_tags -->
# Returns
the tags contained in media info.
<!-- impl PlayerMediaInfo::fn get_title -->
# Returns
the media title.
<!-- impl PlayerMediaInfo::fn get_uri -->
# Returns
the URI associated with `PlayerMediaInfo`.
<!-- impl PlayerMediaInfo::fn get_video_streams -->
# Returns
A `glib::List` of
matching `PlayerVideoInfo`.
<!-- impl PlayerMediaInfo::fn is_live -->
# Returns
`true` if the media is live.
<!-- impl PlayerMediaInfo::fn is_seekable -->
# Returns
`true` if the media is seekable.
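As an example of walking the media information in C (a sketch, assuming an existing `player` with loaded media):

```C
GstPlayerMediaInfo *info = gst_player_get_media_info (player);

if (info != NULL) {
  GList *l;

  /* the returned lists stay owned by the media info object */
  for (l = gst_player_media_info_get_video_streams (info); l != NULL;
      l = l->next) {
    GstPlayerVideoInfo *vinfo = l->data;

    g_print ("video: %dx%d\n",
        gst_player_video_info_get_width (vinfo),
        gst_player_video_info_get_height (vinfo));
  }
  g_object_unref (info);
}
```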
<!-- struct PlayerSignalDispatcher -->
# Implements
[`PlayerSignalDispatcherExt`](trait.PlayerSignalDispatcherExt.html)
<!-- trait PlayerSignalDispatcherExt -->
Trait containing all `PlayerSignalDispatcher` methods.
# Implementors
[`PlayerGMainContextSignalDispatcher`](struct.PlayerGMainContextSignalDispatcher.html), [`PlayerSignalDispatcher`](struct.PlayerSignalDispatcher.html)
<!-- enum PlayerSnapshotFormat -->
<!-- enum PlayerState -->
<!-- enum PlayerState::variant Stopped -->
the player is stopped.
<!-- enum PlayerState::variant Buffering -->
the player is buffering.
<!-- enum PlayerState::variant Paused -->
the player is paused.
<!-- enum PlayerState::variant Playing -->
the player is currently playing a
stream.
<!-- struct PlayerStreamInfo -->
Base structure for information concerning a media stream. Depending on
the stream type, one can find more media-specific information in
`PlayerVideoInfo`, `PlayerAudioInfo`, `PlayerSubtitleInfo`.
# Implements
[`PlayerStreamInfoExt`](trait.PlayerStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- trait PlayerStreamInfoExt -->
Trait containing all `PlayerStreamInfo` methods.
# Implementors
[`PlayerAudioInfo`](struct.PlayerAudioInfo.html), [`PlayerStreamInfo`](struct.PlayerStreamInfo.html), [`PlayerSubtitleInfo`](struct.PlayerSubtitleInfo.html), [`PlayerVideoInfo`](struct.PlayerVideoInfo.html)
<!-- trait PlayerStreamInfoExt::fn get_caps -->
# Returns
the `gst::Caps` of the stream.
<!-- trait PlayerStreamInfoExt::fn get_codec -->
A string describing the codec used in `PlayerStreamInfo`.
# Returns
codec string or NULL on unknown.
<!-- trait PlayerStreamInfoExt::fn get_index -->
Function to get the stream index from a `PlayerStreamInfo` instance.
# Returns
the stream index of this stream.
<!-- trait PlayerStreamInfoExt::fn get_stream_type -->
Function to return a human-readable name for the stream type
of the given `self` (e.g. "audio", "video", "subtitle")
# Returns
a human-readable name
<!-- trait PlayerStreamInfoExt::fn get_tags -->
# Returns
the tags contained in this stream.
<!-- struct PlayerSubtitleInfo -->
`PlayerStreamInfo` specific to subtitle streams.
# Implements
[`PlayerStreamInfoExt`](trait.PlayerStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl PlayerSubtitleInfo::fn get_language -->
# Returns
the language of the stream, or NULL if unknown.
<!-- struct PlayerVideoInfo -->
`PlayerStreamInfo` specific to video streams.
# Implements
[`PlayerStreamInfoExt`](trait.PlayerStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl PlayerVideoInfo::fn get_bitrate -->
# Returns
the current bitrate of video in `PlayerVideoInfo`.
<!-- impl PlayerVideoInfo::fn get_framerate -->
## `fps_n`
Numerator of frame rate
## `fps_d`
Denominator of frame rate
<!-- impl PlayerVideoInfo::fn get_height -->
# Returns
the height of video in `PlayerVideoInfo`.
<!-- impl PlayerVideoInfo::fn get_max_bitrate -->
# Returns
the maximum bitrate of video in `PlayerVideoInfo`.
<!-- impl PlayerVideoInfo::fn get_pixel_aspect_ratio -->
Returns the pixel aspect ratio in `par_n` and `par_d`
## `par_n`
numerator
## `par_d`
denominator
<!-- impl PlayerVideoInfo::fn get_width -->
# Returns
the width of video in `PlayerVideoInfo`.
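The framerate and pixel-aspect-ratio getters return their values through out parameters, as in this C sketch (assuming an existing `vinfo`):

```C
gint fps_n, fps_d;
guint par_n, par_d;

gst_player_video_info_get_framerate (vinfo, &fps_n, &fps_d);
gst_player_video_info_get_pixel_aspect_ratio (vinfo, &par_n, &par_d);
g_print ("%d/%d fps, PAR %u/%u\n", fps_n, fps_d, par_n, par_d);
```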
<!-- struct PlayerVideoOverlayVideoRenderer -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html), [`PlayerVideoRendererExt`](trait.PlayerVideoRendererExt.html)
<!-- impl PlayerVideoOverlayVideoRenderer::fn new -->
## `window_handle`
Window handle to use or `None`
<!-- impl PlayerVideoOverlayVideoRenderer::fn new_with_sink -->
## `window_handle`
Window handle to use or `None`
## `video_sink`
the custom video_sink element to be set for the video renderer
# Returns
the new video renderer
Since 1.12
<!-- impl PlayerVideoOverlayVideoRenderer::fn expose -->
Tell an overlay that it has been exposed. This will redraw the current frame
in the drawable even if the pipeline is PAUSED.
<!-- impl PlayerVideoOverlayVideoRenderer::fn get_render_rectangle -->
Return the currently configured render rectangle. See `PlayerVideoOverlayVideoRenderer::set_render_rectangle`
for details.
## `x`
the horizontal offset of the render area inside the window
## `y`
the vertical offset of the render area inside the window
## `width`
the width of the render area inside the window
## `height`
the height of the render area inside the window
<!-- impl PlayerVideoOverlayVideoRenderer::fn get_window_handle -->
# Returns
The currently set, platform-specific window
handle
<!-- impl PlayerVideoOverlayVideoRenderer::fn set_render_rectangle -->
Configure a subregion as a video target within the window set by
`PlayerVideoOverlayVideoRenderer::set_window_handle`. If this is not
used or not supported, the video will fill the whole area of the window set
as the overlay. By specifying the rectangle, the video can be overlaid onto
a specific region of that window only. After setting the new rectangle, one
should call `PlayerVideoOverlayVideoRenderer::expose` to force a
redraw. To unset the region, pass -1 for the `width` and `height` parameters.
This method is needed for non-fullscreen video overlay in UI toolkits that
do not support subwindows.
## `x`
the horizontal offset of the render area inside the window
## `y`
the vertical offset of the render area inside the window
## `width`
the width of the render area inside the window
## `height`
the height of the render area inside the window
<!-- impl PlayerVideoOverlayVideoRenderer::fn set_window_handle -->
Sets the platform-specific window handle into which the video
should be rendered.
## `window_handle`
handle referencing the platform-specific window
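A combined C sketch of the overlay renderer calls above; `native_window_handle` is a hypothetical platform handle obtained from the UI toolkit:

```C
GstPlayerVideoRenderer *renderer =
    gst_player_video_overlay_video_renderer_new (native_window_handle);
GstPlayer *player = gst_player_new (renderer, NULL);

/* constrain the video to a sub-region, then force a redraw */
gst_player_video_overlay_video_renderer_set_render_rectangle (
    GST_PLAYER_VIDEO_OVERLAY_VIDEO_RENDERER (renderer), 0, 0, 320, 240);
gst_player_video_overlay_video_renderer_expose (
    GST_PLAYER_VIDEO_OVERLAY_VIDEO_RENDERER (renderer));
```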
<!-- struct PlayerVideoRenderer -->
# Implements
[`PlayerVideoRendererExt`](trait.PlayerVideoRendererExt.html)
<!-- trait PlayerVideoRendererExt -->
Trait containing all `PlayerVideoRenderer` methods.
# Implementors
[`PlayerVideoOverlayVideoRenderer`](struct.PlayerVideoOverlayVideoRenderer.html), [`PlayerVideoRenderer`](struct.PlayerVideoRenderer.html)
<!-- struct PlayerVisualization -->
A `PlayerVisualization` descriptor.
<!-- impl PlayerVisualization::fn copy -->
Makes a copy of the `PlayerVisualization`. The result must be
freed using `PlayerVisualization::free`.
# Returns
an allocated copy of `self`.
<!-- impl PlayerVisualization::fn free -->
Frees a `PlayerVisualization`.

File diff suppressed because it is too large

docs/gstreamer-rtsp/docs.md Normal file
@@ -0,0 +1,172 @@
<!-- file * -->
<!-- enum RTSPAuthMethod -->
Authentication methods, ordered by strength
<!-- enum RTSPAuthMethod::variant None -->
no authentication
<!-- enum RTSPAuthMethod::variant Basic -->
basic authentication
<!-- enum RTSPAuthMethod::variant Digest -->
digest authentication
<!-- struct RTSPAuthParam -->
<!-- enum RTSPFamily -->
The possible network families.
<!-- enum RTSPFamily::variant None -->
unknown network family
<!-- enum RTSPFamily::variant Inet -->
internet
<!-- enum RTSPFamily::variant Inet6 -->
internet V6
<!-- enum RTSPHeaderField -->
Enumeration of RTSP header fields
<!-- enum RTSPMsgType -->
The type of a message.
<!-- enum RTSPMsgType::variant Invalid -->
invalid message type
<!-- enum RTSPMsgType::variant Request -->
RTSP request message
<!-- enum RTSPMsgType::variant Response -->
RTSP response message
<!-- enum RTSPMsgType::variant HttpRequest -->
HTTP request message.
<!-- enum RTSPMsgType::variant HttpResponse -->
HTTP response message.
<!-- enum RTSPMsgType::variant Data -->
data message
<!-- enum RTSPRangeUnit -->
Different possible time range units.
<!-- enum RTSPRangeUnit::variant Smpte -->
SMPTE timecode
<!-- enum RTSPRangeUnit::variant Smpte30Drop -->
29.97 frames per second
<!-- enum RTSPRangeUnit::variant Smpte25 -->
25 frames per second
<!-- enum RTSPRangeUnit::variant Npt -->
Normal play time
<!-- enum RTSPRangeUnit::variant Clock -->
Absolute time expressed as ISO 8601 timestamps
<!-- enum RTSPResult -->
Result codes from the RTSP functions.
<!-- enum RTSPResult::variant Ok -->
no error
<!-- enum RTSPResult::variant Error -->
some unspecified error occurred
<!-- enum RTSPResult::variant Einval -->
invalid arguments were provided to a function
<!-- enum RTSPResult::variant Eintr -->
an operation was canceled
<!-- enum RTSPResult::variant Enomem -->
no memory was available for the operation
<!-- enum RTSPResult::variant Eresolv -->
a host resolve error occurred
<!-- enum RTSPResult::variant Enotimpl -->
function not implemented
<!-- enum RTSPResult::variant Esys -->
a system error occurred, errno contains more details
<!-- enum RTSPResult::variant Eparse -->
a parsing error occurred
<!-- enum RTSPResult::variant Ewsastart -->
Windows networking could not start
<!-- enum RTSPResult::variant Ewsaversion -->
the Windows networking stack has the wrong version
<!-- enum RTSPResult::variant Eeof -->
end-of-file was reached
<!-- enum RTSPResult::variant Enet -->
a network problem occurred, h_errno contains more details
<!-- enum RTSPResult::variant Enotip -->
the host is not an IP host
<!-- enum RTSPResult::variant Etimeout -->
a timeout occurred
<!-- enum RTSPResult::variant Etget -->
the tunnel GET request has been performed
<!-- enum RTSPResult::variant Etpost -->
the tunnel POST request has been performed
<!-- enum RTSPResult::variant Elast -->
last error
<!-- enum RTSPState -->
The different RTSP states.
<!-- enum RTSPState::variant Invalid -->
invalid state
<!-- enum RTSPState::variant Init -->
initializing
<!-- enum RTSPState::variant Ready -->
ready for operation
<!-- enum RTSPState::variant Seeking -->
seeking in progress
<!-- enum RTSPState::variant Playing -->
playing
<!-- enum RTSPState::variant Recording -->
recording
<!-- enum RTSPStatusCode -->
Enumeration of RTSP status codes
<!-- enum RTSPTimeType -->
Possible time types.
<!-- enum RTSPTimeType::variant Seconds -->
seconds
<!-- enum RTSPTimeType::variant Now -->
now
<!-- enum RTSPTimeType::variant End -->
end
<!-- enum RTSPTimeType::variant Frames -->
frames and subframes
<!-- enum RTSPTimeType::variant Utc -->
UTC time
<!-- struct RTSPUrl -->
Provides helper functions to handle RTSP URLs.
<!-- impl RTSPUrl::fn copy -->
Make a copy of `self`.
# Returns
a copy of `self`. Free with gst_rtsp_url_free() after usage.
<!-- impl RTSPUrl::fn decode_path_components -->
Splits the path of `self` on '/' boundaries, decoding the resulting components.
The decoding performed by this routine is "URI decoding", as defined in RFC
3986, commonly known as percent-decoding. For example, a string "foo%2fbar"
will decode to "foo/bar" -- the %2f being replaced by the corresponding byte
with hex value 0x2f. Note that there is no guarantee that the resulting byte
sequence is valid in any given encoding. As a special case, %00 is not
unescaped to NUL, as that would prematurely terminate the string.
Also note that since paths usually start with a slash, the first component
will usually be the empty string.
# Returns
`None`-terminated array of URL components. Free with
`g_strfreev` when no longer needed.
<!-- impl RTSPUrl::fn free -->
Free the memory used by `self`.
<!-- impl RTSPUrl::fn get_port -->
Get the port number of `self`.
## `port`
location to hold the port
# Returns
`RTSPResult::Ok`.
<!-- impl RTSPUrl::fn get_request_uri -->
Get a newly allocated string describing the request URI for `self`.
# Returns
a string with the request URI. `g_free` after usage.
<!-- impl RTSPUrl::fn set_port -->
Set the port number in `self` to `port`.
## `port`
the port
# Returns
`RTSPResult::Ok`.
<!-- impl RTSPUrl::fn parse -->
Parse the RTSP `urlstr` into a newly allocated `RTSPUrl`. Free after usage
with `RTSPUrl::free`.
## `urlstr`
the url string to parse
## `url`
location to hold the result.
# Returns
an `RTSPResult`.
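A short C sketch tying the URL helpers together; the RTSP address is hypothetical:

```C
GstRTSPUrl *url = NULL;

if (gst_rtsp_url_parse ("rtsp://example.com:554/stream", &url) == GST_RTSP_OK) {
  gchar *request_uri = gst_rtsp_url_get_request_uri (url);
  gchar **components = gst_rtsp_url_decode_path_components (url);

  g_print ("request URI: %s\n", request_uri);

  g_free (request_uri);
  g_strfreev (components);
  gst_rtsp_url_free (url);
}
```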

docs/gstreamer-sdp/docs.md Normal file
@@ -0,0 +1,565 @@
<!-- file * -->
<!-- enum MIKEYCacheType -->
The different cache types
<!-- enum MIKEYCacheType::variant None -->
The envelope key MUST NOT be cached
<!-- enum MIKEYCacheType::variant Always -->
The envelope key MUST be cached
<!-- enum MIKEYCacheType::variant ForCsb -->
The envelope key MUST be cached, but only
to be used for the specific CSB.
<!-- enum MIKEYEncAlg -->
The encryption algorithm used to encrypt the Encr data field
<!-- enum MIKEYEncAlg::variant Null -->
no encryption
<!-- enum MIKEYEncAlg::variant AesCm128 -->
AES-CM using a 128-bit key
<!-- enum MIKEYEncAlg::variant AesKw128 -->
AES Key Wrap using a 128-bit key
<!-- enum MIKEYKVType -->
The key validity type
<!-- enum MIKEYKVType::variant Null -->
No specific usage rule
<!-- enum MIKEYKVType::variant Spi -->
The key is associated with the SPI/MKI
<!-- enum MIKEYKVType::variant Interval -->
The key has a start and expiration time
<!-- enum MIKEYKeyDataType -->
The type of key.
<!-- enum MIKEYKeyDataType::variant Tgk -->
a TEK Generation Key
<!-- enum MIKEYKeyDataType::variant Tek -->
Traffic-Encrypting Key
<!-- enum MIKEYMacAlg -->
Specifies the authentication algorithm used
<!-- enum MIKEYMacAlg::variant Null -->
no authentication
<!-- enum MIKEYMacAlg::variant HmacSha1160 -->
HMAC-SHA-1-160
<!-- enum MIKEYMapType -->
Specifies the method of uniquely mapping Crypto Sessions to the security
protocol sessions.
<!-- struct MIKEYMessage -->
Structure holding the information of the MIKEY message
<!-- impl MIKEYMessage::fn new -->
Make a new MIKEY message.
# Returns
a new `MIKEYMessage` on success
<!-- impl MIKEYMessage::fn new_from_bytes -->
Make a new `MIKEYMessage` from `bytes`.
## `bytes`
a `glib::Bytes`
## `info`
a `MIKEYDecryptInfo`
# Returns
a new `MIKEYMessage`
<!-- impl MIKEYMessage::fn new_from_caps -->
Makes a MIKEY message including:
- Security Policy Payload
- Key Data Transport Payload
- Key Data Sub-Payload
## `caps`
a `gst::Caps`, including SRTP parameters (srtp/srtcp cipher, authorization, key data)
# Returns
a `MIKEYMessage`,
or `None` if there is no srtp information in the caps.
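For example, converting SRTP caps into a base64-encoded MIKEY message (as used for SDP key management) might look like this C sketch; `caps` is assumed to exist and to carry SRTP parameters:

```C
GstMIKEYMessage *msg = gst_mikey_message_new_from_caps (caps);

if (msg != NULL) {
  gchar *b64 = gst_mikey_message_base64_encode (msg);

  /* use b64, e.g. in an SDP "key-mgmt" attribute */
  g_free (b64);
  gst_mikey_message_unref (msg);
}
```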
<!-- impl MIKEYMessage::fn new_from_data -->
Parse `size` bytes from `data` into a `MIKEYMessage`. `info` contains the
parameters to decrypt and verify the data.
## `data`
bytes to read
## `size`
length of `data`
## `info`
`MIKEYDecryptInfo`
# Returns
a `MIKEYMessage` on success or `None` when parsing failed and
`error` will be set.
<!-- impl MIKEYMessage::fn add_cs_srtp -->
Add a Crypto policy for SRTP to `self`.
## `policy`
The security policy applied for the stream with `ssrc`
## `ssrc`
the SSRC that must be used for the stream
## `roc`
current rollover counter
# Returns
`true` on success
<!-- impl MIKEYMessage::fn add_payload -->
Add a new payload to `self`.
## `payload`
a `MIKEYPayload`
# Returns
`true` on success
<!-- impl MIKEYMessage::fn add_pke -->
Add a new PKE payload to `self` with the given parameters.
## `C`
envelope key cache indicator
## `data_len`
the length of `data`
## `data`
the encrypted envelope key
# Returns
`true` on success
<!-- impl MIKEYMessage::fn add_rand -->
Add a new RAND payload to `self` with the given parameters.
## `len`
the length of `rand`
## `rand`
random data
# Returns
`true` on success
<!-- impl MIKEYMessage::fn add_rand_len -->
Add a new RAND payload to `self` with `len` random bytes.
## `len`
length
# Returns
`true` on success
<!-- impl MIKEYMessage::fn add_t -->
Add a new T payload to `self` with the given parameters.
## `type_`
specifies the timestamp type used
## `ts_value`
The timestamp value of the specified `type_`
# Returns
`true` on success
<!-- impl MIKEYMessage::fn add_t_now_ntp_utc -->
Add a new T payload to `self` that contains the current time
in NTP-UTC format.
# Returns
`true` on success
<!-- impl MIKEYMessage::fn base64_encode -->
# Returns
a `gchar`, base64-encoded data
<!-- impl MIKEYMessage::fn find_payload -->
Find the `nth` occurrence of the payload with `type_` in `self`.
## `type_`
a `MIKEYPayloadType`
## `nth`
payload to find
# Returns
the `nth` `MIKEYPayload` of `type_`.
<!-- impl MIKEYMessage::fn get_cs_srtp -->
Get the policy information of `self` at `idx`.
## `idx`
an index
# Returns
a `MIKEYMapSRTP`
<!-- impl MIKEYMessage::fn get_n_cs -->
Get the number of crypto sessions in `self`.
# Returns
the number of crypto sessions
<!-- impl MIKEYMessage::fn get_n_payloads -->
Get the number of payloads in `self`.
# Returns
the number of payloads in `self`
<!-- impl MIKEYMessage::fn get_payload -->
Get the `MIKEYPayload` at `idx` in `self`
## `idx`
an index
# Returns
the `MIKEYPayload` at `idx`. The payload
remains valid for as long as it is part of `self`.
<!-- impl MIKEYMessage::fn insert_cs_srtp -->
Insert a Crypto Session map for SRTP in `self` at `idx`
When `idx` is -1, the policy will be appended.
## `idx`
the index to insert at
## `map`
the map info
# Returns
`true` on success
<!-- impl MIKEYMessage::fn insert_payload -->
Insert the `payload` at index `idx` in `self`. If `idx` is -1, the payload
will be appended to `self`.
## `idx`
an index
## `payload`
a `MIKEYPayload`
# Returns
`true` on success
<!-- impl MIKEYMessage::fn remove_cs_srtp -->
Remove the SRTP policy at `idx`.
## `idx`
the index to remove
# Returns
`true` on success
<!-- impl MIKEYMessage::fn remove_payload -->
Remove the payload in `self` at `idx`
## `idx`
an index
# Returns
`true` on success
<!-- impl MIKEYMessage::fn replace_cs_srtp -->
Replace a Crypto Session map for SRTP in `self` at `idx` with `map`.
## `idx`
the index to insert at
## `map`
the map info
# Returns
`true` on success
<!-- impl MIKEYMessage::fn replace_payload -->
Replace the payload at `idx` in `self` with `payload`.
## `idx`
an index
## `payload`
a `MIKEYPayload`
# Returns
`true` on success
<!-- impl MIKEYMessage::fn set_info -->
Set the information in `self`.
## `version`
a version
## `type_`
a `MIKEYType`
## `V`
verify flag
## `prf_func`
the `MIKEYPRFFunc` function to use
## `CSB_id`
the Crypto Session Bundle id
## `map_type`
the `GstMIKEYCSIDMapType`
# Returns
`true` on success
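A minimal C sketch of assembling a message header; the CSB id `0x12345678` is an arbitrary example value:

```C
GstMIKEYMessage *msg = gst_mikey_message_new ();

/* pre-shared key initiator message, mapped to SRTP crypto sessions */
gst_mikey_message_set_info (msg, GST_MIKEY_VERSION, GST_MIKEY_TYPE_PSK_INIT,
    FALSE, GST_MIKEY_PRF_MIKEY_1, 0x12345678, GST_MIKEY_MAP_TYPE_SRTP);
gst_mikey_message_add_t_now_ntp_utc (msg);
gst_mikey_message_add_rand_len (msg, 16);
```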
<!-- impl MIKEYMessage::fn to_bytes -->
Convert `self` to a `glib::Bytes`.
## `info`
a `MIKEYEncryptInfo`
# Returns
a new `glib::Bytes` for `self`.
<!-- impl MIKEYMessage::fn to_caps -->
Feature: `v1_8_1`
## `caps`
a `gst::Caps` to be filled with SRTP parameters (srtp/srtcp cipher, authorization, key data)
# Returns
`true` on success
<!-- enum MIKEYPRFFunc -->
The PRF function that has been/will be used for key derivation
<!-- enum MIKEYPRFFunc::variant MikeyPrfMikey1 -->
MIKEY-1 PRF function
<!-- struct MIKEYPayload -->
Hold the common fields for all payloads
<!-- impl MIKEYPayload::fn new -->
Make a new `MIKEYPayload` with `type_`.
## `type_`
a `MIKEYPayloadType`
# Returns
a new `MIKEYPayload` or `None` on failure.
<!-- impl MIKEYPayload::fn kemac_add_sub -->
Add a new sub payload to `self`.
## `newpay`
a `MIKEYPayload` to add
# Returns
`true` on success.
<!-- impl MIKEYPayload::fn kemac_get_n_sub -->
Get the number of sub payloads of `self`. `self` should be of type
`MIKEYPayloadType::Kemac`.
# Returns
the number of sub payloads in `self`
<!-- impl MIKEYPayload::fn kemac_get_sub -->
Get the sub payload of `self` at `idx`. `self` should be of type
`MIKEYPayloadType::Kemac`.
## `idx`
an index
# Returns
the `MIKEYPayload` at `idx`.
<!-- impl MIKEYPayload::fn kemac_remove_sub -->
Remove the sub payload at `idx` in `self`.
## `idx`
the index to remove
# Returns
`true` on success.
<!-- impl MIKEYPayload::fn kemac_set -->
Set the KEMAC parameters. `self` should point to a `MIKEYPayloadType::Kemac`
payload.
## `enc_alg`
the `MIKEYEncAlg`
## `mac_alg`
a `MIKEYMacAlg`
# Returns
`true` on success
<!-- impl MIKEYPayload::fn key_data_set_interval -->
Set the key validity period in the `MIKEYPayloadType::KeyData` `self`.
## `vf_len`
the length of `vf_data`
## `vf_data`
the Valid From data
## `vt_len`
the length of `vt_data`
## `vt_data`
the Valid To data
# Returns
`true` on success
<!-- impl MIKEYPayload::fn key_data_set_key -->
Set `key_len` bytes of `key_data` of type `key_type` as the key for the
`MIKEYPayloadType::KeyData` `self`.
## `key_type`
a `MIKEYKeyDataType`
## `key_len`
the length of `key_data`
## `key_data`
the key of type `key_type`
# Returns
`true` on success
<!-- impl MIKEYPayload::fn key_data_set_salt -->
Set the salt key data. If `salt_len` is 0 and `salt_data` is `None`, the
salt data will be removed.
## `salt_len`
the length of `salt_data`
## `salt_data`
the salt
# Returns
`true` on success
<!-- impl MIKEYPayload::fn key_data_set_spi -->
Set the SPI/MKI validity in the `MIKEYPayloadType::KeyData` `self`.
## `spi_len`
the length of `spi_data`
## `spi_data`
the SPI/MKI data
# Returns
`true` on success
<!-- impl MIKEYPayload::fn pke_set -->
Set the PKE values in `self`. `self` must be of type
`MIKEYPayloadType::Pke`.
## `C`
envelope key cache indicator
## `data_len`
the length of `data`
## `data`
the encrypted envelope key
# Returns
`true` on success
<!-- impl MIKEYPayload::fn rand_set -->
Set the random values in a `MIKEYPayloadType::Rand` `self`.
## `len`
the length of `rand`
## `rand`
random values
# Returns
`true` on success
<!-- impl MIKEYPayload::fn sp_add_param -->
Add a new parameter to the `MIKEYPayloadType::Sp` `self` with `type_`, `len`
and `val`.
## `type_`
a type
## `len`
a length
## `val`
`len` bytes of data
# Returns
`true` on success
<!-- impl MIKEYPayload::fn sp_get_n_params -->
Get the number of security policy parameters in a `MIKEYPayloadType::Sp`
`self`.
# Returns
the number of parameters in `self`
<!-- impl MIKEYPayload::fn sp_get_param -->
Get the Security Policy parameter in a `MIKEYPayloadType::Sp` `self`
at `idx`.
## `idx`
an index
# Returns
the `MIKEYPayloadSPParam` at `idx` in `self`
<!-- impl MIKEYPayload::fn sp_remove_param -->
Remove the Security Policy parameters from a `MIKEYPayloadType::Sp`
`self` at `idx`.
## `idx`
an index
# Returns
`true` on success
<!-- impl MIKEYPayload::fn sp_set -->
Set the Security Policy parameters for `self`.
## `policy`
the policy number
## `proto`
a `MIKEYSecProto`
# Returns
`true` on success
<!-- impl MIKEYPayload::fn t_set -->
Set the timestamp in a `MIKEYPayloadType::T` `self`.
## `type_`
the `MIKEYTSType`
## `ts_value`
the timestamp value
# Returns
`true` on success
<!-- enum MIKEYPayloadType -->
Different MIKEY Payload types.
<!-- enum MIKEYPayloadType::variant Last -->
Last payload
<!-- enum MIKEYPayloadType::variant Kemac -->
Key data transport payload
<!-- enum MIKEYPayloadType::variant Pke -->
Envelope data payload
<!-- enum MIKEYPayloadType::variant Dh -->
DH data payload
<!-- enum MIKEYPayloadType::variant Sign -->
Signature payload
<!-- enum MIKEYPayloadType::variant T -->
Timestamp payload
<!-- enum MIKEYPayloadType::variant Id -->
ID payload
<!-- enum MIKEYPayloadType::variant Cert -->
Certificate Payload
<!-- enum MIKEYPayloadType::variant Chash -->
Cert hash payload
<!-- enum MIKEYPayloadType::variant V -->
Verification message payload
<!-- enum MIKEYPayloadType::variant Sp -->
Security Policy payload
<!-- enum MIKEYPayloadType::variant Rand -->
RAND payload
<!-- enum MIKEYPayloadType::variant Err -->
Error payload
<!-- enum MIKEYPayloadType::variant KeyData -->
Key data sub-payload
<!-- enum MIKEYPayloadType::variant GenExt -->
General Extension Payload
<!-- enum MIKEYSecProto -->
Specifies the security protocol
<!-- enum MIKEYSecSRTP -->
This policy specifies the parameters for SRTP and SRTCP
<!-- enum MIKEYSecSRTP::variant EncAlg -->
Encryption algorithm
<!-- enum MIKEYSecSRTP::variant EncKeyLen -->
Session Encr. key length
<!-- enum MIKEYSecSRTP::variant AuthAlg -->
Authentication algorithm
<!-- enum MIKEYSecSRTP::variant AuthKeyLen -->
Session Auth. key length
<!-- enum MIKEYSecSRTP::variant SaltKeyLen -->
Session Salt key length
<!-- enum MIKEYSecSRTP::variant Prf -->
SRTP Pseudo Random Function
<!-- enum MIKEYSecSRTP::variant KeyDerivRate -->
Key derivation rate
<!-- enum MIKEYSecSRTP::variant SrtpEnc -->
SRTP encryption off/on, 0 if off, 1 if on
<!-- enum MIKEYSecSRTP::variant SrtcpEnc -->
SRTCP encryption off/on, 0 if off, 1 if on
<!-- enum MIKEYSecSRTP::variant FecOrder -->
sender's FEC order
<!-- enum MIKEYSecSRTP::variant SrtpAuth -->
SRTP authentication off/on, 0 if off, 1 if on
<!-- enum MIKEYSecSRTP::variant AuthTagLen -->
Authentication tag length
<!-- enum MIKEYSecSRTP::variant SrtpPrefixLen -->
SRTP prefix length
<!-- enum MIKEYTSType -->
Specifies the timestamp type.
<!-- enum MIKEYTSType::variant NtpUtc -->
an NTP time in UTC timezone
<!-- enum MIKEYTSType::variant Ntp -->
an NTP time
<!-- enum MIKEYTSType::variant Counter -->
a counter
<!-- enum MIKEYType -->
Different MIKEY data types.
<!-- enum MIKEYType::variant Invalid -->
Invalid type
<!-- enum MIKEYType::variant PskInit -->
Initiator's pre-shared key message
<!-- enum MIKEYType::variant PskVerify -->
Verification message of a Pre-shared key message
<!-- enum MIKEYType::variant PkInit -->
Initiator's public-key transport message
<!-- enum MIKEYType::variant PkVerify -->
Verification message of a public-key message
<!-- enum MIKEYType::variant DhInit -->
Initiator's DH exchange message
<!-- enum MIKEYType::variant DhResp -->
Responder's DH exchange message
<!-- enum MIKEYType::variant Error -->
Error message

@@ -0,0 +1,979 @@
<!-- file * -->
<!-- enum VideoColorMatrix -->
The color matrix is used to convert between Y'PbPr and
non-linear RGB (R'G'B')
<!-- enum VideoColorMatrix::variant Unknown -->
unknown matrix
<!-- enum VideoColorMatrix::variant Rgb -->
identity matrix
<!-- enum VideoColorMatrix::variant Fcc -->
FCC color matrix
<!-- enum VideoColorMatrix::variant Bt709 -->
ITU-R BT.709 color matrix
<!-- enum VideoColorMatrix::variant Bt601 -->
ITU-R BT.601 color matrix
<!-- enum VideoColorMatrix::variant Smpte240m -->
SMPTE 240M color matrix
<!-- enum VideoColorMatrix::variant Bt2020 -->
ITU-R BT.2020 color matrix. Since: 1.6
<!-- enum VideoColorPrimaries -->
The color primaries define how to transform linear RGB values to and from
the CIE XYZ colorspace.
<!-- enum VideoColorPrimaries::variant Unknown -->
unknown color primaries
<!-- enum VideoColorPrimaries::variant Bt709 -->
BT709 primaries
<!-- enum VideoColorPrimaries::variant Bt470m -->
BT470M primaries
<!-- enum VideoColorPrimaries::variant Bt470bg -->
BT470BG primaries
<!-- enum VideoColorPrimaries::variant Smpte170m -->
SMPTE170M primaries
<!-- enum VideoColorPrimaries::variant Smpte240m -->
SMPTE240M primaries
<!-- enum VideoColorPrimaries::variant Film -->
Generic film
<!-- enum VideoColorPrimaries::variant Bt2020 -->
BT2020 primaries. Since: 1.6
<!-- enum VideoColorPrimaries::variant Adobergb -->
Adobe RGB primaries. Since: 1.8
<!-- enum VideoColorRange -->
Possible color range values. These constants are defined for 8 bit color
values and can be scaled for other bit depths.
<!-- enum VideoColorRange::variant Unknown -->
unknown range
<!-- enum VideoColorRange::variant 0255 -->
[0..255] for 8 bit components
<!-- enum VideoColorRange::variant 16235 -->
[16..235] for 8 bit components. Chroma has
[16..240] range.
<!-- struct VideoColorimetry -->
Structure describing the color info.
<!-- impl VideoColorimetry::fn from_string -->
Parse the colorimetry string and update `self` with the parsed
values.
## `color`
a colorimetry string
# Returns
`true` if `color` points to valid colorimetry info.
<!-- impl VideoColorimetry::fn is_equal -->
Compare the 2 colorimetry sets for equality
## `other`
another `VideoColorimetry`
# Returns
`true` if `self` and `other` are equal.
<!-- impl VideoColorimetry::fn matches -->
Check if the colorimetry information in `info` matches that of the
string `color`.
## `color`
a colorimetry string
# Returns
`true` if `color` conveys the same colorimetry info as the color
information in `info`.
<!-- impl VideoColorimetry::fn to_string -->
Make a string representation of `self`.
# Returns
a string representation of `self`.
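A round-trip C sketch for the parsing and serialization functions above:

```C
GstVideoColorimetry cinfo;

if (gst_video_colorimetry_from_string (&cinfo, "bt709")) {
  gchar *s = gst_video_colorimetry_to_string (&cinfo);

  g_print ("parsed colorimetry: %s\n", s);
  g_free (s);
}
```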
<!-- enum VideoFieldOrder -->
Field order of interlaced content. This is only valid for
interlace-mode=interleaved and not interlace-mode=mixed. In the case of
mixed or GST_VIDEO_FIELD_ORDER_UNKNOWN, the field order is signalled via
buffer flags.
<!-- enum VideoFieldOrder::variant Unknown -->
unknown field order for interlaced content.
The actual field order is signalled via buffer flags.
<!-- enum VideoFieldOrder::variant TopFieldFirst -->
top field is first
<!-- enum VideoFieldOrder::variant BottomFieldFirst -->
bottom field is first
Feature: `v1_12`
<!-- struct VideoFilter -->
Provides useful functions and a base class for video filters.
The videofilter will by default enable QoS on the parent GstBaseTransform
to implement frame dropping.
# Implements
[`gst_base::BaseTransformExt`](../gst_base/trait.BaseTransformExt.html), [`gst::ElementExt`](../gst/trait.ElementExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- enum VideoFormat -->
Enum value describing the most common video formats.
<!-- enum VideoFormat::variant Unknown -->
Unknown or unset video format id
<!-- enum VideoFormat::variant Encoded -->
Encoded video format. Only ever use that in caps for
special video formats in combination with non-system
memory GstCapsFeatures where it does not make sense
to specify a real video format.
<!-- enum VideoFormat::variant I420 -->
planar 4:2:0 YUV
<!-- enum VideoFormat::variant Yv12 -->
planar 4:2:0 YVU (like I420 but UV planes swapped)
<!-- enum VideoFormat::variant Yuy2 -->
packed 4:2:2 YUV (Y0-U0-Y1-V0 Y2-U2-Y3-V2 Y4 ...)
<!-- enum VideoFormat::variant Uyvy -->
packed 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
<!-- enum VideoFormat::variant Ayuv -->
packed 4:4:4 YUV with alpha channel (A0-Y0-U0-V0 ...)
<!-- enum VideoFormat::variant Rgbx -->
sparse rgb packed into 32 bit, space last
<!-- enum VideoFormat::variant Bgrx -->
sparse reverse rgb packed into 32 bit, space last
<!-- enum VideoFormat::variant Xrgb -->
sparse rgb packed into 32 bit, space first
<!-- enum VideoFormat::variant Xbgr -->
sparse reverse rgb packed into 32 bit, space first
<!-- enum VideoFormat::variant Rgba -->
rgb with alpha channel last
<!-- enum VideoFormat::variant Bgra -->
reverse rgb with alpha channel last
<!-- enum VideoFormat::variant Argb -->
rgb with alpha channel first
<!-- enum VideoFormat::variant Abgr -->
reverse rgb with alpha channel first
<!-- enum VideoFormat::variant Rgb -->
rgb
<!-- enum VideoFormat::variant Bgr -->
reverse rgb
<!-- enum VideoFormat::variant Y41b -->
planar 4:1:1 YUV
<!-- enum VideoFormat::variant Y42b -->
planar 4:2:2 YUV
<!-- enum VideoFormat::variant Yvyu -->
packed 4:2:2 YUV (Y0-V0-Y1-U0 Y2-V2-Y3-U2 Y4 ...)
<!-- enum VideoFormat::variant Y444 -->
planar 4:4:4 YUV
<!-- enum VideoFormat::variant V210 -->
packed 4:2:2 10-bit YUV, complex format
<!-- enum VideoFormat::variant V216 -->
packed 4:2:2 16-bit YUV, Y0-U0-Y1-V1 order
<!-- enum VideoFormat::variant Nv12 -->
planar 4:2:0 YUV with interleaved UV plane
<!-- enum VideoFormat::variant Nv21 -->
planar 4:2:0 YUV with interleaved VU plane
<!-- enum VideoFormat::variant Gray8 -->
8-bit grayscale
<!-- enum VideoFormat::variant Gray16Be -->
16-bit grayscale, most significant byte first
<!-- enum VideoFormat::variant Gray16Le -->
16-bit grayscale, least significant byte first
<!-- enum VideoFormat::variant V308 -->
packed 4:4:4 YUV (Y-U-V ...)
<!-- enum VideoFormat::variant Rgb16 -->
rgb 5-6-5 bits per component
<!-- enum VideoFormat::variant Bgr16 -->
reverse rgb 5-6-5 bits per component
<!-- enum VideoFormat::variant Rgb15 -->
rgb 5-5-5 bits per component
<!-- enum VideoFormat::variant Bgr15 -->
reverse rgb 5-5-5 bits per component
<!-- enum VideoFormat::variant Uyvp -->
packed 10-bit 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
<!-- enum VideoFormat::variant A420 -->
planar 4:4:2:0 AYUV
<!-- enum VideoFormat::variant Rgb8p -->
8-bit paletted RGB
<!-- enum VideoFormat::variant Yuv9 -->
planar 4:1:0 YUV
<!-- enum VideoFormat::variant Yvu9 -->
planar 4:1:0 YUV (like YUV9 but UV planes swapped)
<!-- enum VideoFormat::variant Iyu1 -->
packed 4:1:1 YUV (Cb-Y0-Y1-Cr-Y2-Y3 ...)
<!-- enum VideoFormat::variant Argb64 -->
rgb with alpha channel first, 16 bits per channel
<!-- enum VideoFormat::variant Ayuv64 -->
packed 4:4:4 YUV with alpha channel, 16 bits per channel (A0-Y0-U0-V0 ...)
<!-- enum VideoFormat::variant R210 -->
packed 4:4:4 RGB, 10 bits per channel
<!-- enum VideoFormat::variant I42010be -->
planar 4:2:0 YUV, 10 bits per channel
<!-- enum VideoFormat::variant I42010le -->
planar 4:2:0 YUV, 10 bits per channel
<!-- enum VideoFormat::variant I42210be -->
planar 4:2:2 YUV, 10 bits per channel
<!-- enum VideoFormat::variant I42210le -->
planar 4:2:2 YUV, 10 bits per channel
<!-- enum VideoFormat::variant Y44410be -->
planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
<!-- enum VideoFormat::variant Y44410le -->
planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
<!-- enum VideoFormat::variant Gbr -->
planar 4:4:4 RGB, 8 bits per channel (Since: 1.2)
<!-- enum VideoFormat::variant Gbr10be -->
planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
<!-- enum VideoFormat::variant Gbr10le -->
planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
<!-- enum VideoFormat::variant Nv16 -->
planar 4:2:2 YUV with interleaved UV plane (Since: 1.2)
<!-- enum VideoFormat::variant Nv24 -->
planar 4:4:4 YUV with interleaved UV plane (Since: 1.2)
<!-- enum VideoFormat::variant Nv1264z32 -->
NV12 with 64x32 tiling in zigzag pattern (Since: 1.4)
<!-- enum VideoFormat::variant A42010be -->
planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
<!-- enum VideoFormat::variant A42010le -->
planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
<!-- enum VideoFormat::variant A42210be -->
planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
<!-- enum VideoFormat::variant A42210le -->
planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
<!-- enum VideoFormat::variant A44410be -->
planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
<!-- enum VideoFormat::variant A44410le -->
planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
<!-- enum VideoFormat::variant Nv61 -->
planar 4:2:2 YUV with interleaved VU plane (Since: 1.6)
<!-- enum VideoFormat::variant P01010be -->
planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
<!-- enum VideoFormat::variant P01010le -->
planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
<!-- enum VideoFormat::variant Iyu2 -->
packed 4:4:4 YUV (U-Y-V ...) (Since: 1.10)
<!-- enum VideoFormat::variant Vyuy -->
packed 4:2:2 YUV (V0-Y0-U0-Y1 V2-Y2-U2-Y3 V4 ...)
<!-- enum VideoFormat::variant Gbra -->
planar 4:4:4:4 ARGB, 8 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant Gbra10be -->
planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant Gbra10le -->
planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant Gbr12be -->
planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant Gbr12le -->
planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant Gbra12be -->
planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant Gbra12le -->
planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant I42012be -->
planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant I42012le -->
planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant I42212be -->
planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant I42212le -->
planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant Y44412be -->
planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant Y44412le -->
planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
<!-- enum VideoFormat::variant Gray10Le32 -->
10-bit grayscale, packed into 32-bit words (2 bits padding) (Since: 1.14)
<!-- enum VideoFormat::variant Nv1210le32 -->
10-bit variant of `VideoFormat::Nv12`, packed into 32-bit words (MSB 2 bits padding) (Since: 1.14)
<!-- enum VideoFormat::variant Nv1610le32 -->
10-bit variant of `VideoFormat::Nv16`, packed into 32-bit words (MSB 2 bits padding) (Since: 1.14)
<!-- struct VideoFormatInfo -->
Information for a video format.
<!-- struct VideoFrame -->
A video frame obtained from `VideoFrame::map`
<!-- impl VideoFrame::fn copy -->
Copy the contents from `src` to `self`.
## `src`
a `VideoFrame`
# Returns
`true` if the contents could be copied.
<!-- impl VideoFrame::fn copy_plane -->
Copy the plane with index `plane` from `src` to `self`.
## `src`
a `VideoFrame`
## `plane`
a plane
# Returns
`true` if the contents could be copied.
<!-- impl VideoFrame::fn map -->
Use `info` and `buffer` to fill in the values of `self`. `self` is usually
allocated on the stack, and you pass the address of that stack-allocated
`VideoFrame` structure; `VideoFrame::map` will then fill in the
structure with the various video-specific information you need to access
the pixels of the video buffer. You can then use accessor macros such as
GST_VIDEO_FRAME_COMP_DATA(), GST_VIDEO_FRAME_PLANE_DATA(),
GST_VIDEO_FRAME_COMP_STRIDE(), GST_VIDEO_FRAME_PLANE_STRIDE() etc.
to get to the pixels.
```C
GstVideoFrame vframe;
...
// set RGB pixels to black one at a time
if (gst_video_frame_map (&vframe, video_info, video_buffer, GST_MAP_WRITE)) {
  guint8 *pixels = GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
  guint stride = GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
  guint pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (&vframe, 0);
  guint h, w;
  for (h = 0; h < height; ++h) {
    for (w = 0; w < width; ++w) {
      guint8 *pixel = pixels + h * stride + w * pixel_stride;
      memset (pixel, 0, pixel_stride);
    }
  }
  gst_video_frame_unmap (&vframe);
}
...
```
All video planes of `buffer` will be mapped and the pointers will be set in
`self`->data.
The purpose of this function is to make it easy for you to get to the video
pixels in a generic way, without you having to worry too much about details
such as whether the video data is allocated in one contiguous memory chunk
or multiple memory chunks (e.g. one for each plane); or if custom strides
and custom plane offsets are used or not (as signalled by GstVideoMeta on
each buffer). This function will just fill the `VideoFrame` structure
with the right values; if you use the accessor macros, everything will
just work and you can access the data easily. It also maps the underlying
memory chunks for you.
## `info`
a `VideoInfo`
## `buffer`
the buffer to map
## `flags`
`gst::MapFlags`
# Returns
`true` on success.
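In the Rust bindings, mapping is done by constructing a `VideoFrame` from a buffer rather than by calling a map method on a stack-allocated struct. A minimal sketch, assuming the `from_buffer_readable`, `plane_data` and `plane_stride` names from the gstreamer-video crate (their exact signatures differ between releases):
```rust
use gstreamer as gst;
use gstreamer_video as gst_video;

// Sum the first byte of every row of plane 0, honouring the stride.
fn sum_first_column(buffer: gst::Buffer, info: &gst_video::VideoInfo) -> Option<u64> {
    // Maps all planes of the buffer, like gst_video_frame_map() with GST_MAP_READ.
    let frame = gst_video::VideoFrame::from_buffer_readable(buffer, info).ok()?;
    let stride = frame.plane_stride()[0] as usize;
    let data = frame.plane_data(0).ok()?;
    Some(data.chunks(stride).map(|row| u64::from(row[0])).sum())
}
```
Dropping the frame unmaps the buffer again, so there is no explicit unmap call.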
<!-- impl VideoFrame::fn map_id -->
Use `info` and `buffer` to fill in the values of `self` with the video frame
information of frame `id`.
When `id` is -1, the default frame is mapped. When `id` != -1, this function
will return `false` when there is no GstVideoMeta with that id.
All video planes of `buffer` will be mapped and the pointers will be set in
`self`->data.
## `info`
a `VideoInfo`
## `buffer`
the buffer to map
## `id`
the frame id to map
## `flags`
`gst::MapFlags`
# Returns
`true` on success.
<!-- impl VideoFrame::fn unmap -->
Unmap the memory previously mapped with `VideoFrame::map`.
<!-- struct VideoInfo -->
Information describing image properties. This information can be filled
in from GstCaps with `VideoInfo::from_caps`. The information is also used
to store the specific video info when mapping a video frame with
`VideoFrame::map`.
Use the provided macros to access the info in this structure.
<!-- impl VideoInfo::fn new -->
Allocate a new `VideoInfo` that is also initialized with
`VideoInfo::init`.
# Returns
a new `VideoInfo`. Free with `VideoInfo::free`.
<!-- impl VideoInfo::fn align -->
Adjust the offset and stride fields in `self` so that the padding and
stride alignment in `align` is respected.
Extra padding will be added to the right side when stride alignment padding
is required and `align` will be updated with the new padding values.
## `align`
alignment parameters
# Returns
`false` if alignment could not be applied, e.g. because the
size of a frame can't be represented as a 32 bit integer (Since: 1.12)
<!-- impl VideoInfo::fn convert -->
Converts among various `gst::Format` types. This function handles
GST_FORMAT_BYTES, GST_FORMAT_TIME, and GST_FORMAT_DEFAULT. For
raw video, GST_FORMAT_DEFAULT corresponds to video frames. This
function can be used to handle pad queries of the type GST_QUERY_CONVERT.
## `src_format`
`gst::Format` of the `src_value`
## `src_value`
value to convert
## `dest_format`
`gst::Format` of the `dest_value`
## `dest_value`
pointer to destination value
# Returns
`true` if the conversion was successful.
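To make the conversion concrete, here is the arithmetic the function performs for raw video, done by hand rather than through the binding: one GST_FORMAT_DEFAULT unit is one frame, a frame occupies `info.size()` bytes, and the frame rate turns frames into time. This is only a sketch of the underlying math, not a call into `VideoInfo::convert` itself:
```rust
use gstreamer_video as gst_video;

// BYTES -> DEFAULT: for raw video, one frame occupies info.size() bytes.
fn bytes_to_frames(info: &gst_video::VideoInfo, bytes: u64) -> u64 {
    bytes / info.size() as u64
}

// DEFAULT -> TIME: each frame lasts fps_den / fps_num seconds.
fn frames_to_nanoseconds(frames: u64, fps_num: u64, fps_den: u64) -> u64 {
    frames * 1_000_000_000 * fps_den / fps_num
}
```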
<!-- impl VideoInfo::fn copy -->
Copy a GstVideoInfo structure.
# Returns
a new `VideoInfo`. Free with `VideoInfo::free`.
<!-- impl VideoInfo::fn free -->
Free a GstVideoInfo structure previously allocated with `VideoInfo::new`
or `VideoInfo::copy`.
<!-- impl VideoInfo::fn from_caps -->
Parse `caps` and update `self`.
## `caps`
a `gst::Caps`
# Returns
`true` if `caps` could be parsed.
<!-- impl VideoInfo::fn init -->
Initialize `self` with default values.
<!-- impl VideoInfo::fn is_equal -->
Compares two `VideoInfo` and returns whether they are equal.
## `other`
a `VideoInfo`
# Returns
`true` if `self` and `other` are equal, else `false`.
<!-- impl VideoInfo::fn set_format -->
Set the default info for a video frame of `format` and `width` and `height`.
Note: This initializes `self` first, no values are preserved. This function
does not set the offsets correctly for interlaced vertically
subsampled formats.
## `format`
the format
## `width`
a width
## `height`
a height
# Returns
`false` if the returned video info is invalid, e.g. because the
size of a frame can't be represented as a 32 bit integer (Since: 1.12)
<!-- impl VideoInfo::fn to_caps -->
Convert the values of `self` into a `gst::Caps`.
# Returns
a new `gst::Caps` containing the info of `self`.
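A quick round trip through caps ties `to_caps`, `from_caps` and `is_equal` together. This sketch uses the `VideoInfo::new(...).fps(...).build()` builder spelling that appears elsewhere in this changeset (newer releases spell it `VideoInfo::builder`); `expect` stands in for real error handling:
```rust
use gstreamer as gst;
use gstreamer_video as gst_video;

fn main() {
    gst::init().unwrap();
    let info = gst_video::VideoInfo::new(gst_video::VideoFormat::Bgrx, 320, 240)
        .fps(gst::Fraction::new(30, 1))
        .build()
        .expect("failed to create video info");
    // VideoInfo -> Caps -> VideoInfo should be lossless.
    let caps = info.to_caps().expect("failed to build caps");
    let parsed = gst_video::VideoInfo::from_caps(&caps).expect("failed to parse caps");
    assert!(parsed.is_equal(&info));
}
```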
<!-- enum VideoInterlaceMode -->
The possible values of the `VideoInterlaceMode` describing the interlace
mode of the stream.
<!-- enum VideoInterlaceMode::variant Progressive -->
all frames are progressive
<!-- enum VideoInterlaceMode::variant Interleaved -->
2 fields are interleaved in one video
frame. Extra buffer flags describe the field order.
<!-- enum VideoInterlaceMode::variant Mixed -->
frames contain both interlaced and
progressive video; the buffer flags describe the frame and fields.
<!-- enum VideoInterlaceMode::variant Fields -->
2 fields are stored in one buffer, use the
frame ID to get access to the required field. For multiview (the
'views' property > 1) the fields of view N can be found at frame ID
(N * 2) and (N * 2) + 1.
Each field has only half the number of lines given in the
height property. This mode requires multiple GstVideoMeta metadata
to describe the fields.
<!-- enum VideoMultiviewFramePacking -->
`VideoMultiviewFramePacking` represents the subset of `VideoMultiviewMode`
values that can be applied to any video frame without needing extra metadata.
It can be used by elements that provide a property to override the
multiview interpretation of a video stream when the video doesn't contain
any markers.
This enum is used (for example) on playbin, to re-interpret a played
video stream as a stereoscopic video. The individual enum values are
equivalent to and have the same value as the matching `VideoMultiviewMode`.
<!-- enum VideoMultiviewFramePacking::variant None -->
A special value indicating
no frame packing info.
<!-- enum VideoMultiviewFramePacking::variant Mono -->
All frames are monoscopic.
<!-- enum VideoMultiviewFramePacking::variant Left -->
All frames represent a left-eye view.
<!-- enum VideoMultiviewFramePacking::variant Right -->
All frames represent a right-eye view.
<!-- enum VideoMultiviewFramePacking::variant SideBySide -->
Left and right eye views are
provided in the left and right half of the frame respectively.
<!-- enum VideoMultiviewFramePacking::variant SideBySideQuincunx -->
Left and right eye
views are provided in the left and right half of the frame, but
have been sampled using quincunx method, with half-pixel offset
between the 2 views.
<!-- enum VideoMultiviewFramePacking::variant ColumnInterleaved -->
Alternating vertical
columns of pixels represent the left and right eye view respectively.
<!-- enum VideoMultiviewFramePacking::variant RowInterleaved -->
Alternating horizontal
rows of pixels represent the left and right eye view respectively.
<!-- enum VideoMultiviewFramePacking::variant TopBottom -->
The top half of the frame
contains the left eye, and the bottom half the right eye.
<!-- enum VideoMultiviewFramePacking::variant Checkerboard -->
Pixels are arranged with
alternating pixels representing left and right eye views in a
checkerboard fashion.
<!-- enum VideoMultiviewMode -->
All possible stereoscopic 3D and multiview representations.
In conjunction with `VideoMultiviewFlags`, describes how
multiview content is being transported in the stream.
<!-- enum VideoMultiviewMode::variant None -->
A special value indicating
no multiview information. Used in GstVideoInfo and other places to
indicate that no specific multiview handling has been requested or
provided. This value is never carried on caps.
<!-- enum VideoMultiviewMode::variant Mono -->
All frames are monoscopic.
<!-- enum VideoMultiviewMode::variant Left -->
All frames represent a left-eye view.
<!-- enum VideoMultiviewMode::variant Right -->
All frames represent a right-eye view.
<!-- enum VideoMultiviewMode::variant SideBySide -->
Left and right eye views are
provided in the left and right half of the frame respectively.
<!-- enum VideoMultiviewMode::variant SideBySideQuincunx -->
Left and right eye
views are provided in the left and right half of the frame, but
have been sampled using quincunx method, with half-pixel offset
between the 2 views.
<!-- enum VideoMultiviewMode::variant ColumnInterleaved -->
Alternating vertical
columns of pixels represent the left and right eye view respectively.
<!-- enum VideoMultiviewMode::variant RowInterleaved -->
Alternating horizontal
rows of pixels represent the left and right eye view respectively.
<!-- enum VideoMultiviewMode::variant TopBottom -->
The top half of the frame
contains the left eye, and the bottom half the right eye.
<!-- enum VideoMultiviewMode::variant Checkerboard -->
Pixels are arranged with
alternating pixels representing left and right eye views in a
checkerboard fashion.
<!-- enum VideoMultiviewMode::variant FrameByFrame -->
Left and right eye views
are provided in separate frames alternately.
<!-- enum VideoMultiviewMode::variant MultiviewFrameByFrame -->
Multiple
independent views are provided in separate frames in sequence.
This method only applies to raw video buffers at the moment.
Specific view identification is via the `GstVideoMultiviewMeta`
and `VideoMeta`(s) on raw video buffers.
<!-- enum VideoMultiviewMode::variant Separated -->
Multiple views are
provided as separate `gst::Memory` framebuffers attached to each
`gst::Buffer`, described by the `GstVideoMultiviewMeta`
and `VideoMeta`(s)
<!-- struct VideoOverlay -->
The `VideoOverlay` interface is used for two main purposes:
* To get a grab on the Window where the video sink element is going to render.
This is achieved by either being informed about the Window identifier that
the video sink element generated, or by forcing the video sink element to use
a specific Window identifier for rendering.
* To force a redrawing of the latest video frame the video sink element
displayed on the Window. Indeed if the `gst::Pipeline` is in `gst::State::Paused`
state, moving the Window around will damage its content. Application
developers will want to handle the Expose events themselves and force the
video sink element to refresh the Window's content.
Using the Window created by the video sink is probably the simplest scenario;
in some cases, though, it might not be flexible enough for application
developers who need to catch events such as mouse moves and button
clicks.
Setting a specific Window identifier on the video sink element is the most
flexible solution but it has some issues. Indeed the application needs to set
its Window identifier at the right time to avoid internal Window creation
from the video sink element. To solve this issue a `gst::Message` is posted on
the bus to inform the application that it should set the Window identifier
immediately. Here is an example of how to do that correctly:
```text
static GstBusSyncReply
create_window (GstBus * bus, GstMessage * message, GstPipeline * pipeline)
{
// ignore anything but 'prepare-window-handle' element messages
if (!gst_is_video_overlay_prepare_window_handle_message (message))
return GST_BUS_PASS;
win = XCreateSimpleWindow (disp, root, 0, 0, 320, 240, 0, 0, 0);
XSetWindowBackgroundPixmap (disp, win, None);
XMapRaised (disp, win);
XSync (disp, FALSE);
gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message)),
win);
gst_message_unref (message);
return GST_BUS_DROP;
}
...
int
main (int argc, char **argv)
{
...
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, pipeline,
NULL);
...
}
```
## Two basic usage scenarios
There are two basic usage scenarios: in the simplest case, the application
uses `playbin` or `playsink` or knows exactly what particular element is used
for video output, which is usually the case when the application creates
the videosink to use (e.g. `xvimagesink`, `ximagesink`, etc.) itself; in this
case, the application can just create the videosink element, create and
realize the window to render the video on and then
call `VideoOverlay::set_window_handle` directly with the XID or native
window handle, before starting up the pipeline.
As `playbin` and `playsink` implement the video overlay interface and proxy
it transparently to the actual video sink even if it is created later, this
case also applies when using these elements.
In the other and more common case, the application does not know in advance
what GStreamer video sink element will be used for video output. This is
usually the case when an element such as `autovideosink` is used.
In this case, the video sink element itself is created
asynchronously from a GStreamer streaming thread some time after the
pipeline has been started up. When that happens, however, the video sink
will need to know right then whether to render onto an already existing
application window or whether to create its own window. This is when it
posts a prepare-window-handle message, and that is also why this message needs
to be handled in a sync bus handler which will be called from the streaming
thread directly (because the video sink will need an answer right then).
As response to the prepare-window-handle element message in the bus sync
handler, the application may use `VideoOverlay::set_window_handle` to tell
the video sink to render onto an existing window surface. At this point the
application should already have obtained the window handle / XID, so it
just needs to set it. It is generally not advisable to call any GUI toolkit
functions or window system functions from the streaming thread in which the
prepare-window-handle message is handled, because most GUI toolkits and
windowing systems are not thread-safe at all and a lot of care would be
required to co-ordinate the toolkit and window system calls of the
different threads (Gtk+ users please note: prior to Gtk+ 2.18
GDK_WINDOW_XID() was just a simple structure access, so generally fine to do
within the bus sync handler; this macro was changed to a function call in
Gtk+ 2.18 and later, which is likely to cause problems when called from a
sync handler; see below for a better approach without GDK_WINDOW_XID()
used in the callback).
## GstVideoOverlay and Gtk+
```text
#include <gst/video/videooverlay.h>
#include <gtk/gtk.h>
#ifdef GDK_WINDOWING_X11
#include <gdk/gdkx.h> // for GDK_WINDOW_XID
#endif
#ifdef GDK_WINDOWING_WIN32
#include <gdk/gdkwin32.h> // for GDK_WINDOW_HWND
#endif
...
static guintptr video_window_handle = 0;
...
static GstBusSyncReply
bus_sync_handler (GstBus * bus, GstMessage * message, gpointer user_data)
{
// ignore anything but 'prepare-window-handle' element messages
if (!gst_is_video_overlay_prepare_window_handle_message (message))
return GST_BUS_PASS;
if (video_window_handle != 0) {
GstVideoOverlay *overlay;
// GST_MESSAGE_SRC (message) will be the video sink element
overlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message));
gst_video_overlay_set_window_handle (overlay, video_window_handle);
} else {
g_warning ("Should have obtained video_window_handle by now!");
}
gst_message_unref (message);
return GST_BUS_DROP;
}
...
static void
video_widget_realize_cb (GtkWidget * widget, gpointer data)
{
#if GTK_CHECK_VERSION(2,18,0)
// Tell Gtk+/Gdk to create a native window for this widget instead of
// drawing onto the parent widget.
// This is here just for pedagogical purposes, GDK_WINDOW_XID will call
// it as well in newer Gtk versions
if (!gdk_window_ensure_native (widget->window))
g_error ("Couldn't create native window needed for GstVideoOverlay!");
#endif
#ifdef GDK_WINDOWING_X11
{
gulong xid = GDK_WINDOW_XID (gtk_widget_get_window (widget));
video_window_handle = xid;
}
#endif
#ifdef GDK_WINDOWING_WIN32
{
HWND wnd = GDK_WINDOW_HWND (gtk_widget_get_window (widget));
video_window_handle = (guintptr) wnd;
}
#endif
}
...
int
main (int argc, char **argv)
{
GtkWidget *video_window;
GtkWidget *app_window;
...
app_window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
...
video_window = gtk_drawing_area_new ();
g_signal_connect (video_window, "realize",
G_CALLBACK (video_widget_realize_cb), NULL);
gtk_widget_set_double_buffered (video_window, FALSE);
...
// usually the video_window will not be directly embedded into the
// application window like this, but there will be many other widgets
// and the video window will be embedded in one of them instead
gtk_container_add (GTK_CONTAINER (app_window), video_window);
...
// show the GUI
gtk_widget_show_all (app_window);
// realize window now so that the video window gets created and we can
// obtain its XID/HWND before the pipeline is started up and the videosink
// asks for the XID/HWND of the window to render onto
gtk_widget_realize (video_window);
// we should have the XID/HWND now
g_assert (video_window_handle != 0);
...
// set up sync handler for setting the xid once the pipeline is started
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_set_sync_handler (bus, (GstBusSyncHandler) bus_sync_handler, NULL,
NULL);
gst_object_unref (bus);
...
gst_element_set_state (pipeline, GST_STATE_PLAYING);
...
}
```
## GstVideoOverlay and Qt
```text
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <QApplication>
#include <QTimer>
#include <QWidget>
int main(int argc, char *argv[])
{
if (!g_thread_supported ())
g_thread_init (NULL);
gst_init (&argc, &argv);
QApplication app(argc, argv);
app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit ()));
// prepare the pipeline
GstElement *pipeline = gst_pipeline_new ("xvoverlay");
GstElement *src = gst_element_factory_make ("videotestsrc", NULL);
GstElement *sink = gst_element_factory_make ("xvimagesink", NULL);
gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
gst_element_link (src, sink);
// prepare the ui
QWidget window;
window.resize(320, 240);
window.show();
WId xwinid = window.winId();
gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), xwinid);
// run the pipeline
GstStateChangeReturn sret = gst_element_set_state (pipeline,
GST_STATE_PLAYING);
if (sret == GST_STATE_CHANGE_FAILURE) {
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
// Exit application
QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
}
int ret = app.exec();
window.hide();
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return ret;
}
```
# Implements
[`VideoOverlayExt`](trait.VideoOverlayExt.html)
<!-- trait VideoOverlayExt -->
Trait containing all `VideoOverlay` methods.
# Implementors
[`VideoOverlay`](struct.VideoOverlay.html)
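The prepare-window-handle dance from the C examples above maps onto the Rust bindings roughly as follows. This is a hedged sketch: `is_video_overlay_prepare_window_handle_message` and the unsafe `set_window_handle` follow the gstreamer-video crate (method names such as `src()` have varied across releases), and the handle itself would come from the toolkit as an XID or HWND; here it is just a parameter:
```rust
use gstreamer as gst;
use gstreamer_video as gst_video;
use gst_video::prelude::*;

// Install a sync handler that hands an existing native window to the sink.
fn install_sync_handler(pipeline: &gst::Pipeline, window_handle: usize) {
    let bus = pipeline.bus().expect("pipeline without bus");
    bus.set_sync_handler(move |_bus, msg| {
        // Ignore anything but 'prepare-window-handle' element messages.
        if !gst_video::is_video_overlay_prepare_window_handle_message(msg) {
            return gst::BusSyncReply::Pass;
        }
        if let Some(overlay) = msg
            .src()
            .and_then(|src| src.dynamic_cast_ref::<gst_video::VideoOverlay>())
        {
            // Safety: the handle must remain valid while the sink uses it.
            unsafe { overlay.set_window_handle(window_handle) };
        }
        gst::BusSyncReply::Drop
    });
}
```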
<!-- impl VideoOverlay::fn install_properties -->
This helper shall be used by classes implementing the `VideoOverlay`
interface that want the render rectangle to be controllable using
properties. It installs a "render-rectangle" property on the
class.
Since 1.14
## `oclass`
The class on which the properties will be installed
## `last_prop_id`
The first free property ID to use
<!-- impl VideoOverlay::fn set_property -->
This helper shall be used by classes implementing the `VideoOverlay`
interface that want the render rectangle to be controllable using
properties. This helper will parse and set the render rectangle calling
`VideoOverlay::set_render_rectangle`.
## `object`
The instance on which the property is set
## `last_prop_id`
The highest property ID.
## `property_id`
The property ID
## `value`
The `gobject::Value` to be set
# Returns
`true` if the `property_id` matches the GstVideoOverlay property
Since 1.14
<!-- trait VideoOverlayExt::fn expose -->
Tell an overlay that it has been exposed. This will redraw the current frame
in the drawable even if the pipeline is PAUSED.
<!-- trait VideoOverlayExt::fn got_window_handle -->
This will post a "have-window-handle" element message on the bus.
This function should only be used by video overlay plugin developers.
## `handle`
a platform-specific handle referencing the window
<!-- trait VideoOverlayExt::fn handle_events -->
Tell an overlay that it should handle events from the window system. These
events are forwarded upstream as navigation events. In some window systems,
events are not propagated in the window hierarchy if a client is listening
for them. This method allows you to disable event handling completely
from the `VideoOverlay`.
## `handle_events`
a `gboolean` indicating if events should be handled or not.
<!-- trait VideoOverlayExt::fn prepare_window_handle -->
This will post a "prepare-window-handle" element message on the bus
to give applications an opportunity to call
`VideoOverlay::set_window_handle` before a plugin creates its own
window.
This function should only be used by video overlay plugin developers.
<!-- trait VideoOverlayExt::fn set_render_rectangle -->
Configure a subregion as a video target within the window set by
`VideoOverlay::set_window_handle`. If this is not used or not supported,
the video will fill the whole area of the window set as the overlay.
By specifying the rectangle, the video can be overlaid onto a specific region
of that window only. After setting the new rectangle one should call
`VideoOverlay::expose` to force a redraw. To unset the region, pass -1 for
the `width` and `height` parameters.
This method is needed for non-fullscreen video overlay in UI toolkits that
do not support subwindows.
## `x`
the horizontal offset of the render area inside the window
## `y`
the vertical offset of the render area inside the window
## `width`
the width of the render area inside the window
## `height`
the height of the render area inside the window
# Returns
`false` if not supported by the sink.
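For instance, to confine playback to the top-left quarter of a 1280x720 window and trigger a repaint (a sketch; the return type of `set_render_rectangle` has changed between releases, hence the discarded result):
```rust
use gstreamer_video as gst_video;
use gst_video::prelude::*;

fn letterbox(overlay: &gst_video::VideoOverlay) {
    // Render only into the top-left 640x360 region of the window.
    let _ = overlay.set_render_rectangle(0, 0, 640, 360);
    // Force a redraw so the new rectangle takes effect immediately.
    overlay.expose();
}
```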
<!-- trait VideoOverlayExt::fn set_window_handle -->
This will call the video overlay's set_window_handle method. You
should use this method to tell an overlay to display video output in a
specific window (e.g. an XWindow on X11). Passing 0 as the `handle` will
tell the overlay to stop using that window and create an internal one.
## `handle`
a handle referencing the window.
<!-- enum VideoTileMode -->
Enum value describing the available tiling modes.
<!-- enum VideoTileMode::variant Unknown -->
Unknown or unset tile mode
<!-- enum VideoTileMode::variant Zflipz2x2 -->
Every four adjacent blocks (two
horizontally and two vertically) are grouped together and are located
in memory in Z or flipped-Z order. In the case of odd rows, the last row
of blocks is arranged in linear order.
<!-- enum VideoTransferFunction -->
The video transfer function defines the formula for converting between
non-linear RGB (R'G'B') and linear RGB.
<!-- enum VideoTransferFunction::variant Unknown -->
unknown transfer function
<!-- enum VideoTransferFunction::variant Gamma10 -->
linear RGB, gamma 1.0 curve
<!-- enum VideoTransferFunction::variant Gamma18 -->
Gamma 1.8 curve
<!-- enum VideoTransferFunction::variant Gamma20 -->
Gamma 2.0 curve
<!-- enum VideoTransferFunction::variant Gamma22 -->
Gamma 2.2 curve
<!-- enum VideoTransferFunction::variant Bt709 -->
Gamma 2.2 curve with a linear segment in the lower
range
<!-- enum VideoTransferFunction::variant Smpte240m -->
Gamma 2.2 curve with a linear segment in the
lower range
<!-- enum VideoTransferFunction::variant Srgb -->
Gamma 2.4 curve with a linear segment in the lower
range
<!-- enum VideoTransferFunction::variant Gamma28 -->
Gamma 2.8 curve
<!-- enum VideoTransferFunction::variant Log100 -->
Logarithmic transfer characteristic
100:1 range
<!-- enum VideoTransferFunction::variant Log316 -->
Logarithmic transfer characteristic
316.22777:1 range
<!-- enum VideoTransferFunction::variant Bt202012 -->
Gamma 2.2 curve with a linear segment in the lower
range. Used for BT.2020 with 12 bits per
component. Since: 1.6
<!-- enum VideoTransferFunction::variant Adobergb -->
Gamma 2.19921875. Since: 1.8

@@ -0,0 +1,119 @@
<!-- file * -->
<!-- enum WebRTCDTLSSetup -->
GST_WEBRTC_DTLS_SETUP_NONE: none
GST_WEBRTC_DTLS_SETUP_ACTPASS: actpass
GST_WEBRTC_DTLS_SETUP_ACTIVE: active
GST_WEBRTC_DTLS_SETUP_PASSIVE: passive
<!-- struct WebRTCDTLSTransport -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- enum WebRTCDTLSTransportState -->
GST_WEBRTC_DTLS_TRANSPORT_STATE_NEW: new
GST_WEBRTC_DTLS_TRANSPORT_STATE_CLOSED: closed
GST_WEBRTC_DTLS_TRANSPORT_STATE_FAILED: failed
GST_WEBRTC_DTLS_TRANSPORT_STATE_CONNECTING: connecting
GST_WEBRTC_DTLS_TRANSPORT_STATE_CONNECTED: connected
<!-- enum WebRTCICEComponent -->
GST_WEBRTC_ICE_COMPONENT_RTP,
GST_WEBRTC_ICE_COMPONENT_RTCP,
<!-- enum WebRTCICEConnectionState -->
GST_WEBRTC_ICE_CONNECTION_STATE_NEW: new
GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING: checking
GST_WEBRTC_ICE_CONNECTION_STATE_CONNECTED: connected
GST_WEBRTC_ICE_CONNECTION_STATE_COMPLETED: completed
GST_WEBRTC_ICE_CONNECTION_STATE_FAILED: failed
GST_WEBRTC_ICE_CONNECTION_STATE_DISCONNECTED: disconnected
GST_WEBRTC_ICE_CONNECTION_STATE_CLOSED: closed
See http://w3c.github.io/webrtc-pc/#dom-rtciceconnectionstate
<!-- enum WebRTCICEGatheringState -->
GST_WEBRTC_ICE_GATHERING_STATE_NEW: new
GST_WEBRTC_ICE_GATHERING_STATE_GATHERING: gathering
GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE: complete
See http://w3c.github.io/webrtc-pc/#dom-rtcicegatheringstate
<!-- enum WebRTCICERole -->
GST_WEBRTC_ICE_ROLE_CONTROLLED: controlled
GST_WEBRTC_ICE_ROLE_CONTROLLING: controlling
<!-- struct WebRTCICETransport -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- enum WebRTCPeerConnectionState -->
GST_WEBRTC_PEER_CONNECTION_STATE_NEW: new
GST_WEBRTC_PEER_CONNECTION_STATE_CONNECTING: connecting
GST_WEBRTC_PEER_CONNECTION_STATE_CONNECTED: connected
GST_WEBRTC_PEER_CONNECTION_STATE_DISCONNECTED: disconnected
GST_WEBRTC_PEER_CONNECTION_STATE_FAILED: failed
GST_WEBRTC_PEER_CONNECTION_STATE_CLOSED: closed
See http://w3c.github.io/webrtc-pc/#dom-rtcpeerconnectionstate
<!-- struct WebRTCRTPReceiver -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- struct WebRTCRTPSender -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- struct WebRTCRTPTransceiver -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- enum WebRTCRTPTransceiverDirection -->
<!-- enum WebRTCSDPType -->
GST_WEBRTC_SDP_TYPE_OFFER: offer
GST_WEBRTC_SDP_TYPE_PRANSWER: pranswer
GST_WEBRTC_SDP_TYPE_ANSWER: answer
GST_WEBRTC_SDP_TYPE_ROLLBACK: rollback
See http://w3c.github.io/webrtc-pc/#rtcsdptype
<!-- struct WebRTCSessionDescription -->
See https://www.w3.org/TR/webrtc/#rtcsessiondescription-class
<!-- impl WebRTCSessionDescription::fn new -->
## `type_`
a `WebRTCSDPType`
## `sdp`
a `gst_sdp::SDPMessage`
# Returns
a new `WebRTCSessionDescription` from `type_`
and `sdp`
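A small sketch of constructing a description from raw SDP text, assuming `SDPMessage::parse_buffer` from the gstreamer-sdp crate:
```rust
use gstreamer_sdp as gst_sdp;
use gstreamer_webrtc as gst_webrtc;

// Wrap a textual SDP blob as an offer description.
fn offer_from_sdp(text: &str) -> Option<gst_webrtc::WebRTCSessionDescription> {
    let sdp = gst_sdp::SDPMessage::parse_buffer(text.as_bytes()).ok()?;
    Some(gst_webrtc::WebRTCSessionDescription::new(
        gst_webrtc::WebRTCSDPType::Offer,
        sdp,
    ))
}
```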
<!-- impl WebRTCSessionDescription::fn copy -->
# Returns
a new copy of `self`
<!-- impl WebRTCSessionDescription::fn free -->
Free `self` and all associated resources
<!-- enum WebRTCSignalingState -->
GST_WEBRTC_SIGNALING_STATE_STABLE: stable
GST_WEBRTC_SIGNALING_STATE_CLOSED: closed
GST_WEBRTC_SIGNALING_STATE_HAVE_LOCAL_OFFER: have-local-offer
GST_WEBRTC_SIGNALING_STATE_HAVE_REMOTE_OFFER: have-remote-offer
GST_WEBRTC_SIGNALING_STATE_HAVE_LOCAL_PRANSWER: have-local-pranswer
GST_WEBRTC_SIGNALING_STATE_HAVE_REMOTE_PRANSWER: have-remote-pranswer
See http://w3c.github.io/webrtc-pc/#dom-rtcsignalingstate
<!-- enum WebRTCStatsType -->
GST_WEBRTC_STATS_CODEC: codec
GST_WEBRTC_STATS_INBOUND_RTP: inbound-rtp
GST_WEBRTC_STATS_OUTBOUND_RTP: outbound-rtp
GST_WEBRTC_STATS_REMOTE_INBOUND_RTP: remote-inbound-rtp
GST_WEBRTC_STATS_REMOTE_OUTBOUND_RTP: remote-outbound-rtp
GST_WEBRTC_STATS_CSRC: csrc
GST_WEBRTC_STATS_PEER_CONNECTION: peer-connection
GST_WEBRTC_STATS_DATA_CHANNEL: data-channel
GST_WEBRTC_STATS_STREAM: stream
GST_WEBRTC_STATS_TRANSPORT: transport
GST_WEBRTC_STATS_CANDIDATE_PAIR: candidate-pair
GST_WEBRTC_STATS_LOCAL_CANDIDATE: local-candidate
GST_WEBRTC_STATS_REMOTE_CANDIDATE: remote-candidate
GST_WEBRTC_STATS_CERTIFICATE: certificate

docs/gstreamer/docs.md (new file, 14862 lines)
File diff suppressed because it is too large.

@@ -1,73 +1,45 @@
[package]
name = "examples"
version.workspace = true
license = "MIT"
version = "0.12.0"
authors = ["Sebastian Dröge <sebastian@centricular.com>"]
edition.workspace = true
rust-version.workspace = true
[dependencies]
glib.workspace = true
gst.workspace = true
gst-gl = { workspace = true, optional = true }
gst-gl-egl = { workspace = true, optional = true }
gst-gl-x11 = { workspace = true, optional = true }
gst-app.workspace = true
gst-audio.workspace = true
gst-base.workspace = true
gst-video.workspace = true
gst-pbutils.workspace = true
gst-play = { workspace = true, optional = true }
gst-player = { workspace = true, optional = true }
ges = { workspace = true, optional = true }
gst-sdp = { workspace = true, optional = true }
gst-rtsp = { workspace = true, optional = true }
gst-rtsp-server = { workspace = true, optional = true }
gst-allocators = { workspace = true, optional = true }
gio = { workspace = true, optional = true }
anyhow = "1.0"
byte-slice-cast = "1"
cairo-rs = { workspace = true, features=["use_glib"], optional = true }
derive_more = "0.99.5"
futures = "0.3"
glutin = { version = "0.31", optional = true, default-features = false }
glutin-winit = { version = "0.4", optional = true, default-features = false }
image = { version = "0.24", optional = true, default-features = false, features = ["png", "jpeg"] }
memfd = { version = "0.6", optional = true }
memmap2 = { version = "0.9", optional = true }
pango = { workspace = true, optional = true }
pangocairo = { workspace = true, optional = true }
raw-window-handle = { version = "0.5", optional = true }
uds = { version = "0.4", optional = true }
winit = { version = "0.29", optional = true, default-features = false, features = ["rwh_05"] }
atomic_refcell = "0.1"
data-encoding = "2.0"
once_cell = "1"
[target.'cfg(windows)'.dependencies]
windows = { version = "0.56", features=["Win32_Graphics_Direct3D11",
"Win32_Foundation", "Win32_Graphics_Direct3D", "Win32_Graphics_Dxgi",
"Win32_Graphics_Dxgi_Common", "Win32_Graphics_Direct2D",
"Win32_Graphics_Direct2D_Common", "Win32_Graphics_DirectWrite",
"Win32_Graphics_Imaging", "Win32_System_Com", "Foundation_Numerics"], optional = true }
[target.'cfg(target_os = "macos")'.dependencies]
cocoa = "0.25"
objc = "0.2.7"
[build-dependencies]
gl_generator = { version = "0.14", optional = true }
glib = "0.6"
gstreamer = { version = "0.12", path = "../gstreamer" }
gstreamer-app = { version = "0.12", path = "../gstreamer-app" }
gstreamer-audio = { version = "0.12", path = "../gstreamer-audio" }
gstreamer-video = { version = "0.12", path = "../gstreamer-video" }
gstreamer-pbutils = { version = "0.12", path = "../gstreamer-pbutils" }
gstreamer-player = { version = "0.12", path = "../gstreamer-player", optional = true }
gstreamer-rtsp = { version = "0.12", path = "../gstreamer-rtsp", optional = true }
gstreamer-rtsp-server = { version = "0.12", path = "../gstreamer-rtsp-server", optional = true }
gstreamer-rtsp-server-sys = { version = "0.6", features = ["v1_8"], optional = true }
gstreamer-editing-services = { version = "0.12", path = "../gstreamer-editing-services", optional = true }
gtk = { version = "0.5", features = ["v3_6"], optional = true }
gdk = { version = "0.9", optional = true }
gio = { version = "0.5", optional = true }
futures-preview = { version = "0.2", optional = true }
byte-slice-cast = "0.2"
failure = "0.1"
failure_derive = "0.1"
cairo-rs = { version = "0.5", features=["use_glib"], optional = true }
pango = { version = "0.5", optional = true }
pangocairo = { version = "0.6", optional = true }
[features]
default = []
rtsp-server = ["gst-rtsp-server", "gst-rtsp", "gst-sdp"]
rtsp-server-record = ["gst-rtsp-server", "gst-rtsp", "gio"]
gst-player = ["gstreamer-player"]
ges = ["gstreamer-editing-services"]
gtksink = ["gtk", "gio"]
gtkvideooverlay = ["gtk", "gdk", "gio"]
gtkvideooverlay-x11 = ["gtkvideooverlay"]
gtkvideooverlay-quartz = ["gtkvideooverlay"]
generic-futures = ["gstreamer/futures", "futures-preview"]
glib-futures = ["generic-futures", "glib/futures"]
gst-rtsp-server = ["gstreamer-rtsp-server"]
gst-rtsp-server-record = ["gstreamer-rtsp-server-sys", "gstreamer-rtsp-server", "gstreamer-rtsp", "gio"]
default-features = []
v1_10 = ["gstreamer/v1_10"]
pango-cairo = ["pango", "pangocairo", "cairo-rs"]
overlay-composition = ["pango", "pangocairo", "cairo-rs"]
gl = ["dep:gst-gl", "dep:gl_generator", "dep:glutin", "dep:glutin-winit", "dep:winit", "dep:raw-window-handle"]
gst-gl-x11 = ["dep:gst-gl-x11", "glutin-winit?/glx"] # glx turns on x11
gst-gl-egl = ["dep:gst-gl-egl", "glutin-winit?/egl", "glutin-winit?/x11", "glutin-winit?/wayland"] # Use X11 or Wayland via EGL
allocators = ["gst-allocators", "memmap2", "memfd", "uds"]
[[bin]]
name = "appsink"
@@ -75,24 +47,23 @@ name = "appsink"
[[bin]]
name = "appsrc"
[[bin]]
name = "custom_events"
[[bin]]
name = "custom_meta"
[[bin]]
name = "decodebin"
[[bin]]
name = "debug_ringbuffer"
[[bin]]
name = "encodebin"
[[bin]]
name = "events"
[[bin]]
name = "gtksink"
required-features = ["gtksink"]
[[bin]]
name = "gtkvideooverlay"
required-features = ["gtkvideooverlay"]
[[bin]]
name = "iterator"
@@ -102,16 +73,9 @@ name = "launch_glib_main"
[[bin]]
name = "launch"
[[bin]]
name = "transmux"
[[bin]]
name = "pad_probes"
[[bin]]
name = "play"
required-features = ["gst-play"]
[[bin]]
name = "playbin"
@@ -130,15 +94,7 @@ name = "rtpfecserver"
[[bin]]
name = "rtsp-server"
required-features = ["rtsp-server"]
[[bin]]
name = "rtsp-server-subclass"
required-features = ["rtsp-server"]
[[bin]]
name = "rtsp-server-custom-auth"
required-features = ["rtsp-server", "gst-rtsp-server/v1_22"]
required-features = ["gst-rtsp-server"]
[[bin]]
name = "tagsetter"
@@ -148,13 +104,15 @@ name = "toc"
[[bin]]
name = "futures"
required-features = ["generic-futures"]
[[bin]]
name = "glib-futures"
required-features = ["glib-futures"]
[[bin]]
name = "rtsp-server-record"
required-features = ["rtsp-server-record"]
required-features = ["gst-rtsp-server-record"]
[[bin]]
name = "discoverer"
@@ -163,47 +121,6 @@ name = "discoverer"
name = "pango-cairo"
required-features = ["pango-cairo"]
[[bin]]
name = "overlay-composition"
required-features = ["overlay-composition"]
[[bin]]
name = "overlay-composition-d2d"
required-features = ["windows"]
[[bin]]
name = "ges"
required-features = ["ges"]
[[bin]]
name = "glwindow"
required-features = ["gl"]
[[bin]]
name = "glfilter"
required-features = ["gl"]
[[bin]]
name = "subclass"
[[bin]]
name = "video_converter"
[[bin]]
name = "thumbnail"
required-features = ["image"]
[[bin]]
name = "fd_allocator"
required-features = ["allocators"]
[[bin]]
name = "cairo_compositor"
required-features = ["cairo-rs", "gst-video/v1_18"]
[[bin]]
name = "d3d11videosink"
required-features = ["windows"]
[[bin]]
name = "audio_multichannel_interleave"

@@ -1,22 +0,0 @@
#[cfg(feature = "gl")]
fn generate_gl_bindings() {
let dest = std::path::PathBuf::from(&std::env::var("OUT_DIR").unwrap());
let mut file = std::fs::File::create(dest.join("test_gl_bindings.rs")).unwrap();
gl_generator::Registry::new(
gl_generator::Api::Gles2,
(3, 0),
gl_generator::Profile::Core,
gl_generator::Fallbacks::All,
[],
)
.write_bindings(gl_generator::StructGenerator, &mut file)
.unwrap();
}
#[cfg(not(feature = "gl"))]
fn generate_gl_bindings() {}
fn main() {
println!("cargo:rerun-if-changed=build.rs");
generate_gl_bindings();
}

@@ -1,104 +1,113 @@
// This example demonstrates the use of the appsink element.
// It operates the following pipeline:
#[macro_use]
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_app as gst_app;
extern crate gstreamer_audio as gst_audio;
// {audiotestsrc} - {appsink}
extern crate glib;
// The application specifies what format it wants to handle. This format
// is applied by calling set_caps on the appsink. Now it's the audiotestsrc's
// task to provide this data format. If the element connected to the appsink's
// sink-pad were not able to provide what we ask them to, this would fail.
// This is the format we request:
// Audio / Signed 16bit / 1 channel / arbitrary sample rate
use std::i16;
use anyhow::Error;
extern crate byte_slice_cast;
use byte_slice_cast::*;
use derive_more::{Display, Error};
use gst::{element_error, prelude::*};
use std::error::Error as StdError;
use std::i16;
use std::i32;
extern crate failure;
use failure::Error;
#[macro_use]
extern crate failure_derive;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[derive(Debug, Fail)]
#[fail(display = "Missing element {}", _0)]
struct MissingElement(&'static str);
#[derive(Debug, Fail)]
#[fail(
display = "Received error from {}: {} (debug: {:?})",
src, error, debug
)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
#[cause]
cause: glib::Error,
}
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("audiotestsrc").build()?;
let appsink = gst_app::AppSink::builder()
// Tell the appsink what format we want. It will then be the audiotestsrc's job to
// provide the format we request.
// This can be set after linking the two objects, because format negotiation between
// both elements will happen during pre-rolling of the pipeline.
.caps(
&gst_audio::AudioCapsBuilder::new_interleaved()
.format(gst_audio::AUDIO_FORMAT_S16)
.channels(1)
.build(),
)
.build();
let pipeline = gst::Pipeline::new(None);
let src =
gst::ElementFactory::make("audiotestsrc", None).ok_or(MissingElement("audiotestsrc"))?;
let sink = gst::ElementFactory::make("appsink", None).ok_or(MissingElement("appsink"))?;
pipeline.add_many([&src, appsink.upcast_ref()])?;
src.link(&appsink)?;
pipeline.add_many(&[&src, &sink])?;
src.link(&sink)?;
let appsink = sink
.dynamic_cast::<gst_app::AppSink>()
.expect("Sink element is expected to be an appsink!");
appsink.set_caps(&gst::Caps::new_simple(
"audio/x-raw",
&[
("format", &gst_audio::AUDIO_FORMAT_S16.to_string()),
("layout", &"interleaved"),
("channels", &(1i32)),
("rate", &gst::IntRange::<i32>::new(1, i32::MAX)),
],
));
// Getting data out of the appsink is done by setting callbacks on it.
// The appsink will then call those handlers, as soon as data is available.
appsink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
// Add a handler to the "new-sample" signal.
gst_app::AppSinkCallbacks::new()
.new_sample(|appsink| {
// Pull the sample in question out of the appsink's buffer.
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or_else(|| {
element_error!(
let sample = match appsink.pull_sample() {
None => return gst::FlowReturn::Eos,
Some(sample) => sample,
};
let buffer = if let Some(buffer) = sample.get_buffer() {
buffer
} else {
gst_element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to get buffer from appsink")
);
gst::FlowError::Error
})?;
return gst::FlowReturn::Error;
};
// At this point, buffer is only a reference to an existing memory region somewhere.
// When we want to access its content, we have to map it while requesting the required
// mode of access (read, read/write).
// This type of abstraction is necessary, because the buffer in question might not be
// on the machine's main memory itself, but rather in the GPU's memory.
// So mapping the buffer makes the underlying memory region accessible to us.
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
let map = buffer.map_readable().map_err(|_| {
element_error!(
let map = if let Some(map) = buffer.map_readable() {
map
} else {
gst_element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to map buffer readable")
);
gst::FlowError::Error
})?;
return gst::FlowReturn::Error;
};
// We know what format the data in the memory region has, since we requested
// it by setting the appsink's caps. So what we do here is interpret the
// memory region we mapped as an array of signed 16 bit integers.
let samples = map.as_slice_of::<i16>().map_err(|_| {
element_error!(
let samples = if let Ok(samples) = map.as_slice_of::<i16>() {
samples
} else {
gst_element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to interpret buffer as S16 PCM")
("Failed to interprete buffer as S16 PCM")
);
gst::FlowError::Error
})?;
return gst::FlowReturn::Error;
};
// For buffer (= chunk of samples), we calculate the root mean square:
// (https://en.wikipedia.org/wiki/Root_mean_square)
let sum: f64 = samples
.iter()
.map(|sample| {
@@ -107,9 +116,9 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
})
.sum();
let rms = (sum / (samples.len() as f64)).sqrt();
println!("rms: {rms}");
println!("rms: {}", rms);
Ok(gst::FlowSuccess::Ok)
gst::FlowReturn::Ok
})
.build(),
);
@@ -118,34 +127,34 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
}
fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
pipeline.set_state(gst::State::Playing).into_result()?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
pipeline.set_state(gst::State::Null)?;
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
}
.into());
pipeline.set_state(gst::State::Null).into_result()?;
Err(ErrorMessage {
src: err
.get_src()
.map(|s| s.get_path_string())
.unwrap_or_else(|| String::from("None")),
error: err.get_error().description().into(),
debug: err.get_debug(),
cause: err.get_error(),
})?;
}
_ => (),
}
}
pipeline.set_state(gst::State::Null)?;
pipeline.set_state(gst::State::Null).into_result()?;
Ok(())
}
@@ -153,12 +162,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
fn example_main() {
match create_pipeline().and_then(main_loop) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

@@ -1,29 +1,36 @@
// This example shows how to use the appsrc element.
// It operates the following pipeline:
// {appsrc} - {videoconvert} - {autovideosink}
// The application itself provides the video-data for the pipeline, by providing
// it in the callback of the appsrc element. Videoconvert makes sure that the
// format the application provides can be displayed by the autovideosink
// at the end of the pipeline.
// The application provides data of the following format:
// Video / BGRx (4 bytes) / 2 fps
use anyhow::Error;
use derive_more::{Display, Error};
extern crate gstreamer as gst;
use gst::prelude::*;
use gst_video::prelude::*;
extern crate gstreamer_app as gst_app;
extern crate gstreamer_video as gst_video;
extern crate glib;
use std::error::Error as StdError;
extern crate failure;
use failure::Error;
#[macro_use]
extern crate failure_derive;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[derive(Debug, Fail)]
#[fail(display = "Missing element {}", _0)]
struct MissingElement(&'static str);
#[derive(Debug, Fail)]
#[fail(
display = "Received error from {}: {} (debug: {:?})",
src, error, debug
)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
#[cause]
cause: glib::Error,
}
const WIDTH: usize = 320;
@@ -32,26 +39,27 @@ const HEIGHT: usize = 240;
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
let pipeline = gst::Pipeline::default();
let pipeline = gst::Pipeline::new(None);
let src = gst::ElementFactory::make("appsrc", None).ok_or(MissingElement("appsrc"))?;
let videoconvert =
gst::ElementFactory::make("videoconvert", None).ok_or(MissingElement("videoconvert"))?;
let sink =
gst::ElementFactory::make("autovideosink", None).ok_or(MissingElement("autovideosink"))?;
// Specify the format we want to provide as application into the pipeline
// by creating a video info with the given format and creating caps from it for the appsrc element.
let video_info =
gst_video::VideoInfo::builder(gst_video::VideoFormat::Bgrx, WIDTH as u32, HEIGHT as u32)
.fps(gst::Fraction::new(2, 1))
.build()
.expect("Failed to create video info");
pipeline.add_many(&[&src, &videoconvert, &sink])?;
gst::Element::link_many(&[&src, &videoconvert, &sink])?;
let appsrc = gst_app::AppSrc::builder()
.caps(&video_info.to_caps().unwrap())
.format(gst::Format::Time)
.build();
let appsrc = src
.dynamic_cast::<gst_app::AppSrc>()
.expect("Source element is expected to be an appsrc!");
let videoconvert = gst::ElementFactory::make("videoconvert").build()?;
let sink = gst::ElementFactory::make("autovideosink").build()?;
let info = gst_video::VideoInfo::new(gst_video::VideoFormat::Bgrx, WIDTH as u32, HEIGHT as u32)
.fps(gst::Fraction::new(2, 1))
.build()
.expect("Failed to create video info");
pipeline.add_many([appsrc.upcast_ref(), &videoconvert, &sink])?;
gst::Element::link_many([appsrc.upcast_ref(), &videoconvert, &sink])?;
appsrc.set_caps(&info.to_caps().unwrap());
appsrc.set_property_format(gst::Format::Time);
// Our frame counter, that is stored in the mutable environment
// of the closure of the need-data callback
@@ -62,64 +70,32 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
// need-data callback.
let mut i = 0;
appsrc.set_callbacks(
// Since our appsrc element operates in pull mode (it asks us to provide data),
// we add a handler for the need-data callback and provide new data from there.
// In our case, we told gstreamer that we do 2 frames per second. While the
// buffers of all elements of the pipeline are still empty, this will be called
// a couple of times until all of them are filled. After this initial period,
// this handler will be called (on average) twice per second.
gst_app::AppSrcCallbacks::builder()
gst_app::AppSrcCallbacks::new()
.need_data(move |appsrc, _| {
// We only produce 100 frames
if i == 100 {
let _ = appsrc.end_of_stream();
return;
}
println!("Producing frame {i}");
println!("Producing frame {}", i);
let r = if i % 2 == 0 { 0 } else { 255 };
let g = if i % 3 == 0 { 0 } else { 255 };
let b = if i % 5 == 0 { 0 } else { 255 };
// Create the buffer that can hold exactly one BGRx frame.
let mut buffer = gst::Buffer::with_size(video_info.size()).unwrap();
let mut buffer = gst::Buffer::with_size(WIDTH * HEIGHT * 4).unwrap();
{
let buffer = buffer.get_mut().unwrap();
// For each frame we produce, we set the timestamp when it should be displayed
// (pts = presentation time stamp)
// The autovideosink will use this information to display the frame at the right time.
buffer.set_pts(i * 500 * gst::ClockTime::MSECOND);
buffer.set_pts(i * 500 * gst::MSECOND);
// At this point, buffer is only a reference to an existing memory region somewhere.
// When we want to access its content, we have to map it while requesting the required
// mode of access (read, read/write).
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
let mut vframe =
gst_video::VideoFrameRef::from_buffer_ref_writable(buffer, &video_info)
.unwrap();
let mut data = buffer.map_writable().unwrap();
// Remember some values from the frame for later usage
let width = vframe.width() as usize;
let height = vframe.height() as usize;
// Each line of the first plane has this many bytes
let stride = vframe.plane_stride()[0] as usize;
// Iterate over each of the height many lines of length stride
for line in vframe
.plane_data_mut(0)
.unwrap()
.chunks_exact_mut(stride)
.take(height)
{
// Iterate over each pixel of 4 bytes in that line
for pixel in line[..(4 * width)].chunks_exact_mut(4) {
pixel[0] = b;
pixel[1] = g;
pixel[2] = r;
pixel[3] = 0;
}
for p in data.as_mut_slice().chunks_mut(4) {
assert_eq!(p.len(), 4);
p[0] = b;
p[1] = g;
p[2] = r;
p[3] = 0;
}
}
@@ -135,34 +111,34 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
}
fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
pipeline.set_state(gst::State::Playing).into_result()?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
pipeline.set_state(gst::State::Null)?;
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
}
.into());
pipeline.set_state(gst::State::Null).into_result()?;
Err(ErrorMessage {
src: err
.get_src()
.map(|s| s.get_path_string())
.unwrap_or_else(|| String::from("None")),
error: err.get_error().description().into(),
debug: err.get_debug(),
cause: err.get_error(),
})?;
}
_ => (),
}
}
pipeline.set_state(gst::State::Null)?;
pipeline.set_state(gst::State::Null).into_result()?;
Ok(())
}
@@ -170,12 +146,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
fn example_main() {
match create_pipeline().and_then(main_loop) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

@@ -1,153 +0,0 @@
// This example demonstrates how to mix multiple audio
// streams into a single output using the audiomixer element.
// In this case, we're mixing 4 stereo streams into a single 8 channel output.
use gst::prelude::*;
use std::env;
#[path = "../examples-common.rs"]
mod examples_common;
const TRACKS: i32 = 4;
fn create_source_and_link(pipeline: &gst::Pipeline, mixer: &gst::Element, track_number: i32) {
let freq = ((track_number + 1) * 1000) as f64;
let audiosrc = gst::ElementFactory::make("audiotestsrc")
.property("freq", freq)
.property("num-buffers", 2000)
.build()
.unwrap();
let caps = gst_audio::AudioCapsBuilder::new().channels(2).build();
let capsfilter = gst::ElementFactory::make("capsfilter")
.property("caps", &caps)
.build()
.unwrap();
pipeline.add_many([&audiosrc, &capsfilter]).unwrap();
gst::Element::link_many([&audiosrc, &capsfilter]).unwrap();
let src_pad = capsfilter.static_pad("src").unwrap();
let mixer_pad = mixer.request_pad_simple("sink_%u").unwrap();
// audiomixer expects a mix-matrix set on each input pad,
// indicating which output channels our input should appear in.
// Rows => input channels, columns => output channels.
// Here each input channel will appear in exactly one output channel.
let mut mix_matrix: Vec<Vec<f32>> = vec![];
for i in 0..TRACKS {
if i == track_number {
mix_matrix.push(vec![1.0, 0.0]);
mix_matrix.push(vec![0.0, 1.0]);
} else {
mix_matrix.push(vec![0.0, 0.0]);
mix_matrix.push(vec![0.0, 0.0]);
}
}
let mut audiomixer_config = gst_audio::AudioConverterConfig::new();
audiomixer_config.set_mix_matrix(&mix_matrix);
mixer_pad.set_property("converter-config", audiomixer_config);
src_pad.link(&mixer_pad).unwrap();
}
fn example_main() {
gst::init().unwrap();
let args: Vec<_> = env::args().collect();
let output_file = if args.len() == 2 {
&args[1]
} else {
println!("Usage: audiomixer <output file>");
std::process::exit(-1);
};
let pipeline = gst::Pipeline::new();
let audiomixer = gst::ElementFactory::make("audiomixer").build().unwrap();
// Using an arbitrary layout of 4 stereo pairs.
let positions = [
gst_audio::AudioChannelPosition::FrontLeft,
gst_audio::AudioChannelPosition::FrontRight,
gst_audio::AudioChannelPosition::RearLeft,
gst_audio::AudioChannelPosition::RearRight,
gst_audio::AudioChannelPosition::SideLeft,
gst_audio::AudioChannelPosition::SideRight,
gst_audio::AudioChannelPosition::TopFrontLeft,
gst_audio::AudioChannelPosition::TopFrontRight,
];
let mask = gst_audio::AudioChannelPosition::positions_to_mask(&positions, true).unwrap();
let caps = gst_audio::AudioCapsBuilder::new()
.channels(positions.len() as i32)
.channel_mask(mask)
.build();
let capsfilter = gst::ElementFactory::make("capsfilter")
.property("caps", &caps)
.build()
.unwrap();
let audioconvert = gst::ElementFactory::make("audioconvert").build().unwrap();
let audioresample = gst::ElementFactory::make("audioresample").build().unwrap();
let wavenc = gst::ElementFactory::make("wavenc").build().unwrap();
let sink = gst::ElementFactory::make("filesink")
.property("location", output_file)
.build()
.unwrap();
pipeline
.add_many([
&audiomixer,
&capsfilter,
&audioconvert,
&audioresample,
&wavenc,
&sink,
])
.unwrap();
gst::Element::link_many([
&audiomixer,
&capsfilter,
&audioconvert,
&audioresample,
&wavenc,
&sink,
])
.unwrap();
for i in 0..TRACKS {
create_source_and_link(&pipeline, &audiomixer, i);
}
let bus = pipeline.bus().expect("Pipeline without bus");
pipeline
.set_state(gst::State::Playing)
.expect("Unable to start pipeline");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
eprintln!(
"Error from {:?}: {} ({:?})",
msg.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
break;
}
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to change pipeline state to NULL");
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}
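The mix-matrix construction above is easy to verify in isolation. The following is a minimal sketch of the same logic (the print_mix_matrix helper is hypothetical, not part of the example): for track 1 of 4 it prints eight rows of two columns, one row per output channel, where only output channels 2 and 3 pick up the input's left and right channels.

fn print_mix_matrix(track_number: i32) {
    const TRACKS: i32 = 4;
    // One row per output channel (2 * TRACKS = 8), one column per input channel (2).
    let mut mix_matrix: Vec<Vec<f32>> = vec![];
    for i in 0..TRACKS {
        if i == track_number {
            // This input's stereo pair passes through to its own two output channels.
            mix_matrix.push(vec![1.0, 0.0]);
            mix_matrix.push(vec![0.0, 1.0]);
        } else {
            // All other output channels receive nothing from this input.
            mix_matrix.push(vec![0.0, 0.0]);
            mix_matrix.push(vec![0.0, 0.0]);
        }
    }
    for (row, coeffs) in mix_matrix.iter().enumerate() {
        println!("output channel {row}: {coeffs:?}");
    }
}

fn main() {
    print_mix_matrix(1);
}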

View file

@ -1,719 +0,0 @@
// This example demonstrates how to implement a custom compositor based on cairo.
#![allow(clippy::non_send_fields_in_send_ty)]
use anyhow::{Context, Error};
use gst::prelude::*;
use gst_base::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
// Our custom compositor element is defined in this module.
mod cairo_compositor {
use gst_base::subclass::prelude::*;
use gst_video::{prelude::*, subclass::prelude::*};
// In the imp submodule we include the actual implementation of the compositor.
mod imp {
use std::sync::Mutex;
use super::*;
// Settings of the compositor.
#[derive(Clone)]
struct Settings {
background_color: u32,
}
impl Default for Settings {
fn default() -> Self {
Self {
background_color: 0xff_00_00_00,
}
}
}
// This is the private data of our compositor.
#[derive(Default)]
pub struct CairoCompositor {
settings: Mutex<Settings>,
}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data.
#[glib::object_subclass]
impl ObjectSubclass for CairoCompositor {
const NAME: &'static str = "CairoCompositor";
type Type = super::CairoCompositor;
type ParentType = gst_video::VideoAggregator;
type Interfaces = (gst::ChildProxy,);
}
// Implementation of glib::Object virtual methods.
impl ObjectImpl for CairoCompositor {
// Specification of the compositor properties.
// In this case a single property for configuring the background color of the
// composition.
fn properties() -> &'static [glib::ParamSpec] {
static PROPERTIES: std::sync::OnceLock<Vec<glib::ParamSpec>> =
std::sync::OnceLock::new();
PROPERTIES.get_or_init(|| {
vec![glib::ParamSpecUInt::builder("background-color")
.nick("Background Color")
.blurb("Background color as 0xRRGGBB")
.default_value(Settings::default().background_color)
.build()]
})
}
// Called by the application whenever the value of a property should be changed.
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
let mut settings = self.settings.lock().unwrap();
match pspec.name() {
"background-color" => {
settings.background_color = value.get().unwrap();
}
_ => unimplemented!(),
};
}
// Called by the application whenever the value of a property should be retrieved.
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
let settings = self.settings.lock().unwrap();
match pspec.name() {
"background-color" => settings.background_color.to_value(),
_ => unimplemented!(),
}
}
}
// Implementation of gst::Object virtual methods.
impl GstObjectImpl for CairoCompositor {}
// Implementation of gst::Element virtual methods.
impl ElementImpl for CairoCompositor {
// The element specific metadata. This information is what is visible from
// gst-inspect-1.0 and can also be programmatically retrieved from the gst::Registry
// after initial registration without having to load the plugin in memory.
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: std::sync::OnceLock<gst::subclass::ElementMetadata> =
std::sync::OnceLock::new();
Some(ELEMENT_METADATA.get_or_init(|| {
gst::subclass::ElementMetadata::new(
"Cairo Compositor",
"Compositor/Video",
"Cairo based compositor",
"Sebastian Dröge <sebastian@centricular.com>",
)
}))
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: std::sync::OnceLock<Vec<gst::PadTemplate>> =
std::sync::OnceLock::new();
PAD_TEMPLATES.get_or_init(|| {
// Create pad templates for our sink and source pad. These are later used for
// actually creating the pads and beforehand already provide information to
// GStreamer about all possible pads that could exist for this type.
// On all pads we can only handle BGRx.
let caps = gst_video::VideoCapsBuilder::new()
.format(gst_video::VideoFormat::Bgrx)
.pixel_aspect_ratio((1, 1).into())
.build();
vec![
// The src pad template must be named "src" for aggregator
// and always be there.
gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
)
.unwrap(),
// The sink pad template must be named "sink_%u" by default for aggregator
// and be requested by the application.
//
// Also declare here that it should be a pad with our custom compositor pad
// type that is defined further below.
gst::PadTemplate::with_gtype(
"sink_%u",
gst::PadDirection::Sink,
gst::PadPresence::Request,
&caps,
super::CairoCompositorPad::static_type(),
)
.unwrap(),
]
})
}
// Notify via the child proxy interface whenever a new pad is added or removed.
fn request_new_pad(
&self,
templ: &gst::PadTemplate,
name: Option<&str>,
caps: Option<&gst::Caps>,
) -> Option<gst::Pad> {
let element = self.obj();
let pad = self.parent_request_new_pad(templ, name, caps)?;
element.child_added(&pad, &pad.name());
Some(pad)
}
fn release_pad(&self, pad: &gst::Pad) {
let element = self.obj();
element.child_removed(pad, &pad.name());
self.parent_release_pad(pad);
}
}
// Implementation of gst_base::Aggregator virtual methods.
impl AggregatorImpl for CairoCompositor {
// Called whenever a query arrives at the given sink pad of the compositor.
fn sink_query(
&self,
aggregator_pad: &gst_base::AggregatorPad,
query: &mut gst::QueryRef,
) -> bool {
use gst::QueryViewMut;
// We can accept any input caps that match the pad template. By default
// videoaggregator only allows caps that have the same format as the output.
match query.view_mut() {
QueryViewMut::Caps(q) => {
let caps = aggregator_pad.pad_template_caps();
let filter = q.filter();
let caps = if let Some(filter) = filter {
filter.intersect_with_mode(&caps, gst::CapsIntersectMode::First)
} else {
caps
};
q.set_result(&caps);
true
}
QueryViewMut::AcceptCaps(q) => {
let caps = q.caps();
let template_caps = aggregator_pad.pad_template_caps();
let res = caps.is_subset(&template_caps);
q.set_result(res);
true
}
_ => self.parent_sink_query(aggregator_pad, query),
}
}
}
// Implementation of gst_video::VideoAggregator virtual methods.
impl VideoAggregatorImpl for CairoCompositor {
// Called by videoaggregator whenever the output format should be determined.
fn find_best_format(
&self,
_downstream_caps: &gst::Caps,
) -> Option<(gst_video::VideoInfo, bool)> {
// Let videoaggregator select whatever format downstream wants.
//
// By default videoaggregator doesn't allow a different format than the input
// format.
None
}
// Called whenever a new output frame should be produced. At this point, each pad has
// either no frame queued up at all or the frame that should be used for this output
// time.
fn aggregate_frames(
&self,
token: &gst_video::subclass::AggregateFramesToken,
outbuf: &mut gst::BufferRef,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let element = self.obj();
let pads = element.sink_pads();
// Map the output frame writable.
let out_info = element.video_info().unwrap();
let mut out_frame =
gst_video::VideoFrameRef::from_buffer_ref_writable(outbuf, &out_info).unwrap();
// And then create a cairo context for drawing on the output frame.
with_frame(&mut out_frame, |ctx| {
let settings = self.settings.lock().unwrap().clone();
// First of all, clear the background.
let bg = (
((settings.background_color >> 16) & 0xff) as f64 / 255.0,
((settings.background_color >> 8) & 0xff) as f64 / 255.0,
(settings.background_color & 0xff) as f64 / 255.0,
);
ctx.set_operator(cairo::Operator::Source);
ctx.set_source_rgb(bg.0, bg.1, bg.2);
ctx.paint().unwrap();
ctx.set_operator(cairo::Operator::Over);
// Then for each pad (in zorder), draw it according to the current settings.
for pad in pads {
let pad = pad.downcast_ref::<CairoCompositorPad>().unwrap();
let settings = pad.imp().settings.lock().unwrap().clone();
if settings.alpha <= 0.0 || settings.scale <= 0.0 {
continue;
}
let frame = match pad.prepared_frame(token) {
Some(frame) => frame,
None => continue,
};
ctx.save().unwrap();
ctx.translate(settings.xpos, settings.ypos);
ctx.scale(settings.scale, settings.scale);
ctx.translate(frame.width() as f64 / 2.0, frame.height() as f64 / 2.0);
ctx.rotate(settings.rotate / 360.0 * 2.0 * std::f64::consts::PI);
ctx.translate(
-(frame.width() as f64 / 2.0),
-(frame.height() as f64 / 2.0),
);
paint_frame(ctx, &frame, settings.alpha);
ctx.restore().unwrap();
}
});
Ok(gst::FlowSuccess::Ok)
}
}
// Implementation of gst::ChildProxy virtual methods.
//
// This allows accessing the pads and their properties from e.g. gst-launch.
impl ChildProxyImpl for CairoCompositor {
fn children_count(&self) -> u32 {
let object = self.obj();
object.num_pads() as u32
}
fn child_by_name(&self, name: &str) -> Option<glib::Object> {
let object = self.obj();
object
.pads()
.into_iter()
.find(|p| p.name() == name)
.map(|p| p.upcast())
}
fn child_by_index(&self, index: u32) -> Option<glib::Object> {
let object = self.obj();
object
.pads()
.into_iter()
.nth(index as usize)
.map(|p| p.upcast())
}
}
}
// Creates a cairo context around the given video frame and then calls the closure to operate
// on the cairo context. Ensures that no references to the video frame stay inside cairo.
fn with_frame<F: FnOnce(&cairo::Context)>(
frame: &mut gst_video::VideoFrameRef<&mut gst::BufferRef>,
func: F,
) {
// SAFETY: This is the one and only surface reference and it is dropped at the end, meaning
// nothing from cairo is referencing the frame data anymore.
unsafe {
use glib::translate::*;
let surface = cairo::ImageSurface::create_for_data_unsafe(
frame.plane_data_mut(0).unwrap().as_mut_ptr(),
cairo::Format::Rgb24,
frame.width() as i32,
frame.height() as i32,
frame.plane_stride()[0],
)
.unwrap();
let ctx = cairo::Context::new(&surface).unwrap();
func(&ctx);
drop(ctx);
surface.finish();
assert_eq!(
cairo::ffi::cairo_surface_get_reference_count(surface.to_glib_none().0),
1,
);
}
}
// Paints the frame with the given alpha on the cairo context at the current origin.
// Ensures that no references to the video frame stay inside cairo.
fn paint_frame(
ctx: &cairo::Context,
frame: &gst_video::VideoFrameRef<&gst::BufferRef>,
alpha: f64,
) {
// SAFETY: This is the one and only surface reference and it is dropped at the end, meaning
// nothing from cairo is referencing the frame data anymore.
//
// Also nothing is ever writing to the surface from here.
unsafe {
use glib::translate::*;
let surface = cairo::ImageSurface::create_for_data_unsafe(
frame.plane_data(0).unwrap().as_ptr() as *mut u8,
cairo::Format::Rgb24,
frame.width() as i32,
frame.height() as i32,
frame.plane_stride()[0],
)
.unwrap();
ctx.set_source_surface(&surface, 0.0, 0.0).unwrap();
ctx.paint_with_alpha(alpha).unwrap();
ctx.set_source_rgb(0.0, 0.0, 0.0);
assert_eq!(
cairo::ffi::cairo_surface_get_reference_count(surface.to_glib_none().0),
1,
);
}
}
// This here defines the public interface of our element and implements
// the corresponding traits so that it behaves like any other gst::Element.
glib::wrapper! {
pub struct CairoCompositor(ObjectSubclass<imp::CairoCompositor>) @extends gst_video::VideoAggregator, gst_base::Aggregator, gst::Element, gst::Object, @implements gst::ChildProxy;
}
impl CairoCompositor {
// Creates a new instance of our compositor with the given name.
pub fn new(name: Option<&str>) -> Self {
glib::Object::builder().property("name", name).build()
}
}
// In the imp submodule we include the implementation of the pad subclass.
//
// This doesn't implement any additional logic but only provides properties for configuring the
// appearance of the stream corresponding to this pad and the storage of the property values.
mod imp_pad {
use std::sync::Mutex;
use super::*;
// Settings of our pad.
#[derive(Clone)]
pub(super) struct Settings {
pub(super) alpha: f64,
pub(super) scale: f64,
pub(super) rotate: f64,
pub(super) xpos: f64,
pub(super) ypos: f64,
}
impl Default for Settings {
fn default() -> Self {
Self {
alpha: 1.0,
scale: 1.0,
rotate: 0.0,
xpos: 0.0,
ypos: 0.0,
}
}
}
// This is the private data of our pad.
#[derive(Default)]
pub struct CairoCompositorPad {
pub(super) settings: Mutex<Settings>,
}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data.
#[glib::object_subclass]
impl ObjectSubclass for CairoCompositorPad {
const NAME: &'static str = "CairoCompositorPad";
type Type = super::CairoCompositorPad;
type ParentType = gst_video::VideoAggregatorPad;
}
// Implementation of glib::Object virtual methods.
impl ObjectImpl for CairoCompositorPad {
// Specification of the compositor pad properties.
// In this case there are various properties for defining the position and otherwise
// the appearance of the stream corresponding to this pad.
fn properties() -> &'static [glib::ParamSpec] {
static PROPERTIES: std::sync::OnceLock<Vec<glib::ParamSpec>> =
std::sync::OnceLock::new();
PROPERTIES.get_or_init(|| {
vec![
glib::ParamSpecDouble::builder("alpha")
.nick("Alpha")
.blurb("Alpha value of the input")
.minimum(0.0)
.maximum(1.0)
.default_value(Settings::default().alpha)
.build(),
glib::ParamSpecDouble::builder("scale")
.nick("Scale")
.blurb("Scale factor of the input")
.minimum(0.0)
.maximum(f64::MAX)
.default_value(Settings::default().scale)
.build(),
glib::ParamSpecDouble::builder("rotate")
.nick("Rotate")
.blurb("Rotation of the input")
.minimum(0.0)
.maximum(360.0)
.default_value(Settings::default().rotate)
.build(),
glib::ParamSpecDouble::builder("xpos")
.nick("X Position")
.blurb("Horizontal position of the input")
.minimum(0.0)
.maximum(f64::MAX)
.default_value(Settings::default().xpos)
.build(),
glib::ParamSpecDouble::builder("ypos")
.nick("Y Position")
.blurb("Vertical position of the input")
.minimum(0.0)
.maximum(f64::MAX)
.default_value(Settings::default().ypos)
.build(),
]
})
}
// Called by the application whenever the value of a property should be changed.
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
let mut settings = self.settings.lock().unwrap();
match pspec.name() {
"alpha" => {
settings.alpha = value.get().unwrap();
}
"scale" => {
settings.scale = value.get().unwrap();
}
"rotate" => {
settings.rotate = value.get().unwrap();
}
"xpos" => {
settings.xpos = value.get().unwrap();
}
"ypos" => {
settings.ypos = value.get().unwrap();
}
_ => unimplemented!(),
};
}
// Called by the application whenever the value of a property should be retrieved.
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
let settings = self.settings.lock().unwrap();
match pspec.name() {
"alpha" => settings.alpha.to_value(),
"scale" => settings.scale.to_value(),
"rotate" => settings.rotate.to_value(),
"xpos" => settings.xpos.to_value(),
"ypos" => settings.ypos.to_value(),
_ => unimplemented!(),
}
}
}
// Implementation of gst::Object virtual methods.
impl GstObjectImpl for CairoCompositorPad {}
// Implementation of gst::Pad virtual methods.
impl PadImpl for CairoCompositorPad {}
// Implementation of gst_base::AggregatorPad virtual methods.
impl AggregatorPadImpl for CairoCompositorPad {}
// Implementation of gst_video::VideoAggregatorPad virtual methods.
impl VideoAggregatorPadImpl for CairoCompositorPad {}
}
// This here defines the public interface of our element and implements
// the corresponding traits so that it behaves like any other gst::Pad.
glib::wrapper! {
pub struct CairoCompositorPad(ObjectSubclass<imp_pad::CairoCompositorPad>) @extends gst_video::VideoAggregatorPad, gst_base::AggregatorPad, gst::Pad, gst::Object;
}
}
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
// Create our pipeline with the compositor and two input streams.
let pipeline = gst::Pipeline::default();
let src1 = gst::ElementFactory::make("videotestsrc")
.property_from_str("pattern", "ball")
.build()?;
let src2 = gst::ElementFactory::make("videotestsrc")
.property_from_str("pattern", "smpte")
.build()?;
let comp = cairo_compositor::CairoCompositor::new(None);
let conv = gst::ElementFactory::make("videoconvert").build()?;
let sink = gst::ElementFactory::make("autovideosink").build()?;
comp.set_property("background-color", 0xff_33_33_33u32);
pipeline.add_many([&src1, &src2, comp.upcast_ref(), &conv, &sink])?;
// Link everything together.
src1.link_filtered(
&comp,
&gst::Caps::builder("video/x-raw")
.field("width", 320i32)
.field("height", 240i32)
.build(),
)
.context("Linking source 1")?;
src2.link_filtered(
&comp,
&gst::Caps::builder("video/x-raw")
.field("width", 320i32)
.field("height", 240i32)
.build(),
)
.context("Linking source 2")?;
comp.link_filtered(
&conv,
&gst::Caps::builder("video/x-raw")
.field("width", 1280i32)
.field("height", 720i32)
.build(),
)
.context("Linking converter")?;
conv.link(&sink).context("Linking sink")?;
// Change positions etc of both inputs based on a timer
let xmax = 1280.0 - 320.0f64;
let ymax = 720.0 - 240.0f64;
let sink_0 = comp.static_pad("sink_0").unwrap();
sink_0.set_property("xpos", 0.0f64);
sink_0.set_property("ypos", 0.0f64);
let sink_1 = comp.static_pad("sink_1").unwrap();
sink_1.set_property("xpos", xmax);
sink_1.set_property("ypos", ymax);
comp.set_emit_signals(true);
comp.connect_samples_selected(move |_agg, _seg, pts, _dts, _dur, _info| {
// Position and rotation period is 10s.
let pos = (pts.unwrap().nseconds() % gst::ClockTime::from_seconds(10).nseconds()) as f64
/ gst::ClockTime::from_seconds(10).nseconds() as f64;
let xpos = (1.0 + f64::sin(2.0 * std::f64::consts::PI * pos)) * xmax / 2.0;
let ypos = (1.0 + f64::cos(2.0 * std::f64::consts::PI * pos)) * ymax / 2.0;
sink_0.set_property("xpos", xpos);
sink_0.set_property("ypos", ypos);
let xpos = (1.0 + f64::cos(2.0 * std::f64::consts::PI * pos)) * xmax / 2.0;
let ypos = (1.0 + f64::sin(2.0 * std::f64::consts::PI * pos)) * ymax / 2.0;
sink_1.set_property("xpos", xpos);
sink_1.set_property("ypos", ypos);
sink_0.set_property("rotate", pos * 360.0);
sink_1.set_property("rotate", 360.0 - pos * 360.0);
// Alpha period is 2s.
let pos = (pts.unwrap().nseconds() % gst::ClockTime::from_seconds(2).nseconds()) as f64
/ gst::ClockTime::from_seconds(2).nseconds() as f64;
sink_0.set_property(
"alpha",
(1.0 + f64::sin(2.0 * std::f64::consts::PI * pos)) / 2.0,
);
sink_1.set_property(
"alpha",
(1.0 + f64::cos(2.0 * std::f64::consts::PI * pos)) / 2.0,
);
// Scale period is 20s.
let pos = (pts.unwrap().nseconds() % gst::ClockTime::from_seconds(20).nseconds()) as f64
/ gst::ClockTime::from_seconds(20).nseconds() as f64;
sink_0.set_property("scale", pos);
sink_1.set_property("scale", 1.0 - pos);
});
Ok(pipeline)
}
// Start the pipeline and collect messages from the bus until an error or EOS.
fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
let bus = pipeline
.bus()
.expect("Pipeline without bus. Shouldn't happen!");
let mut bus_stream = bus.stream();
let main_context = glib::MainContext::default();
// Storage for any error so we can report it later.
let mut error = None;
main_context.block_on(async {
use futures::prelude::*;
while let Some(msg) = bus_stream.next().await {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
error = Some(anyhow::anyhow!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
));
break;
}
_ => (),
}
}
});
// In case of error, report to the caller.
if let Some(error) = error {
let _ = pipeline.set_state(gst::State::Null);
return Err(error);
}
pipeline.set_state(gst::State::Null)?;
Ok(())
}
fn example_main() -> Result<(), Error> {
create_pipeline().and_then(main_loop)
}
fn main() -> Result<(), Error> {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically).
examples_common::run(example_main)
}
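Since the compositor implements gst::ChildProxy, its pads can also be looked up by name and configured as child objects; this is what makes per-pad property syntax in gst-launch-1.0 work. A minimal sketch, assuming the element from this example has been created and its inputs linked as above (the configure_first_input helper is hypothetical):

use gst::prelude::*;

// Configure the first requested sink pad through the ChildProxy interface
// instead of via static_pad().
fn configure_first_input(comp: &cairo_compositor::CairoCompositor) {
    let pad = comp
        .child_by_name("sink_0")
        .expect("sink_0 was not requested yet");
    pad.set_property("alpha", 0.5f64);
    pad.set_property("rotate", 45.0f64);
}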

View file

@ -1,180 +0,0 @@
// This example demonstrates how custom application-specific events can be
// created, sent in a pipeline and retrieved later.
//
// It uses a queue that contains several seconds worth of data. When the event
// is sent on the sink pad, we expect to see it emerge on the other side when
// the data in front of it has exited.
use gst::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug)]
pub struct ExampleCustomEvent {
pub send_eos: bool,
}
impl ExampleCustomEvent {
const EVENT_NAME: &'static str = "example-custom-event";
#[allow(clippy::new_ret_no_self)]
pub fn new(send_eos: bool) -> gst::Event {
let s = gst::Structure::builder(Self::EVENT_NAME)
.field("send_eos", send_eos)
.build();
gst::event::CustomDownstream::new(s)
}
pub fn parse(ev: &gst::EventRef) -> Option<ExampleCustomEvent> {
match ev.view() {
gst::EventView::CustomDownstream(e) => {
let s = match e.structure() {
Some(s) if s.name() == Self::EVENT_NAME => s,
_ => return None, // No structure in this event, or the name didn't match
};
let send_eos = s.get::<bool>("send_eos").unwrap();
Some(ExampleCustomEvent { send_eos })
}
_ => None, // Not a custom event
}
}
}
fn example_main() {
gst::init().unwrap();
let main_loop = glib::MainLoop::new(None, false);
// This creates a pipeline by parsing the gst-launch pipeline syntax.
let pipeline = gst::parse::launch(
"audiotestsrc name=src ! queue max-size-time=2000000000 ! fakesink name=sink sync=true",
)
.unwrap();
let bus = pipeline.bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let pipeline = pipeline.dynamic_cast::<gst::Pipeline>().unwrap();
let sink = pipeline.by_name("sink").unwrap();
let sinkpad = sink.static_pad("sink").unwrap();
// Need to move a new reference into the closure.
// !!ATTENTION!!:
// It might seem appealing to use pipeline.clone() here, because that greatly
// simplifies the code within the callback. What this actually does, however, is create
// a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
// Storing this strong reference within the callback (we are moving it in!), which is
// in turn stored in another strong reference on the pipeline, creates a
// reference cycle.
// DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
let pipeline_weak = pipeline.downgrade();
// Add a pad probe on the sink pad and catch the custom event we sent, then send
// an EOS event on the pipeline.
sinkpad.add_probe(gst::PadProbeType::EVENT_DOWNSTREAM, move |_, probe_info| {
let Some(event) = probe_info.event() else {
return gst::PadProbeReturn::Ok;
};
let Some(custom_event) = ExampleCustomEvent::parse(event) else {
return gst::PadProbeReturn::Ok;
};
let Some(pipeline) = pipeline_weak.upgrade() else {
return gst::PadProbeReturn::Ok;
};
if custom_event.send_eos {
/* Send EOS event to shut down the pipeline, but from an async callback, as we're
* in a pad probe blocking the stream thread here... */
println!("Got custom event with send_eos=true. Sending EOS");
let ev = gst::event::Eos::new();
let pipeline_weak = pipeline_weak.clone();
pipeline.call_async(move |_| {
if let Some(pipeline) = pipeline_weak.upgrade() {
pipeline.send_event(ev);
}
});
} else {
println!("Got custom event, with send_eos=false. Ignoring");
}
gst::PadProbeReturn::Ok
});
println!("Pipeline is running. Waiting 2 seconds");
/* Send 2 events into the pipeline - one with send_eos = false, followed
* by one with send_eos = true. Use a timeout to send them in a few seconds,
* once the pipeline has filled. */
for (i, send_eos) in [false, true].iter().enumerate() {
let pipeline_weak = pipeline.downgrade();
glib::timeout_add_seconds(2 + i as u32, move || {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let Some(pipeline) = pipeline_weak.upgrade() else {
return glib::ControlFlow::Break;
};
println!("Sending custom event to the pipeline with send_eos={send_eos}");
let ev = ExampleCustomEvent::new(*send_eos);
if !pipeline.send_event(ev) {
println!("Warning: Failed to send custom event");
}
// Remove this handler; the pipeline will shut down once our pad probe catches the custom
// event and sends EOS
glib::ControlFlow::Break
});
}
let main_loop_clone = main_loop.clone();
// This sets the bus's signal handler (don't be misled by the "add", there can only be one).
// Every message from the bus is passed through this function. Its return value determines
// whether the handler wants to be called again. If glib::ControlFlow::Break is returned, the
// handler is removed and will never be called again. The mainloop still runs though.
let _bus_watch = bus
.add_watch(move |_, msg| {
use gst::MessageView;
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => {
println!("received eos");
// An EndOfStream event was sent to the pipeline, so we tell our main loop
// to stop execution here.
main_loop.quit()
}
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
main_loop.quit();
}
_ => (),
};
// Tell the mainloop to continue executing this callback.
glib::ControlFlow::Continue
})
.expect("Failed to add bus watch");
// Operate GStreamer's bus, facilitating GLib's mainloop here.
// This function call will block until you tell the mainloop to quit
// (see above for how to do this).
main_loop.run();
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}
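The wrapper type above can be exercised without any pipeline: construct the event and parse it back from the event reference. A minimal sketch, assuming gst::init() has already been called (building a gst::Structure requires an initialized GStreamer; the event_roundtrip_check helper is hypothetical):

fn event_roundtrip_check() {
    // Round-trip: store the flag in the event, then read it back out.
    let ev = ExampleCustomEvent::new(true);
    match ExampleCustomEvent::parse(&ev) {
        Some(parsed) => assert!(parsed.send_eos),
        None => unreachable!("the structure name always matches here"),
    }
}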

View file

@ -1,290 +0,0 @@
// This example demonstrates how custom GstMeta can be defined and used on buffers.
//
// It simply attaches a GstMeta with a Rust String to buffers that are passed into
// an appsrc and retrieves them again from an appsink.
#![allow(clippy::non_send_fields_in_send_ty)]
use gst::{element_error, prelude::*};
#[path = "../examples-common.rs"]
mod examples_common;
mod custom_meta {
use std::{fmt, mem};
use gst::prelude::*;
// Public Rust type for the custom meta.
#[repr(transparent)]
pub struct CustomMeta(imp::CustomMeta);
// Metas must be Send+Sync.
unsafe impl Send for CustomMeta {}
unsafe impl Sync for CustomMeta {}
impl CustomMeta {
// Add a new custom meta to the buffer with the given label.
pub fn add(
buffer: &mut gst::BufferRef,
label: String,
) -> gst::MetaRefMut<Self, gst::meta::Standalone> {
unsafe {
// Manually dropping because gst_buffer_add_meta() takes ownership of the
// content of the struct.
let mut params = mem::ManuallyDrop::new(imp::CustomMetaParams { label });
// The label is passed through via the params to custom_meta_init().
let meta = gst::ffi::gst_buffer_add_meta(
buffer.as_mut_ptr(),
imp::custom_meta_get_info(),
&mut *params as *mut imp::CustomMetaParams as glib::ffi::gpointer,
) as *mut imp::CustomMeta;
Self::from_mut_ptr(buffer, meta)
}
}
// Retrieve the stored label.
pub fn label(&self) -> &str {
self.0.label.as_str()
}
}
// Trait to allow using the gst::Buffer API with this meta.
unsafe impl MetaAPI for CustomMeta {
type GstType = imp::CustomMeta;
fn meta_api() -> glib::Type {
imp::custom_meta_api_get_type()
}
}
impl fmt::Debug for CustomMeta {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("CustomMeta")
.field("label", &self.label())
.finish()
}
}
// Actual unsafe implementation of the meta.
mod imp {
use std::{mem, ptr};
use glib::translate::*;
pub(super) struct CustomMetaParams {
pub label: String,
}
// This is the C type that is actually stored as meta inside the buffers.
#[repr(C)]
pub struct CustomMeta {
parent: gst::ffi::GstMeta,
pub(super) label: String,
}
// Function to register the meta API and get a type back.
pub(super) fn custom_meta_api_get_type() -> glib::Type {
static TYPE: std::sync::OnceLock<glib::Type> = std::sync::OnceLock::new();
*TYPE.get_or_init(|| unsafe {
let t = glib::Type::from_glib(gst::ffi::gst_meta_api_type_register(
b"MyCustomMetaAPI\0".as_ptr() as *const _,
// We provide no tags here as our meta is just a label and does
// not refer to any specific aspect of the buffer.
[ptr::null::<std::os::raw::c_char>()].as_ptr() as *mut *const _,
));
assert_ne!(t, glib::Type::INVALID);
t
})
}
// Initialization function for our meta. This needs to ensure all fields are correctly
// initialized. They will contain random memory before.
unsafe extern "C" fn custom_meta_init(
meta: *mut gst::ffi::GstMeta,
params: glib::ffi::gpointer,
_buffer: *mut gst::ffi::GstBuffer,
) -> glib::ffi::gboolean {
assert!(!params.is_null());
let meta = &mut *(meta as *mut CustomMeta);
let params = ptr::read(params as *const CustomMetaParams);
// Need to initialize all our fields correctly here.
ptr::write(&mut meta.label, params.label);
true.into_glib()
}
// Free function for our meta. This needs to free/drop all memory we allocated.
unsafe extern "C" fn custom_meta_free(
meta: *mut gst::ffi::GstMeta,
_buffer: *mut gst::ffi::GstBuffer,
) {
let meta = &mut *(meta as *mut CustomMeta);
// Need to free/drop all our fields here.
ptr::drop_in_place(&mut meta.label);
}
// Transform function for our meta. This needs to get it from the old buffer to the new one
// in a way that is compatible with the transformation type. In this case we just always
// copy it over.
unsafe extern "C" fn custom_meta_transform(
dest: *mut gst::ffi::GstBuffer,
meta: *mut gst::ffi::GstMeta,
_buffer: *mut gst::ffi::GstBuffer,
_type_: glib::ffi::GQuark,
_data: glib::ffi::gpointer,
) -> glib::ffi::gboolean {
let meta = &*(meta as *mut CustomMeta);
// We simply copy over our meta here. Other metas might have to look at the type
// and do things conditional on that, or even just drop the meta.
super::CustomMeta::add(gst::BufferRef::from_mut_ptr(dest), meta.label.clone());
true.into_glib()
}
// Register the meta itself with its functions.
pub(super) fn custom_meta_get_info() -> *const gst::ffi::GstMetaInfo {
struct MetaInfo(ptr::NonNull<gst::ffi::GstMetaInfo>);
unsafe impl Send for MetaInfo {}
unsafe impl Sync for MetaInfo {}
static META_INFO: std::sync::OnceLock<MetaInfo> = std::sync::OnceLock::new();
META_INFO
.get_or_init(|| unsafe {
MetaInfo(
ptr::NonNull::new(gst::ffi::gst_meta_register(
custom_meta_api_get_type().into_glib(),
b"MyCustomMeta\0".as_ptr() as *const _,
mem::size_of::<CustomMeta>(),
Some(custom_meta_init),
Some(custom_meta_free),
Some(custom_meta_transform),
) as *mut gst::ffi::GstMetaInfo)
.expect("Failed to register meta API"),
)
})
.0
.as_ptr()
}
}
}
fn example_main() {
gst::init().unwrap();
// This creates a pipeline with appsrc and appsink.
let pipeline = gst::Pipeline::default();
let appsrc = gst_app::AppSrc::builder().build();
let appsink = gst_app::AppSink::builder().build();
pipeline.add(&appsrc).unwrap();
pipeline.add(&appsink).unwrap();
appsrc.link(&appsink).unwrap();
// Our buffer counter, that is stored in the mutable environment
// of the closure of the need-data callback.
let mut i = 0;
appsrc.set_callbacks(
gst_app::AppSrcCallbacks::builder()
.need_data(move |appsrc, _| {
// We only produce 5 buffers.
if i == 5 {
let _ = appsrc.end_of_stream();
return;
}
println!("Producing buffer {i}");
// Add a custom meta with a label to this buffer.
let mut buffer = gst::Buffer::new();
{
let buffer = buffer.get_mut().unwrap();
custom_meta::CustomMeta::add(buffer, format!("This is buffer {i}"));
}
i += 1;
// appsrc already handles the error here for us.
let _ = appsrc.push_buffer(buffer);
})
.build(),
);
// Getting data out of the appsink is done by setting callbacks on it.
// The appsink will then call those handlers, as soon as data is available.
appsink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
// Add a handler to the "new-sample" signal.
.new_sample(|appsink| {
// Pull the sample in question out of the appsink's buffer.
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or_else(|| {
element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to get buffer from appsink")
);
gst::FlowError::Error
})?;
// Retrieve the custom meta from the buffer and print it.
let meta = buffer
.meta::<custom_meta::CustomMeta>()
.expect("No custom meta found");
println!("Got buffer with label: {}", meta.label());
Ok(gst::FlowSuccess::Ok)
})
.build(),
);
// Actually start the pipeline.
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let pipeline = pipeline.dynamic_cast::<gst::Pipeline>().unwrap();
let bus = pipeline
.bus()
.expect("Pipeline without bus. Shouldn't happen!");
// And run until EOS or an error happened.
for msg in bus.iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
break;
}
_ => (),
}
}
// Finally shut down everything.
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically).
examples_common::run(example_main);
}
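The meta can also be attached and read back outside of a pipeline, and gst::BufferRef additionally offers iter_meta() for walking all metas of a given type. A minimal sketch, assuming gst::init() has already been called (the meta_roundtrip_check helper is hypothetical):

fn meta_roundtrip_check() {
    let mut buffer = gst::Buffer::new();
    {
        // Adding a meta requires a writable buffer reference.
        let buffer = buffer.get_mut().unwrap();
        custom_meta::CustomMeta::add(buffer, String::from("hello"));
    }
    // meta::<T>() returns the first meta of the requested type, if any.
    if let Some(meta) = buffer.meta::<custom_meta::CustomMeta>() {
        println!("first label: {}", meta.label());
    }
    // iter_meta::<T>() walks every instance of that meta type on the buffer.
    for meta in buffer.iter_meta::<custom_meta::CustomMeta>() {
        println!("label: {}", meta.label());
    }
}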

View file

@ -1,364 +0,0 @@
// This example demonstrates the use of the d3d11videosink's "present"
// signal and the use of Direct2D/DirectWrite APIs in Rust.
//
// Applications can perform various hardware-accelerated 2D graphics operations
// (e.g., the kind cairo supports) and text rendering via the Windows APIs.
// In this example, the 2D graphics operations and text rendering happen
// directly on the DXGI swapchain's backbuffer via the Windows APIs in a
// strictly zero-copy manner.
use std::{
collections::VecDeque,
sync::{Arc, Mutex},
time::SystemTime,
};
use gst::{glib, prelude::*};
use windows::{
core::*,
Win32::Graphics::{
Direct2D::{Common::*, *},
Direct3D11::*,
DirectWrite::*,
Dxgi::{Common::*, *},
},
};
struct OverlayContext {
d2d_factory: ID2D1Factory,
dwrite_factory: IDWriteFactory,
text_format: IDWriteTextFormat,
texture_desc: D3D11_TEXTURE2D_DESC,
text_layout: Option<IDWriteTextLayout>,
timestamp_queue: VecDeque<SystemTime>,
avg_fps: f32,
display_fps: f32,
font_size: f32,
}
fn create_overlay_context() -> Arc<Mutex<OverlayContext>> {
// Lots of DirectX APIs are marked as unsafe, but the operations below
// are not expected to fail unless a GPU hang or device-removal condition
// occurs
let d2d_factory = unsafe {
D2D1CreateFactory::<ID2D1Factory>(D2D1_FACTORY_TYPE_MULTI_THREADED, None).unwrap()
};
let dwrite_factory =
unsafe { DWriteCreateFactory::<IDWriteFactory>(DWRITE_FACTORY_TYPE_SHARED).unwrap() };
// Font size can be updated later
let text_format = unsafe {
dwrite_factory
.CreateTextFormat(
w!("Consolas"),
None,
DWRITE_FONT_WEIGHT_REGULAR,
DWRITE_FONT_STYLE_NORMAL,
DWRITE_FONT_STRETCH_NORMAL,
12f32,
w!("en-us"),
)
.unwrap()
};
Arc::new(Mutex::new(OverlayContext {
d2d_factory,
dwrite_factory,
text_format,
texture_desc: D3D11_TEXTURE2D_DESC::default(),
text_layout: None,
timestamp_queue: VecDeque::with_capacity(10),
avg_fps: 0f32,
display_fps: 0f32,
font_size: 12f32,
}))
}
fn main() -> Result<()> {
gst::init().unwrap();
let args: Vec<String> = std::env::args().collect();
if args.len() != 2 {
println!("URI must be specified");
return Ok(());
}
let main_loop = glib::MainLoop::new(None, false);
let overlay_context = create_overlay_context();
let overlay_context_weak = Arc::downgrade(&overlay_context);
// A BGRA or RGBA swapchain is needed for D2D interop,
// and the "present" signal must be explicitly enabled
let videosink = gst::ElementFactory::make("d3d11videosink")
.property("emit-present", true)
.property_from_str("display-format", "DXGI_FORMAT_B8G8R8A8_UNORM")
.build()
.unwrap();
// Listen "present" signal and draw overlay from the callback
// Required operations here:
// 1) Gets IDXGISurface and ID3D11Texture2D interface from
// given ID3D11RenderTargetView COM object
// - ID3D11Texture2D: To get texture resolution
// - IDXGISurface: To create Direct2D render target
// 2) Creates or reuses IDWriteTextLayout interface
// - This object represents text layout we want to draw on render target
// 3) Draw rectangle (overlay background) and text on render target
//
// NOTE: ID2D1Factory, IDWriteFactory, IDWriteTextFormat, and
// IDWriteTextLayout objects are device-independent. Which can be created
// earlier instead of creating them in the callback.
// But ID2D1RenderTarget is a device-dependent resource.
// The client should not hold the d2d render target object outside of
// this callback scope because the resource must be cleared before
// releasing/resizing DXGI swapchain.
videosink.connect_closure(
"present",
false,
glib::closure!(move |_sink: &gst::Element,
_device: &gst::Object,
rtv_raw: glib::Pointer| {
let overlay_context = overlay_context_weak.upgrade().unwrap();
let mut context = overlay_context.lock().unwrap();
let dwrite_factory = context.dwrite_factory.clone();
let d2d_factory = context.d2d_factory.clone();
// SAFETY: transmute() below is clearly an unsafe operation here.
// As for the rest of the block below, all DirectX
// APIs are marked as unsafe, except for cast.
//
// In theory, all the Direct3D/Direct2D APIs could fail for
// various reasons (it's hardware!), but in practice failure is very
// unexpected here, and any failure below would mean that we are doing
// something the wrong way, or that there is a driver bug or the like.
unsafe {
let rtv = ID3D11RenderTargetView::from_raw_borrowed(&rtv_raw).unwrap();
let resource = rtv.GetResource().unwrap();
let texture = resource.cast::<ID3D11Texture2D>().unwrap();
let desc = {
let mut desc = D3D11_TEXTURE2D_DESC::default();
texture.GetDesc(&mut desc);
desc
};
// Window size changed, so create a new text layout
let calculate_font_size = if desc != context.texture_desc {
context.texture_desc = desc;
context.text_layout = None;
true
} else {
false
};
// New fps value, so create a new layout
if context.avg_fps != context.display_fps {
context.display_fps = context.avg_fps;
context.text_layout = None;
}
if context.text_layout.is_none() {
let overlay_string = format!("TextOverlay, Fps {:.1}", context.display_fps);
let overlay_wstring = overlay_string.encode_utf16().collect::<Vec<_>>();
let layout = dwrite_factory
.CreateTextLayout(
&overlay_wstring,
&context.text_format,
desc.Width as f32,
desc.Height as f32 / 5f32,
)
.unwrap();
// Adjust alignment
layout
.SetTextAlignment(DWRITE_TEXT_ALIGNMENT_CENTER)
.unwrap();
layout
.SetParagraphAlignment(DWRITE_PARAGRAPH_ALIGNMENT_CENTER)
.unwrap();
// XXX: This is not an efficient approach.
// The font size can be pre-calculated for a pre-defined
// window size and string length
let mut range = DWRITE_TEXT_RANGE {
startPosition: 0u32,
length: overlay_wstring.len() as u32,
};
if calculate_font_size {
let mut font_size = 12f32;
let mut was_decreased = false;
loop {
let mut metrics = DWRITE_TEXT_METRICS::default();
layout.GetMetrics(&mut metrics).unwrap();
layout
.GetFontSize(0, &mut font_size, Some(&mut range))
.unwrap();
if metrics.widthIncludingTrailingWhitespace >= desc.Width as f32 {
if font_size > 1f32 {
font_size -= 0.5f32;
was_decreased = true;
layout.SetFontSize(font_size, range).unwrap();
continue;
}
break;
}
if was_decreased {
break;
}
if metrics.widthIncludingTrailingWhitespace < desc.Width as f32 {
if metrics.widthIncludingTrailingWhitespace
>= desc.Width as f32 * 0.7f32
{
break;
}
font_size += 0.5f32;
layout.SetFontSize(font_size, range).unwrap();
}
}
context.font_size = font_size;
} else {
layout.SetFontSize(context.font_size, range).unwrap();
}
context.text_layout = Some(layout);
};
let dxgi_surf = resource.cast::<IDXGISurface>().unwrap();
let render_target = d2d_factory
.CreateDxgiSurfaceRenderTarget(
&dxgi_surf,
&D2D1_RENDER_TARGET_PROPERTIES {
r#type: D2D1_RENDER_TARGET_TYPE_DEFAULT,
pixelFormat: D2D1_PIXEL_FORMAT {
format: DXGI_FORMAT_B8G8R8A8_UNORM,
alphaMode: D2D1_ALPHA_MODE_PREMULTIPLIED,
},
// zero means default DPI
dpiX: 0f32,
dpiY: 0f32,
usage: D2D1_RENDER_TARGET_USAGE_NONE,
minLevel: D2D1_FEATURE_LEVEL_DEFAULT,
},
)
.unwrap();
let text_brush = render_target
.CreateSolidColorBrush(
&D2D1_COLOR_F {
r: 0f32,
g: 0f32,
b: 0f32,
a: 1f32,
},
None,
)
.unwrap();
let overlay_brush = render_target
.CreateSolidColorBrush(
&D2D1_COLOR_F {
r: 0f32,
g: 0.5f32,
b: 0.5f32,
a: 0.3f32,
},
None,
)
.unwrap();
render_target.BeginDraw();
// Draws the overlay background. It will blend the overlay's background
// color with the already rendered video frame
render_target.FillRectangle(
&D2D_RECT_F {
left: 0f32,
top: 0f32,
right: desc.Width as f32,
bottom: desc.Height as f32 / 5f32,
},
&overlay_brush,
);
// Then, renders text
render_target.DrawTextLayout(
D2D_POINT_2F { x: 0f32, y: 0f32 },
context.text_layout.as_ref(),
&text_brush,
D2D1_DRAW_TEXT_OPTIONS_NONE,
);
// EndDraw may fail for various reasons.
// We ignore any such error in this example
let _ = render_target.EndDraw(None, None);
}
}),
);
// Add pad probe to calculate framerate
let sinkpad = videosink.static_pad("sink").unwrap();
let overlay_context_weak = Arc::downgrade(&overlay_context);
sinkpad.add_probe(gst::PadProbeType::BUFFER, move |_, probe_info| {
let overlay_context = overlay_context_weak.upgrade().unwrap();
let mut context = overlay_context.lock().unwrap();
context.timestamp_queue.push_back(SystemTime::now());
// Updates the framerate every 10 frames
if context.timestamp_queue.len() >= 10 {
let now = context.timestamp_queue.back().unwrap();
let front = context.timestamp_queue.front().unwrap();
let duration = now.duration_since(*front).unwrap().as_millis() as f32;
context.avg_fps = 1000f32 * (context.timestamp_queue.len() - 1) as f32 / duration;
context.timestamp_queue.clear();
}
gst::PadProbeReturn::Ok
});
let playbin = gst::ElementFactory::make("playbin")
.property("uri", &args[1])
.property("video-sink", &videosink)
.build()
.unwrap();
let main_loop_clone = main_loop.clone();
let bus = playbin.bus().unwrap();
let _bus_watch = bus
.add_watch(move |_, msg| {
use gst::MessageView;
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => {
println!("received eos");
main_loop.quit()
}
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
main_loop.quit();
}
_ => (),
};
glib::ControlFlow::Continue
})
.unwrap();
playbin.set_state(gst::State::Playing).unwrap();
main_loop.run();
playbin.set_state(gst::State::Null).unwrap();
Ok(())
}
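The pad probe's framerate computation is plain interval arithmetic: n timestamps span n - 1 frame intervals, so the average fps is 1000 * (n - 1) divided by the elapsed milliseconds. A minimal standalone sketch of the same calculation (the average_fps helper is hypothetical, not part of the example):

use std::time::SystemTime;

// Average fps over a window of frame timestamps. n samples span n - 1
// frame intervals between the first and the last timestamp.
fn average_fps(timestamps: &[SystemTime]) -> Option<f32> {
    let first = timestamps.first()?;
    let last = timestamps.last()?;
    let millis = last.duration_since(*first).ok()?.as_millis() as f32;
    if millis <= 0.0 {
        return None;
    }
    Some(1000.0 * (timestamps.len() - 1) as f32 / millis)
}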

View file

@ -1,85 +0,0 @@
// This example shows how to use the debug ringbuffer.
//
// It runs a simple GStreamer pipeline for a short time,
// and on EOS it dumps the last few KB of debug logs.
//
// It's possible to dump the logs at any time in an application,
// not just on exit, as is done here.
use std::process;
use gst::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
fn example_main() {
let pipeline_str = "videotestsrc num-buffers=100 ! autovideosink";
gst::init().unwrap();
/* Disable stdout debug, then configure the debug ringbuffer and enable
* all debug */
gst::log::remove_default_log_function();
/* Keep 1KB of logs per thread, removing old threads after 10 seconds */
gst::log::add_ring_buffer_logger(1024, 10);
/* Enable all debug categories */
gst::log::set_default_threshold(gst::DebugLevel::Log);
let mut context = gst::ParseContext::new();
let pipeline =
match gst::parse::launch_full(pipeline_str, Some(&mut context), gst::ParseFlags::empty()) {
Ok(pipeline) => pipeline,
Err(err) => {
if let Some(gst::ParseError::NoSuchElement) = err.kind::<gst::ParseError>() {
println!("Missing element(s): {:?}", context.missing_elements());
} else {
println!("Failed to parse pipeline: {err}");
}
process::exit(-1)
}
};
let bus = pipeline.bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => {
break;
}
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
break;
}
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
/* Insert a message into the debug log */
gst::error!(gst::CAT_DEFAULT, "Hi from the debug log ringbuffer example");
println!("Dumping debug logs\n");
for s in gst::log::ring_buffer_logger_get_logs().iter() {
println!("{s}\n------------------");
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}
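Enabling gst::DebugLevel::Log for every category, as above, fills the 1KB per thread very quickly. An alternative is to keep the default threshold quiet and raise it only for the categories of interest; a minimal sketch of that setup (the helper name and the category names, e.g. "basesrc" and "GST_STATES", are just examples, any registered debug category works):

fn configure_ring_buffer_thresholds() {
    gst::log::remove_default_log_function();
    gst::log::add_ring_buffer_logger(1024, 10);
    // Keep the global threshold quiet...
    gst::log::set_default_threshold(gst::DebugLevel::Warning);
    // ...and turn up only the categories of interest.
    gst::log::set_threshold_for_name("basesrc", gst::DebugLevel::Log);
    gst::log::set_threshold_for_name("GST_STATES", gst::DebugLevel::Debug);
}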

View file

@ -1,57 +1,39 @@
// This example demonstrates the use of the decodebin element
// The decodebin element tries to automatically detect the incoming
// format and to autoplug the appropriate demuxers / decoders to handle it
// and decode it to raw audio, video or subtitles.
// Before the pipeline has been prerolled, the decodebin can't possibly know what
// format it gets as its input. So at first, the pipeline looks like this:
#[macro_use]
extern crate gstreamer as gst;
use gst::prelude::*;
// {filesrc} - {decodebin}
extern crate glib;
// As soon as the decodebin has detected the stream format, it will try to decode every
// contained stream to its raw format.
// The application connects a signal-handler to decodebin's pad-added signal, which tells us
// whenever the decodebin provided us with another contained (raw) stream from the input file.
use std::env;
use std::error::Error as StdError;
#[cfg(feature = "v1_10")]
use std::sync::{Arc, Mutex};
// This application supports audio and video streams. Video streams are
// displayed using an autovideosink, and audio streams are played back using an autoaudiosink.
// So for a file that contains one audio and one video stream,
// the pipeline looks like the following:
extern crate failure;
use failure::Error;
// /-[audio]-{audioconvert}-{audioresample}-{autoaudiosink}
// {filesrc}-{decodebin}-|
// \-[video]-{videoconvert}-{videoscale}-{autovideosink}
// Both auto-sinks at the end automatically select the best available (actual) sink. Since the
// selection of available actual sinks is platform specific
// (like using pulseaudio for audio output on linux, e.g.),
// we need to add the audioconvert and audioresample elements before handing the stream to the
// autoaudiosink, because we need to make sure that the stream is always supported by the actual sink.
// Windows APIs in particular tend to be quite picky about sample rate and sample format.
// The same applies to video streams.
use std::{
env,
sync::{Arc, Mutex},
};
use anyhow::Error;
use derive_more::{Display, Error};
use gst::{element_error, element_warning, prelude::*};
#[macro_use]
extern crate failure_derive;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
}
#[derive(Debug, Fail)]
#[fail(display = "Missing element {}", _0)]
struct MissingElement(&'static str);
#[derive(Clone, Debug, glib::Boxed)]
#[boxed_type(name = "ErrorValue")]
struct ErrorValue(Arc<Mutex<Option<Error>>>);
#[derive(Debug, Fail)]
#[fail(
display = "Received error from {}: {} (debug: {:?})",
src, error, debug
)]
struct ErrorMessage {
src: String,
error: String,
debug: Option<String>,
#[cause]
cause: glib::Error,
}
fn example_main() -> Result<(), Error> {
gst::init()?;
@ -64,52 +46,37 @@ fn example_main() -> Result<(), Error> {
std::process::exit(-1)
};
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("filesrc")
.property("location", uri)
.build()?;
let decodebin = gst::ElementFactory::make("decodebin").build()?;
let pipeline = gst::Pipeline::new(None);
let src = gst::ElementFactory::make("filesrc", None).ok_or(MissingElement("filesrc"))?;
let decodebin =
gst::ElementFactory::make("decodebin", None).ok_or(MissingElement("decodebin"))?;
pipeline.add_many([&src, &decodebin])?;
gst::Element::link_many([&src, &decodebin])?;
src.set_property("location", &uri)?;
pipeline.add_many(&[&src, &decodebin])?;
gst::Element::link_many(&[&src, &decodebin])?;
// Need to move a new reference into the closure.
// !!ATTENTION!!:
// It might seem appealing to use pipeline.clone() here, because that greatly
// simplifies the code within the callback. What this actually does, however, is create
// a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
// Storing this strong reference within the callback (we are moving it in!), which is
// in turn stored in another strong reference on the pipeline, creates a
// reference cycle.
// DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
let pipeline_weak = pipeline.downgrade();
// Connect to decodebin's pad-added signal, that is emitted whenever
// it found another stream from the input file and found a way to decode it to its raw format.
// decodebin automatically adds a src-pad for this raw stream, which
// we can use to build the follow-up pipeline.
decodebin.connect_pad_added(move |dbin, src_pad| {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let Some(pipeline) = pipeline_weak.upgrade() else {
return;
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return,
};
// Try to detect whether the raw stream decodebin provided us with
// just now is either audio or video (or none of both, e.g. subtitles).
let (is_audio, is_video) = {
let media_type = src_pad.current_caps().and_then(|caps| {
caps.structure(0).map(|s| {
let name = s.name();
let media_type = src_pad.get_current_caps().and_then(|caps| {
caps.get_structure(0).map(|s| {
let name = s.get_name();
(name.starts_with("audio/"), name.starts_with("video/"))
})
});
match media_type {
None => {
element_warning!(
gst_element_warning!(
dbin,
gst::CoreError::Negotiation,
("Failed to get media type from pad {}", src_pad.name())
("Failed to get media type from pad {}", src_pad.get_name())
);
return;
@ -118,42 +85,36 @@ fn example_main() -> Result<(), Error> {
}
};
// We create a closure here, calling it directly below it, because this greatly
// improves readability for error-handling. Like this, we can simply use the
// ?-operator within the closure, and handle the actual error down below where
// we call the insert_sink(..) closure.
let insert_sink = |is_audio, is_video| -> Result<(), Error> {
if is_audio {
// decodebin found a raw audiostream, so we build the follow-up pipeline to
// play it on the default audio playback device (using autoaudiosink).
let queue = gst::ElementFactory::make("queue").build()?;
let convert = gst::ElementFactory::make("audioconvert").build()?;
let resample = gst::ElementFactory::make("audioresample").build()?;
let sink = gst::ElementFactory::make("autoaudiosink").build()?;
let queue =
gst::ElementFactory::make("queue", None).ok_or(MissingElement("queue"))?;
let convert = gst::ElementFactory::make("audioconvert", None)
.ok_or(MissingElement("audioconvert"))?;
let resample = gst::ElementFactory::make("audioresample", None)
.ok_or(MissingElement("audioresample"))?;
let sink = gst::ElementFactory::make("autoaudiosink", None)
.ok_or(MissingElement("autoaudiosink"))?;
let elements = &[&queue, &convert, &resample, &sink];
pipeline.add_many(elements)?;
gst::Element::link_many(elements)?;
// !!ATTENTION!!:
// This is quite important and people forget it often. Without making sure that
// the new elements have the same state as the pipeline, things will fail later.
// They would still be in Null state and can't process data.
for e in elements {
e.sync_state_with_parent()?;
}
// Get the queue element's sink pad and link the decodebin's newly created
// src pad for the audio stream to it.
let sink_pad = queue.static_pad("sink").expect("queue has no sinkpad");
src_pad.link(&sink_pad)?;
let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
src_pad.link(&sink_pad).into_result()?;
} else if is_video {
// decodebin found a raw videostream, so we build the follow-up pipeline to
// display it using the autovideosink.
let queue = gst::ElementFactory::make("queue").build()?;
let convert = gst::ElementFactory::make("videoconvert").build()?;
let scale = gst::ElementFactory::make("videoscale").build()?;
let sink = gst::ElementFactory::make("autovideosink").build()?;
let queue =
gst::ElementFactory::make("queue", None).ok_or(MissingElement("queue"))?;
let convert = gst::ElementFactory::make("videoconvert", None)
.ok_or(MissingElement("videoconvert"))?;
let scale = gst::ElementFactory::make("videoscale", None)
.ok_or(MissingElement("videoscale"))?;
let sink = gst::ElementFactory::make("autovideosink", None)
.ok_or(MissingElement("autovideosink"))?;
let elements = &[&queue, &convert, &scale, &sink];
pipeline.add_many(elements)?;
@ -163,106 +124,110 @@ fn example_main() -> Result<(), Error> {
e.sync_state_with_parent()?
}
// Get the queue element's sink pad and link the decodebin's newly created
// src pad for the video stream to it.
let sink_pad = queue.static_pad("sink").expect("queue has no sinkpad");
src_pad.link(&sink_pad)?;
let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
src_pad.link(&sink_pad).into_result()?;
}
Ok(())
};
// When adding and linking new elements in a callback fails, error information is often sparse.
// GStreamer's built-in debugging can be hard to link back to the exact position within the code
// that failed. Since callbacks are called from random threads within the pipeline, it can get hard
// to get good error information. The macros used in the following can solve that. With the use
// of those, one can send arbitrary rust types (using the pipeline's bus) into the mainloop.
// What we send here is unpacked down below, in the iteration-code over sent bus-messages.
// Because we are using the failure crate for error details here, we even get a backtrace for
// where the error was constructed. (If RUST_BACKTRACE=1 is set)
if let Err(err) = insert_sink(is_audio, is_video) {
// The following sends a message of type Error on the bus, containing our detailed
// error information.
element_error!(
#[cfg(feature = "v1_10")]
gst_element_error!(
dbin,
gst::LibraryError::Failed,
("Failed to insert sink"),
details: gst::Structure::builder("error-details")
.field("error",
&ErrorValue(Arc::new(Mutex::new(Some(err)))))
&glib::AnySendValue::new(Arc::new(Mutex::new(Some(err)))))
.build()
);
#[cfg(not(feature = "v1_10"))]
gst_element_error!(
dbin,
gst::LibraryError::Failed,
("Failed to insert sink"),
["{}", err]
);
}
});
pipeline.set_state(gst::State::Playing)?;
pipeline.set_state(gst::State::Playing).into_result()?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
// This code iterates over all messages that are sent across our pipeline's bus.
// In the callback ("pad-added" on the decodebin), we sent better error information
// using a bus message. This is the position where we get those messages and log
// the contained information.
for msg in bus.iter_timed(gst::ClockTime::NONE) {
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
pipeline.set_state(gst::State::Null)?;
pipeline.set_state(gst::State::Null).into_result()?;
match err.details() {
// This bus-message of type error contained our custom error-details struct
// that we sent in the pad-added callback above. So we unpack it and log
// the detailed error information here. details contains a glib::SendValue.
// The unpacked error is then converted to a Result::Err, stopping the
// application's execution.
Some(details) if details.name() == "error-details" => details
.get::<&ErrorValue>("error")
.unwrap()
.clone()
.0
.lock()
.unwrap()
.take()
.map(Result::Err)
.expect("error-details message without actual error"),
_ => Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
}
.into()),
}?;
#[cfg(feature = "v1_10")]
{
match err.get_details() {
Some(details) if details.get_name() == "error-details" => details
.get::<&glib::AnySendValue>("error")
.cloned()
.and_then(|v| {
v.downcast_ref::<Arc<Mutex<Option<Error>>>>()
.and_then(|v| v.lock().unwrap().take())
})
.map(Result::Err)
.expect("error-details message without actual error"),
_ => Err(ErrorMessage {
src: err
.get_src()
.map(|s| s.get_path_string())
.unwrap_or_else(|| String::from("None")),
error: err.get_error().description().into(),
debug: err.get_debug(),
cause: err.get_error(),
}
.into()),
}?;
}
#[cfg(not(feature = "v1_10"))]
{
Err(ErrorMessage {
src: err
.get_src()
.map(|s| s.get_path_string())
.unwrap_or_else(|| String::from("None")),
error: err.get_error().description().into(),
debug: err.get_debug(),
cause: err.get_error(),
})?;
}
break;
}
MessageView::StateChanged(s) => {
println!(
"State changed from {:?}: {:?} -> {:?} ({:?})",
s.src().map(|s| s.path_string()),
s.old(),
s.current(),
s.pending()
s.get_src().map(|s| s.get_path_string()),
s.get_old(),
s.get_current(),
s.get_pending()
);
}
_ => (),
}
}
pipeline.set_state(gst::State::Null)?;
pipeline.set_state(gst::State::Null).into_result()?;
Ok(())
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}


@@ -1,33 +1,35 @@
// This example uses gstreamer's discoverer api
// https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/GstDiscoverer.html
// to detect as much information as possible from a given URI.
// The amount of time that the discoverer is allowed to use is limited by a timeout.
// This allows handling e.g. network problems gracefully. When the timeout expires before
// the discoverer was able to detect anything, it will report an error.
// In this example, we catch this error and stop the application.
// Discovered information could for example contain the stream's duration or whether it is
// seekable (filesystem) or not (some http servers).
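// As a minimal illustration of those two properties (a sketch, not part of the
// example; it assumes the newer API names seen on one side of this diff, where
// duration() returns an Option<gst::ClockTime>):
fn print_seekability(info: &DiscovererInfo) {
    // Duration of the discovered media, if it could be determined.
    println!("Duration: {:?}", info.duration());
    // Whether the source supports seeking (e.g. a local file, but
    // typically not a plain HTTP stream).
    println!("Seekable: {}", info.is_seekable());
}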
extern crate gstreamer as gst;
extern crate gstreamer_pbutils as pbutils;
use pbutils::prelude::*;
use pbutils::DiscovererInfo;
use pbutils::DiscovererStreamInfo;
extern crate glib;
extern crate failure;
use failure::Error;
#[macro_use]
extern crate failure_derive;
use std::env;
use anyhow::Error;
use derive_more::{Display, Error};
use gst_pbutils::{prelude::*, DiscovererInfo, DiscovererStreamInfo};
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Discoverer error {_0}")]
struct DiscovererError(#[error(not(source))] &'static str);
#[derive(Debug, Fail)]
#[fail(display = "Discoverer error {}", _0)]
struct DiscovererError(&'static str);
fn print_tags(info: &DiscovererInfo) {
println!("Tags:");
let tags = info.tags();
let tags = info.get_tags();
match tags {
Some(taglist) => {
println!(" {taglist}"); // FIXME use an iterator
println!(" {}", taglist.to_string()); // FIXME use an iterator
}
None => {
println!(" no tags");
@@ -37,29 +39,31 @@ fn print_tags(info: &DiscovererInfo) {
fn print_stream_info(stream: &DiscovererStreamInfo) {
println!("Stream: ");
if let Some(stream_id) = stream.stream_id() {
println!(" Stream id: {}", stream_id);
match stream.get_stream_id() {
Some(id) => println!(" Stream id: {}", id),
None => {}
}
let caps_str = match stream.caps() {
let caps_str = match stream.get_caps() {
Some(caps) => caps.to_string(),
None => String::from("--"),
};
println!(" Format: {caps_str}");
println!(" Format: {}", caps_str);
}
fn print_discoverer_info(info: &DiscovererInfo) -> Result<(), Error> {
println!("URI: {}", info.uri());
println!("Duration: {}", info.duration().display());
let uri = info
.get_uri()
.ok_or(DiscovererError("URI should not be null"))?;
println!("URI: {}", uri);
println!("Duration: {}", info.get_duration());
print_tags(info);
print_stream_info(
&info
.stream_info()
.get_stream_info()
.ok_or(DiscovererError("Error while obtaining stream info"))?,
);
let children = info.stream_list();
let children = info.get_stream_list();
println!("Children streams:");
for child in children {
print_stream_info(&child);
@@ -80,7 +84,7 @@ fn run_discoverer() -> Result<(), Error> {
};
let timeout: gst::ClockTime = gst::ClockTime::from_seconds(15);
let discoverer = gst_pbutils::Discoverer::new(timeout)?;
let discoverer = pbutils::Discoverer::new(timeout)?;
let info = discoverer.discover_uri(uri)?;
print_discoverer_info(&info)?;
Ok(())
@@ -89,12 +93,12 @@ fn run_discoverer() -> Result<(), Error> {
fn example_main() {
match run_discoverer() {
Ok(_) => (),
Err(e) => eprintln!("Error: {e}"),
Err(e) => eprintln!("Error: {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}


@@ -1,71 +1,66 @@
// This example demonstrates the use of the encodebin element.
// The example takes an arbitrary URI as input, which it will try to decode
// and finally reencode using the encodebin element.
// For more information about how the decodebin element works, have a look at
// the decodebin-example.
// Since we tell the encodebin what format we want to get out of it from the start,
// it provides the correct caps and we can link it before starting the pipeline.
// After the decodebin has found all streams and we have piped them into the encodebin,
// the resulting pipeline looks as follows:
#[macro_use]
extern crate gstreamer as gst;
use gst::prelude::*;
// /-{queue}-{audioconvert}-{audioresample}-\
// {uridecodebin} -| {encodebin}-{filesink}
// \-{queue}-{videoconvert}-{videoscale}----/
use std::{
env,
sync::{Arc, Mutex},
};
use anyhow::Error;
use derive_more::{Display, Error};
use gst::{element_error, element_warning};
extern crate gstreamer_pbutils as gst_pbutils;
use gst_pbutils::prelude::*;
extern crate glib;
use std::env;
use std::error::Error as StdError;
#[cfg(feature = "v1_10")]
use std::sync::{Arc, Mutex};
extern crate failure;
use failure::Error;
#[macro_use]
extern crate failure_derive;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[derive(Debug, Fail)]
#[fail(display = "Missing element {}", _0)]
struct MissingElement(&'static str);
#[derive(Debug, Fail)]
#[fail(
display = "Received error from {}: {} (debug: {:?})",
src, error, debug
)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
#[cause]
cause: glib::Error,
}
#[derive(Clone, Debug, glib::Boxed)]
#[boxed_type(name = "ErrorValue")]
struct ErrorValue(Arc<Mutex<Option<Error>>>);
fn configure_encodebin(encodebin: &gst::Element) -> Result<(), Error> {
let audio_profile = gst_pbutils::EncodingAudioProfileBuilder::new()
.format(&gst::Caps::new_simple("audio/x-vorbis", &[]))
.presence(0)
.build()?;
fn configure_encodebin(encodebin: &gst::Element) {
// To tell the encodebin what we want it to produce, we create an EncodingProfile
// https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/GstEncodingProfile.html
// This profile consists of information about the contained audio and video formats
// as well as the container format we want everything to be combined into.
let video_profile = gst_pbutils::EncodingVideoProfileBuilder::new()
.format(&gst::Caps::new_simple("video/x-theora", &[]))
.presence(0)
.build()?;
// Every audiostream piped into the encodebin should be encoded using vorbis.
let audio_profile =
gst_pbutils::EncodingAudioProfile::builder(&gst::Caps::builder("audio/x-vorbis").build())
.presence(0)
.build();
let container_profile = gst_pbutils::EncodingContainerProfileBuilder::new()
.name("container")
.format(&gst::Caps::new_simple("video/x-matroska", &[]))
.add_profile(&(video_profile))
.add_profile(&(audio_profile))
.build()?;
// Every videostream piped into the encodebin should be encoded using theora.
let video_profile =
gst_pbutils::EncodingVideoProfile::builder(&gst::Caps::builder("video/x-theora").build())
.presence(0)
.build();
encodebin
.set_property("profile", &container_profile)
.expect("set profile property failed");
// All streams are then finally combined into a matroska container.
let container_profile = gst_pbutils::EncodingContainerProfile::builder(
&gst::Caps::builder("video/x-matroska").build(),
)
.name("container")
.add_profile(video_profile)
.add_profile(audio_profile)
.build();
// Finally, apply the EncodingProfile onto our encodebin element.
encodebin.set_property("profile", &container_profile);
Ok(())
}
fn example_main() -> Result<(), Error> {
@@ -83,61 +78,47 @@ fn example_main() -> Result<(), Error> {
std::process::exit(-1)
};
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("uridecodebin")
.property("uri", uri)
.build()?;
let encodebin = gst::ElementFactory::make("encodebin").build()?;
let sink = gst::ElementFactory::make("filesink")
.property("location", output_file)
.build()?;
let pipeline = gst::Pipeline::new(None);
let src =
gst::ElementFactory::make("uridecodebin", None).ok_or(MissingElement("uridecodebin"))?;
let encodebin =
gst::ElementFactory::make("encodebin", None).ok_or(MissingElement("encodebin"))?;
let sink = gst::ElementFactory::make("filesink", None).ok_or(MissingElement("filesink"))?;
// Configure the encodebin.
// Here we tell the bin what format we expect it to create at its output.
configure_encodebin(&encodebin);
src.set_property("uri", &uri)
.expect("setting URI Property failed");
sink.set_property("location", &output_file)
.expect("setting location property failed");
configure_encodebin(&encodebin)?;
pipeline
.add_many([&src, &encodebin, &sink])
.add_many(&[&src, &encodebin, &sink])
.expect("failed to add elements to pipeline");
// It is clear from the start that encodebin has only one src pad, so we can
// directly link it to our filesink without problems.
// The caps of encodebin's src-pad are set after we configured the encoding-profile.
// (But filesink doesn't really care about the caps at its input anyway)
gst::Element::link_many([&encodebin, &sink])?;
gst::Element::link_many(&[&encodebin, &sink])?;
// Need to move a new reference into the closure.
// !!ATTENTION!!:
// It might seem appealing to use pipeline.clone() here, because that greatly
// simplifies the code within the callback. What that would actually do, however, is create
// a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
// Storing this strong reference of the pipeline within the callback (we are moving it in!),
// which is in turn stored in another strong reference on the pipeline, creates a
// reference cycle.
// DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
let pipeline_weak = pipeline.downgrade();
// Much of the following is the same code as in the decodebin example
// so if you want more information on that front, have a look there.
// Need to move a new reference into the closure
let pipeline_clone = pipeline.clone();
src.connect_pad_added(move |dbin, dbin_src_pad| {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let Some(pipeline) = pipeline_weak.upgrade() else {
return;
};
let pipeline = &pipeline_clone;
let (is_audio, is_video) = {
let media_type = dbin_src_pad.current_caps().and_then(|caps| {
caps.structure(0).map(|s| {
let name = s.name();
let media_type = dbin_src_pad.get_current_caps().and_then(|caps| {
caps.get_structure(0).map(|s| {
let name = s.get_name();
(name.starts_with("audio/"), name.starts_with("video/"))
})
});
match media_type {
None => {
element_warning!(
gst_element_warning!(
dbin,
gst::CoreError::Negotiation,
("Failed to get media type from pad {}", dbin_src_pad.name())
(
"Failed to get media type from pad {}",
dbin_src_pad.get_name()
)
);
return;
@@ -148,9 +129,12 @@ fn example_main() -> Result<(), Error> {
let link_to_encodebin = |is_audio, is_video| -> Result<(), Error> {
if is_audio {
let queue = gst::ElementFactory::make("queue").build()?;
let convert = gst::ElementFactory::make("audioconvert").build()?;
let resample = gst::ElementFactory::make("audioresample").build()?;
let queue =
gst::ElementFactory::make("queue", None).ok_or(MissingElement("queue"))?;
let convert = gst::ElementFactory::make("audioconvert", None)
.ok_or(MissingElement("audioconvert"))?;
let resample = gst::ElementFactory::make("audioresample", None)
.ok_or(MissingElement("audioresample"))?;
let elements = &[&queue, &convert, &resample];
pipeline
@@ -158,27 +142,27 @@ fn example_main() -> Result<(), Error> {
.expect("failed to add audio elements to pipeline");
gst::Element::link_many(elements)?;
// Request a sink pad from our encodebin that can handle a raw audiostream.
// The encodebin will then automatically create an internal pipeline that encodes
// the audio stream in the format we specified in the EncodingProfile.
let enc_sink_pad = encodebin
.request_pad_simple("audio_%u")
.get_request_pad("audio_%u")
.expect("Could not get audio pad from encodebin");
let src_pad = resample.static_pad("src").expect("resample has no srcpad");
src_pad.link(&enc_sink_pad)?;
let src_pad = resample
.get_static_pad("src")
.expect("resample has no srcpad");
src_pad.link(&enc_sink_pad).into_result()?;
for e in elements {
e.sync_state_with_parent()?;
}
// Get the queue element's sink pad and link the decodebin's newly created
// src pad for the audio stream to it.
let sink_pad = queue.static_pad("sink").expect("queue has no sinkpad");
dbin_src_pad.link(&sink_pad)?;
let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
dbin_src_pad.link(&sink_pad).into_result()?;
} else if is_video {
let queue = gst::ElementFactory::make("queue").build()?;
let convert = gst::ElementFactory::make("videoconvert").build()?;
let scale = gst::ElementFactory::make("videoscale").build()?;
let queue =
gst::ElementFactory::make("queue", None).ok_or(MissingElement("queue"))?;
let convert = gst::ElementFactory::make("videoconvert", None)
.ok_or(MissingElement("videoconvert"))?;
let scale = gst::ElementFactory::make("videoscale", None)
.ok_or(MissingElement("videoscale"))?;
let elements = &[&queue, &convert, &scale];
pipeline
@@ -186,100 +170,122 @@ fn example_main() -> Result<(), Error> {
.expect("failed to add video elements to pipeline");
gst::Element::link_many(elements)?;
// Request a sink pad from our encodebin that can handle a raw videostream.
// The encodebin will then automatically create an internal pipeline that encodes
// the video stream in the format we specified in the EncodingProfile.
let enc_sink_pad = encodebin
.request_pad_simple("video_%u")
.get_request_pad("video_%u")
.expect("Could not get video pad from encodebin");
let src_pad = scale.static_pad("src").expect("videoscale has no srcpad");
src_pad.link(&enc_sink_pad)?;
let src_pad = scale
.get_static_pad("src")
.expect("videoscale has no srcpad");
src_pad.link(&enc_sink_pad).into_result()?;
for e in elements {
e.sync_state_with_parent()?
}
// Get the queue element's sink pad and link the decodebin's newly created
// src pad for the video stream to it.
let sink_pad = queue.static_pad("sink").expect("queue has no sinkpad");
dbin_src_pad.link(&sink_pad)?;
let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
dbin_src_pad.link(&sink_pad).into_result()?;
}
Ok(())
};
if let Err(err) = link_to_encodebin(is_audio, is_video) {
element_error!(
#[cfg(feature = "v1_10")]
gst_element_error!(
dbin,
gst::LibraryError::Failed,
("Failed to insert sink"),
details: gst::Structure::builder("error-details")
.field("error",
&ErrorValue(Arc::new(Mutex::new(Some(err)))))
&glib::AnySendValue::new(Arc::new(Mutex::new(Some(err)))))
.build()
);
#[cfg(not(feature = "v1_10"))]
gst_element_error!(
dbin,
gst::LibraryError::Failed,
("Failed to insert sink"),
["{}", err]
);
}
});
pipeline.set_state(gst::State::Playing)?;
pipeline.set_state(gst::State::Playing).into_result()?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
pipeline.set_state(gst::State::Null)?;
pipeline.set_state(gst::State::Null).into_result()?;
match err.details() {
Some(details) if details.name() == "error-details" => details
.get::<&ErrorValue>("error")
.unwrap()
.clone()
.0
.lock()
.unwrap()
.take()
.map(Result::Err)
.expect("error-details message without actual error"),
_ => Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
}
.into()),
}?;
#[cfg(feature = "v1_10")]
{
match err.get_details() {
Some(details) if details.get_name() == "error-details" => details
.get::<&glib::AnySendValue>("error")
.cloned()
.and_then(|v| {
v.downcast_ref::<Arc<Mutex<Option<Error>>>>()
.and_then(|v| v.lock().unwrap().take())
})
.map(Result::Err)
.expect("error-details message without actual error"),
_ => Err(ErrorMessage {
src: err
.get_src()
.map(|s| s.get_path_string())
.unwrap_or_else(|| String::from("None")),
error: err.get_error().description().into(),
debug: err.get_debug(),
cause: err.get_error(),
}
.into()),
}?;
}
#[cfg(not(feature = "v1_10"))]
{
Err(ErrorMessage {
src: err
.get_src()
.map(|s| s.get_path_string())
.unwrap_or_else(|| String::from("None")),
error: err.get_error().description().into(),
debug: err.get_debug(),
cause: err.get_error(),
})?;
}
break;
}
MessageView::StateChanged(s) => {
println!(
"State changed from {:?}: {:?} -> {:?} ({:?})",
s.src().map(|s| s.path_string()),
s.old(),
s.current(),
s.pending()
s.get_src().map(|s| s.get_path_string()),
s.get_old(),
s.get_current(),
s.get_pending()
);
}
_ => (),
}
}
pipeline.set_state(gst::State::Null)?;
pipeline.set_state(gst::State::Null).into_result()?;
Ok(())
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}


@@ -1,26 +1,8 @@
// This example demonstrates how events can be created and sent to the pipeline.
// What this example does is schedule a timeout on the main loop, and
// send an EOS event to the pipeline from there - telling the pipeline
// to shut down. Once that event is processed by everything, the EOS message
// is going to be posted on the bus and we catch that one to shut down everything.
// GStreamer's bus is an abstraction layer above an arbitrary main loop.
// This makes sure that GStreamer can be used in conjunction with any existing
// other framework (GUI frameworks, mostly) that operate their own main loops.
// The main idea behind the bus is to simplify communication between the application and
// GStreamer, because GStreamer is heavily threaded underneath.
// Any thread can post messages to the bus, which is essentially a thread-safe
// queue of messages to process. When a new message is sent to the bus, it
// will wake up the main loop implementation underneath it (which will then
// process the pending messages from the main loop thread).
// An application itself can post messages to the bus as well.
// This makes it possible, e.g., to schedule an arbitrary piece of code
// to run in the main loop thread - avoiding potential threading issues.
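// To make that last point concrete, here is a minimal sketch (not part of
// this example; it assumes the newer API names seen on one side of this
// diff): any thread may post a custom application message, and the bus
// watch below will then receive it on the main loop thread.
fn post_custom_message(pipeline: &gst::Pipeline) {
    let s = gst::Structure::builder("example-custom-event")
        .field("reason", "demo")
        .build();
    // post_message hands the message to the pipeline's bus.
    let _ = pipeline.post_message(gst::message::Application::new(s));
}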
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate glib;
#[path = "../examples-common.rs"]
mod examples_common;
@@ -29,104 +11,64 @@ fn example_main() {
let main_loop = glib::MainLoop::new(None, false);
// This creates a pipeline by parsing the gst-launch pipeline syntax.
let pipeline = gst::parse::launch("audiotestsrc ! fakesink").unwrap();
let bus = pipeline.bus().unwrap();
let pipeline = gst::parse_launch("audiotestsrc ! fakesink").unwrap();
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let ret = pipeline.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
// Need to move a new reference into the closure.
// !!ATTENTION!!:
// It might seem appealing to use pipeline.clone() here, because that greatly
// simplifies the code within the callback. What that would actually do, however, is create
// a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
// Storing this strong reference of the pipeline within the callback (we are moving it in!),
// which is in turn stored in another strong reference on the pipeline, creates a
// reference cycle.
// DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
let pipeline_weak = pipeline.downgrade();
// Add a timeout to the main loop. This closure will be executed
// at an interval of 5 seconds. The return value of the handler function
// determines whether the handler still wants to be called:
// - glib::ControlFlow::Break - stop calling this handler, remove timeout
// - glib::ControlFlow::Continue - continue calling this handler
glib::timeout_add_seconds(5, move || {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let Some(pipeline) = pipeline_weak.upgrade() else {
return glib::ControlFlow::Break;
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return glib::Continue(false),
};
println!("sending eos");
// We create an EndOfStream event here that tells all elements to drain
// their internal buffers to their following elements, essentially draining the
// whole pipeline (front to back). This ensures that no data is left unhandled and that
// headers are rewritten where needed (e.g. when using something like an MP4 or Matroska muxer).
// The EOS event is handled directly from this very thread until the first
// queue element is reached during pipeline-traversal, where it is then queued
// up and later handled from the queue's streaming thread for the elements
// following that queue.
// Once all sinks are done handling the EOS event (and all buffers that were before the
// EOS event in the pipeline already), the pipeline would post an EOS message on the bus,
// essentially telling the application that the pipeline is completely drained.
pipeline.send_event(gst::event::Eos::new());
let ev = gst::Event::new_eos().build();
pipeline.send_event(ev);
// Remove this handler; the pipeline will shut down anyway, now that we
// sent the EOS event.
glib::ControlFlow::Break
glib::Continue(false)
});
//bus.add_signal_watch();
//bus.connect_message(None, move |_, msg| {
//bus.connect_message(move |_, msg| {
let main_loop_clone = main_loop.clone();
// This sets the bus's signal handler (don't be misled by the "add", there can only be one).
// Every message from the bus is passed through this function. Its return value determines
// whether the handler wants to be called again. If glib::ControlFlow::Break is returned, the
// handler is removed and will never be called again. The mainloop still runs though.
let _bus_watch = bus
.add_watch(move |_, msg| {
use gst::MessageView;
bus.add_watch(move |_, msg| {
use gst::MessageView;
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => {
println!("received eos");
// An EndOfStream event was sent to the pipeline, so we tell our main loop
// to stop execution here.
main_loop.quit()
}
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
main_loop.quit();
}
_ => (),
};
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => {
println!("received eos");
main_loop.quit()
}
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
main_loop.quit();
}
_ => (),
};
// Tell the mainloop to continue executing this callback.
glib::ControlFlow::Continue
})
.expect("Failed to add bus watch");
glib::Continue(true)
});
// Operate GStreamer's bus, facilitating GLib's mainloop here.
// This function call will block until you tell the mainloop to quit
// (see above for how to do this).
main_loop.run();
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
bus.remove_watch();
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}


@@ -1,481 +0,0 @@
// This example demonstrates the use of the FdMemory allocator.
// It operates the following two pipelines:
// sender: {videotestsrc} - {appsink}
// receiver: {appsrc} - {FdMemoryVideoFilter} - {videoconvert} - {queue} - {autovideosink}
// The sender creates shared memory files from the appsink which are sent
// to the receiver using a unix domain socket.
// The receiver creates buffers in the appsrc using the FdMemoryAllocator from
// the received file descriptors.
// In addition to demonstrating how the FdMemoryAllocator can be used to share
// file descriptors, the example implements a custom VideoFilter demonstrating
// how the file descriptor of FdMemory can be accessed in a pipeline.
// Note that instead of manually mapping the file descriptor, it is also possible
// to use map_writable, which will map the file descriptor as well.
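// A short sketch of that alternative (not part of the example; `buffer`
// stands for any writable gst::Buffer, including one backed by FdMemory):
// map_writable hides the mmap of the underlying descriptor behind the
// usual buffer-map API.
fn fill_buffer(buffer: &mut gst::Buffer) -> Result<(), gst::glib::BoolError> {
    let buffer = buffer.get_mut().expect("buffer must be writable");
    // For FdMemory this maps the file descriptor for us.
    let mut map = buffer.map_writable()?;
    map.as_mut_slice().fill(0);
    Ok(())
}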
use std::{
os::unix::{net::UnixStream, prelude::AsRawFd},
sync::{Arc, Mutex},
};
use anyhow::Error;
use futures::StreamExt;
use gst::{element_error, prelude::*};
use memmap2::MmapMut;
use uds::UnixStreamExt;
#[path = "../examples-common.rs"]
mod examples_common;
fn create_receiver_pipeline(
video_info: &gst_video::VideoInfo,
receiver: UnixStream,
) -> Result<gst::Pipeline, Error> {
let caps = video_info.to_caps()?;
let pipeline = gst::Pipeline::default();
let src = gst_app::AppSrc::builder()
.caps(&caps)
.do_timestamp(true)
.is_live(true)
.build();
let filter = video_filter::FdMemoryFadeInVideoFilter::default().upcast::<gst::Element>();
let convert = gst::ElementFactory::make("videoconvert").build()?;
let queue = gst::ElementFactory::make("queue").build()?;
let sink = gst::ElementFactory::make("autovideosink").build()?;
pipeline.add_many([src.upcast_ref(), &filter, &convert, &queue, &sink])?;
gst::Element::link_many([src.upcast_ref(), &filter, &convert, &queue, &sink])?;
let fd_allocator = gst_allocators::FdAllocator::new();
let video_info = video_info.clone();
let mut fd_buf = [-1; 253];
src.set_callbacks(
gst_app::AppSrcCallbacks::builder()
.need_data(move |appsrc, _| {
// Read the next fds from the socket; if 0
// is returned, the sender has closed the stream,
// which is handled as EOS here.
let fds = match receiver.recv_fds(&mut [0u8; 1], &mut fd_buf) {
Ok((_, 0)) => {
let _ = appsrc.end_of_stream();
return;
}
Ok((_, fds)) => fds,
Err(err) => {
gst::error_msg!(
gst::StreamError::Failed,
("failed to receive fds: {}", err)
);
return;
}
};
for fd in &fd_buf[0..fds] {
// Allocate a new FdMemory for the received file descriptor.
// It is important that the size matches the size of the
// actual backing storage. In this example we just use the
// same video info on both sides, sending and receiving.
// Pass FdMemoryFlags::NONE to make the FdMemory take
// ownership of the passed file descriptor. The file descriptor
// will be closed when the memory is released.
let memory = unsafe {
fd_allocator
.alloc(*fd, video_info.size(), gst_allocators::FdMemoryFlags::NONE)
.unwrap()
};
let mut buffer = gst::Buffer::new();
let buffer_mut = buffer.make_mut();
buffer_mut.append_memory(memory);
let _ = appsrc.push_buffer(buffer);
}
})
.build(),
);
Ok(pipeline)
}
fn create_sender_pipeline(
video_info: &gst_video::VideoInfo,
sender: UnixStream,
) -> Result<gst::Pipeline, Error> {
let sender = Arc::new(Mutex::new(sender));
let caps = video_info.to_caps()?;
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("videotestsrc")
.property("num-buffers", 250i32)
.build()?;
let sink = gst::ElementFactory::make("appsink").build()?;
sink.downcast_ref::<gst_app::AppSink>()
.ok_or_else(|| anyhow::anyhow!("is not an appsink"))?
.set_caps(Some(&caps));
pipeline.add_many([&src, &sink])?;
gst::Element::link_many([&src, &sink])?;
let appsink = sink
.downcast::<gst_app::AppSink>()
.map_err(|_| anyhow::anyhow!("is not an appsink"))?;
appsink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
// Add a handler to the "eos" signal
.eos({
let sender = sender.clone();
move |_| {
// Close the sender part of the UnixSocket pair; this will automatically
// create an EOS in the receiving part.
let _ = sender.lock().unwrap().shutdown(std::net::Shutdown::Write);
}
})
// Add a handler to the "new-sample" signal.
.new_sample(move |appsink| {
// Pull the sample in question out of the appsink's buffer.
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or_else(|| {
element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to get buffer from appsink")
);
gst::FlowError::Error
})?;
if buffer.n_memory() != 1 {
element_error!(
appsink,
gst::ResourceError::Failed,
("Expected buffer with single memory")
);
return Err(gst::FlowError::Error);
}
let mem = buffer.peek_memory(0);
// We can use downcast_memory_ref to check if the provided
// memory is allocated by FdMemoryAllocator or a subtype of it.
// Note: This is not used in the example, we will always copy
// the memory to a new shared memory file.
if let Some(fd_memory) = mem.downcast_memory_ref::<gst_allocators::FdMemory>() {
// As we already got a fd we can just directly send it over the socket.
// NOTE: Synchronization is left out of this example, in a real world
// application access to the memory should be synchronized.
// For example wayland provides a release callback to signal that
// the memory is no longer in use.
sender
.lock()
.unwrap()
.send_fds(&[0u8; 1], &[fd_memory.fd()])
.map_err(|_| {
element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to send fd over unix stream")
);
gst::FlowError::Error
})?;
} else {
// At this point, buffer is only a reference to an existing memory region somewhere.
// When we want to access its content, we have to map it while requesting the required
// mode of access (read, read/write).
// This type of abstraction is necessary, because the buffer in question might not be
// on the machine's main memory itself, but rather in the GPU's memory.
// So mapping the buffer makes the underlying memory region accessible to us.
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
let map = buffer.map_readable().map_err(|_| {
element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to map buffer readable")
);
gst::FlowError::Error
})?;
// Note: To simplify this example we always create a new shared memory file instead
// of using a pool of buffers. When using a pool we need to make sure access to the
// file is synchronized.
let opts = memfd::MemfdOptions::default().allow_sealing(true);
let mfd = opts.create("gst-examples").map_err(|err| {
element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to allocated fd: {}", err)
);
gst::FlowError::Error
})?;
mfd.as_file().set_len(map.size() as u64).map_err(|err| {
element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to resize fd memory: {}", err)
);
gst::FlowError::Error
})?;
let mut seals = memfd::SealsHashSet::new();
seals.insert(memfd::FileSeal::SealShrink);
seals.insert(memfd::FileSeal::SealGrow);
mfd.add_seals(&seals).map_err(|err| {
element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to add fd seals: {}", err)
);
gst::FlowError::Error
})?;
mfd.add_seal(memfd::FileSeal::SealSeal).map_err(|err| {
element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to add fd seals: {}", err)
);
gst::FlowError::Error
})?;
unsafe {
let mut mmap = MmapMut::map_mut(mfd.as_file()).map_err(|_| {
element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to mmap fd")
);
gst::FlowError::Error
})?;
mmap.copy_from_slice(map.as_slice());
};
sender
.lock()
.unwrap()
.send_fds(&[0u8; 1], &[mfd.as_raw_fd()])
.map_err(|_| {
element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to send fd over unix stream")
);
gst::FlowError::Error
})?;
};
Ok(gst::FlowSuccess::Ok)
})
.build(),
);
Ok(pipeline)
}
async fn message_loop(bus: gst::Bus) {
let mut messages = bus.stream();
while let Some(msg) = messages.next().await {
use gst::MessageView;
// Determine whether we want to quit: on EOS or error message
// we quit, otherwise simply continue.
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
break;
}
_ => (),
};
}
}
fn example_main() -> Result<(), Error> {
gst::init()?;
let video_info = gst_video::VideoInfo::builder(gst_video::VideoFormat::Bgra, 1920, 1080)
.fps(gst::Fraction::new(30, 1))
.build()?;
let (sender, receiver) = std::os::unix::net::UnixStream::pair()?;
let sender_pipeline = create_sender_pipeline(&video_info, sender)?;
let receiver_pipeline = create_receiver_pipeline(&video_info, receiver)?;
let receiver_bus = receiver_pipeline.bus().expect("pipeline without bus");
receiver_pipeline.set_state(gst::State::Playing)?;
let sender_bus = sender_pipeline.bus().expect("pipeline without bus");
sender_pipeline.set_state(gst::State::Playing)?;
futures::executor::block_on(futures::future::join(
message_loop(sender_bus),
message_loop(receiver_bus),
));
sender_pipeline.set_state(gst::State::Null)?;
receiver_pipeline.set_state(gst::State::Null)?;
Ok(())
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
}
}
// The purpose of this custom video filter is to demonstrate how
// the file descriptor of a FdMemory can be accessed.
mod video_filter {
glib::wrapper! {
pub struct FdMemoryFadeInVideoFilter(ObjectSubclass<imp::FdMemoryFadeInVideoFilter>) @extends gst_video::VideoFilter, gst_base::BaseTransform, gst::Element, gst::Object;
}
impl Default for FdMemoryFadeInVideoFilter {
fn default() -> Self {
glib::Object::builder().build()
}
}
mod imp {
use std::{mem::ManuallyDrop, os::unix::prelude::FromRawFd};
use anyhow::Error;
use gst::{subclass::prelude::*, PadDirection, PadPresence, PadTemplate};
use gst_app::gst_base::subclass::BaseTransformMode;
use gst_video::{prelude::*, subclass::prelude::*, VideoFrameRef};
use memmap2::MmapMut;
use once_cell::sync::Lazy;
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"fdmemoryfilter",
gst::DebugColorFlags::empty(),
Some("Example FdMemory filter"),
)
});
#[derive(Debug, Default)]
pub struct FdMemoryFadeInVideoFilter;
impl FdMemoryFadeInVideoFilter {
fn transform_fd_mem_ip(
&self,
frame: &mut VideoFrameRef<&mut gst::BufferRef>,
) -> Result<(), Error> {
let buffer = frame.buffer();
if buffer.n_memory() != 1 {
return Err(anyhow::anyhow!(
"only buffers with single memory are supported"
));
}
let mem = buffer.peek_memory(0);
if !mem.is_memory_type::<gst_allocators::FdMemory>() {
return Err(anyhow::anyhow!("only fd memory is supported"));
}
let timestamp = buffer.pts().unwrap();
let factor = (timestamp.nseconds() as f64
/ (5 * gst::ClockTime::SECOND).nseconds() as f64)
.min(1.0f64);
// If the fade-in has finished, return early
if factor >= 1.0f64 {
return Ok(());
}
let fd = mem
.downcast_memory_ref::<gst_allocators::FdMemory>()
.unwrap()
.fd();
unsafe {
// We wrap the Memfd in ManuallyDrop here because from_raw_fd takes ownership of
// the file descriptor, which would close it on drop.
//
// see: https://github.com/lucab/memfd-rs/issues/29
let mfd = ManuallyDrop::new(memfd::Memfd::from_raw_fd(fd));
let mut mmap = MmapMut::map_mut(mfd.as_file())?;
for pixel in mmap.chunks_exact_mut(4) {
pixel[0] = (pixel[0] as f64 * factor).clamp(0.0, 255.0) as u8;
pixel[1] = (pixel[1] as f64 * factor).clamp(0.0, 255.0) as u8;
pixel[2] = (pixel[2] as f64 * factor).clamp(0.0, 255.0) as u8;
}
}
Ok(())
}
}
impl ElementImpl for FdMemoryFadeInVideoFilter {
fn pad_templates() -> &'static [PadTemplate] {
static PAD_TEMPLATES: std::sync::OnceLock<Vec<PadTemplate>> =
std::sync::OnceLock::new();
PAD_TEMPLATES.get_or_init(|| {
let caps = gst_video::VideoCapsBuilder::new()
.format(gst_video::VideoFormat::Bgra)
.build();
vec![
PadTemplate::new("sink", PadDirection::Sink, PadPresence::Always, &caps)
.unwrap(),
PadTemplate::new("src", PadDirection::Src, PadPresence::Always, &caps)
.unwrap(),
]
})
}
}
impl BaseTransformImpl for FdMemoryFadeInVideoFilter {
const MODE: BaseTransformMode = BaseTransformMode::AlwaysInPlace;
const PASSTHROUGH_ON_SAME_CAPS: bool = false;
const TRANSFORM_IP_ON_PASSTHROUGH: bool = true;
}
impl VideoFilterImpl for FdMemoryFadeInVideoFilter {
fn transform_frame_ip(
&self,
frame: &mut VideoFrameRef<&mut gst::BufferRef>,
) -> Result<gst::FlowSuccess, gst::FlowError> {
self.transform_fd_mem_ip(frame).map_err(|err| {
gst::error!(CAT, imp: self, "Failed to transform frame: {}", err);
gst::FlowError::Error
})?;
Ok(gst::FlowSuccess::Ok)
}
}
impl ObjectImpl for FdMemoryFadeInVideoFilter {}
impl GstObjectImpl for FdMemoryFadeInVideoFilter {}
#[glib::object_subclass]
impl ObjectSubclass for FdMemoryFadeInVideoFilter {
const NAME: &'static str = "FdMemoryVideoFilter";
type Type = super::FdMemoryFadeInVideoFilter;
type ParentType = gst_video::VideoFilter;
}
}
}


@@ -1,67 +1,60 @@
// This example demonstrates how to use the gstreamer crate in conjunction
// with the future trait. The example waits for either an error to occur,
// or for an EOS message. When a message notifying about either of both
// is received, the future is resolved.
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate futures;
use futures::executor::block_on;
use futures::prelude::*;
use std::env;
use futures::{executor::LocalPool, prelude::*};
use gst::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
async fn message_loop(bus: gst::Bus) {
let mut messages = bus.stream();
while let Some(msg) = messages.next().await {
use gst::MessageView;
// Determine whether we want to quit: on EOS or error message
// we quit, otherwise simply continue.
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
break;
}
_ => (),
};
}
}
fn example_main() {
// Read the pipeline to launch from the commandline, using the launch syntax.
let pipeline_str = env::args().collect::<Vec<String>>()[1..].join(" ");
gst::init().unwrap();
// Create a pipeline from the launch-syntax given on the cli.
let pipeline = gst::parse::launch(&pipeline_str).unwrap();
let bus = pipeline.bus().unwrap();
let pipeline = gst::parse_launch(&pipeline_str).unwrap();
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let ret = pipeline.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
// Use a LocalPool as executor. This runs single threaded on this very thread.
let mut pool = LocalPool::new();
let messages = gst::BusStream::new(&bus)
.for_each(|msg| {
use gst::MessageView;
// Run until our message loop finishes, e.g. EOS/error happens
pool.run_until(message_loop(bus));
let quit = match msg.view() {
MessageView::Eos(..) => true,
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
true
}
_ => false,
};
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
if quit {
Err(())
} else {
Ok(())
}
})
.and_then(|_| Ok(()));
let _ = block_on(messages);
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}


@@ -1,136 +1,63 @@
// This example demonstrates how to use the gstreamer editing services.
// This is gstreamer's framework to implement non-linear editing.
// It provides a timeline API that internally manages a dynamically changing
// pipeline (e.g. alternating video streams in seconds 1, 2, and 3).
// Timeline:
// _________________________________________________
// | 00:01 | 00:02 | 00:03 |
// =================================================
// Layer0: || ###CLIP####|| || ###CLIP###||
// || ####00#####|| || ####01####||
// =================================================
// Layer1: || ###CLIP#### || ||
// || ####00##### || ||
// =================================================
extern crate gstreamer as gst;
use gst::prelude::*;
// - Assets are the base of most components in GES. One asset essentially represents
// one resource (e.g. a file). Different files and filetypes can contain different
// types of things. Thus, you can extract different high-level types from an
// asset. If you created an asset from a video file, you could for example "extract"
// a GESClip from it. The same goes for audio files. (A short sketch of this
// workflow follows after this overview.)
// - There is even the GESProject subclass of GESAsset, which can be used to load a whole
// previously saved project. And since GESProject essentially is a GESAsset itself, you
// can then extract the stored components (e.g. the timeline) from it.
// - Clips are the high-level types (above assets), managing multimedia elements (such as
// videos or audio clips). Within the timeline, they are arranged in layers.
// Those layers essentially behave like in common photo editing software: They specify
// the order in which they are composited, and can therefore overlay each other.
// Clips are essentially wrappers around the underlying GStreamer elements needed
// to work with them. They also provide high-level APIs to add effects into the
// clip's internal pipeline.
// Multiple clips can also be grouped together (even across layers!) into one, making it
// possible to work with all of them as if they were a single clip.
// - As noted above, layers specify the order in which the different layers are composited.
// This is specified by their priority. Layers with higher priority (lower number) trump
// those with lower priority (higher number). Thus, layers with higher priority are "in the front".
// - The timeline is the enclosing element, grouping all layers and providing a timeframe.
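// A compressed sketch of the asset workflow mentioned above (not part of the
// example; error propagation is elided and the synchronous request API is
// assumed):
//
//     let asset = ges::UriClipAsset::request_sync(uri)?;
//     // The same asset can be extracted from repeatedly, e.g. to place
//     // the same file into several layers of the timeline.
//     let clip = asset.extract()?;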
extern crate gstreamer_editing_services as ges;
use ges::prelude::*;
use std::env;
use ges::prelude::*;
#[allow(unused_imports)]
#[path = "../examples-common.rs"]
mod examples_common;
fn configure_pipeline(pipeline: &ges::Pipeline, output_name: &str) {
// Every audiostream piped into the encodebin should be encoded using opus.
let audio_profile =
gst_pbutils::EncodingAudioProfile::builder(&gst::Caps::builder("audio/x-opus").build())
.build();
extern crate failure;
#[allow(unused_imports)]
use failure::Error;
// Every videostream piped into the encodebin should be encoded using vp8.
let video_profile =
gst_pbutils::EncodingVideoProfile::builder(&gst::Caps::builder("video/x-vp8").build())
.build();
extern crate glib;
// All streams are then finally combined into a webm container.
let container_profile =
gst_pbutils::EncodingContainerProfile::builder(&gst::Caps::builder("video/webm").build())
.name("container")
.add_profile(video_profile)
.add_profile(audio_profile)
.build();
// Apply the EncodingProfile to the pipeline, and set it to render mode
let output_uri = format!("{output_name}.webm");
pipeline
.set_render_settings(&output_uri, &container_profile)
.expect("Failed to set render settings");
pipeline
.set_mode(ges::PipelineFlags::RENDER)
.expect("Failed to set pipeline to render mode");
}
fn main_loop(uri: &str, output: Option<&String>) -> Result<(), glib::BoolError> {
fn main_loop(uri: &str) -> Result<(), glib::BoolError> {
ges::init()?;
// Begin by creating a timeline with audio and video tracks
let timeline = ges::Timeline::new_audio_video();
// Create a new layer that will contain our timed clips.
let layer = timeline.append_layer();
let pipeline = ges::Pipeline::new();
pipeline.set_timeline(&timeline)?;
pipeline.set_timeline(&timeline);
// If requested, configure the pipeline so it renders to a file.
if let Some(output_name) = output {
configure_pipeline(&pipeline, output_name);
}
let clip = ges::UriClip::new(uri);
layer.add_clip(&clip);
// Load a clip from the given uri and add it to the layer.
let clip = ges::UriClip::new(uri).expect("Failed to create clip");
layer.add_clip(&clip)?;
// Add an effect to the clip's video stream.
let effect = ges::Effect::new("agingtv").expect("Failed to create effect");
let effect = ges::Effect::new("agingtv");
clip.add(&effect).unwrap();
println!(
"Agingtv scratch-lines: {}",
clip.child_property("scratch-lines")
clip.get_child_property("scratch-lines")
.unwrap()
.serialize()
.unwrap()
);
// Retrieve the asset that was automatically used behind the scenes, to
// extract the clip from.
let asset = clip.asset().unwrap();
let asset = clip.get_asset().unwrap();
let duration = asset
.downcast::<ges::UriClipAsset>()
.unwrap()
.duration()
.expect("unknown duration");
.get_duration();
println!(
"Clip duration: {} - playing file from {} for {}",
duration,
duration / 2,
duration / 4,
duration / 4
);
// The inpoint specifies where in the clip we start, the duration specifies
// how much we play from that point onwards. Setting the inpoint to something other
// than 0, or the duration to something smaller than the clip's actual duration, will
// cut the clip.
clip.set_inpoint(duration / 2);
clip.set_duration(duration / 4);
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let ret = pipeline.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
let bus = pipeline.bus().unwrap();
for msg in bus.iter_timed(gst::ClockTime::NONE) {
let bus = pipeline.get_bus().unwrap();
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@@ -138,9 +65,9 @@ fn main_loop(uri: &str, output: Option<&String>) -> Result<(), glib::BoolError>
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
break;
}
@@ -148,9 +75,8 @@ fn main_loop(uri: &str, output: Option<&String>) -> Result<(), glib::BoolError>
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
Ok(())
}
@@ -158,22 +84,21 @@ fn main_loop(uri: &str, output: Option<&String>) -> Result<(), glib::BoolError>
#[allow(unused_variables)]
fn example_main() {
let args: Vec<_> = env::args().collect();
if args.len() < 2 || args.len() > 3 {
println!("Usage: ges input [output]");
let uri: &str = if args.len() == 2 {
args[1].as_ref()
} else {
println!("Usage: ges launch");
std::process::exit(-1)
}
};
let input_uri: &str = args[1].as_ref();
let output = args.get(2);
match main_loop(input_uri, output) {
match main_loop(uri) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}


@@ -1,174 +0,0 @@
#![allow(clippy::non_send_fields_in_send_ty)]
use anyhow::Result;
#[path = "../glupload.rs"]
mod glupload;
use glupload::*;
#[path = "../examples-common.rs"]
pub mod examples_common;
/// The fragment shader used for transforming GL textures travelling through the
/// pipeline. This fragment shader links against the default vertex shader
/// provided by [`GLSLStage::new_default_vertex`].
const FRAGMENT_SHADER: &str = r#"
#ifdef GL_ES
precision mediump float;
#endif
// The filter draws a fullscreen quad and provides its coordinates here:
varying vec2 v_texcoord;
// The input texture is bound on a uniform sampler named `tex`:
uniform sampler2D tex;
void main () {
// Flip texture read coordinate on the x axis to create a mirror effect:
gl_FragColor = texture2D(tex, vec2(1.0 - v_texcoord.x, v_texcoord.y));
}
"#;
mod mirror {
use std::sync::Mutex;
use gst_base::subclass::BaseTransformMode;
use gst_gl::{
prelude::*,
subclass::{prelude::*, GLFilterMode},
*,
};
use once_cell::sync::Lazy;
use super::{gl, FRAGMENT_SHADER};
pub static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"rsglmirrorfilter",
gst::DebugColorFlags::empty(),
Some("Rust GL Mirror Filter"),
)
});
glib::wrapper! {
pub struct GLMirrorFilter(ObjectSubclass<imp::GLMirrorFilter>) @extends gst_gl::GLFilter, gst_gl::GLBaseFilter, gst_base::BaseTransform, gst::Element, gst::Object;
}
impl GLMirrorFilter {
pub fn new(name: Option<&str>) -> Self {
glib::Object::builder().property("name", name).build()
}
}
mod imp {
use super::*;
/// Private data consists of the transformation shader which is compiled
/// in advance of running the actual filter.
#[derive(Default)]
pub struct GLMirrorFilter {
shader: Mutex<Option<GLShader>>,
}
impl GLMirrorFilter {
fn create_shader(&self, context: &GLContext) -> Result<(), gst::LoggableError> {
let shader = GLShader::new(context);
let vertex = GLSLStage::new_default_vertex(context);
vertex.compile().unwrap();
shader.attach_unlocked(&vertex)?;
gst::debug!(
CAT,
imp: self,
"Compiling fragment shader {}",
FRAGMENT_SHADER
);
let fragment = GLSLStage::with_strings(
context,
gl::FRAGMENT_SHADER,
// new_default_vertex is compiled with this version and profile:
GLSLVersion::None,
GLSLProfile::ES | GLSLProfile::COMPATIBILITY,
&[FRAGMENT_SHADER],
);
fragment.compile().unwrap();
shader.attach_unlocked(&fragment)?;
shader.link().unwrap();
gst::debug!(
CAT,
imp: self,
"Successfully compiled and linked {:?}",
shader
);
*self.shader.lock().unwrap() = Some(shader);
Ok(())
}
}
// See `subclass.rs` for general documentation on creating a subclass. Extended
// information like element metadata has been omitted for brevity.
#[glib::object_subclass]
impl ObjectSubclass for GLMirrorFilter {
const NAME: &'static str = "RsGLMirrorFilter";
type Type = super::GLMirrorFilter;
type ParentType = gst_gl::GLFilter;
}
impl ElementImpl for GLMirrorFilter {}
impl GstObjectImpl for GLMirrorFilter {}
impl ObjectImpl for GLMirrorFilter {}
impl BaseTransformImpl for GLMirrorFilter {
const MODE: BaseTransformMode = BaseTransformMode::NeverInPlace;
const PASSTHROUGH_ON_SAME_CAPS: bool = false;
const TRANSFORM_IP_ON_PASSTHROUGH: bool = false;
}
impl GLBaseFilterImpl for GLMirrorFilter {
fn gl_start(&self) -> Result<(), gst::LoggableError> {
let filter = self.obj();
// Create a shader when GL is started, knowing that the OpenGL context is
// available.
let context = GLBaseFilterExt::context(&*filter).unwrap();
self.create_shader(&context)?;
self.parent_gl_start()
}
}
impl GLFilterImpl for GLMirrorFilter {
const MODE: GLFilterMode = GLFilterMode::Texture;
fn filter_texture(
&self,
input: &gst_gl::GLMemory,
output: &gst_gl::GLMemory,
) -> Result<(), gst::LoggableError> {
let filter = self.obj();
let shader = self.shader.lock().unwrap();
// Use the underlying filter implementation to transform the input texture into
// an output texture with the shader.
filter.render_to_target_with_shader(
input,
output,
shader
.as_ref()
.expect("No shader, call `create_shader` first!"),
);
self.parent_filter_texture(input, output)
}
}
}
}
fn example_main() -> Result<()> {
gst::init().unwrap();
let glfilter = mirror::GLMirrorFilter::new(Some("Mirror filter"));
App::new(Some(glfilter.as_ref())).and_then(main_loop)
}
fn main() -> Result<()> {
examples_common::run(example_main)
}


@@ -1,68 +1,63 @@
use std::env;
use futures::prelude::*;
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate glib;
extern crate futures;
use futures::prelude::*;
use std::env;
#[path = "../examples-common.rs"]
mod examples_common;
async fn message_handler(loop_: glib::MainLoop, bus: gst::Bus) {
let mut messages = bus.stream();
while let Some(msg) = messages.next().await {
use gst::MessageView;
// Determine whether we want to quit: on EOS or error message
// we quit, otherwise simply continue.
match msg.view() {
MessageView::Eos(..) => loop_.quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
loop_.quit();
}
_ => (),
}
}
}
fn example_main() {
// Get the default main context and make it also the thread default, then create
// a main loop for it
let ctx = glib::MainContext::default();
let loop_ = glib::MainLoop::new(Some(&ctx), false);
// Read the pipeline to launch from the commandline, using the launch syntax.
let pipeline_str = env::args().collect::<Vec<String>>()[1..].join(" ");
gst::init().unwrap();
// Create a pipeline from the launch-syntax given on the cli.
let pipeline = gst::parse::launch(&pipeline_str).unwrap();
let bus = pipeline.bus().unwrap();
let pipeline = gst::parse_launch(&pipeline_str).unwrap();
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let ret = pipeline.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
// Spawn our message handling stream
ctx.spawn_local(message_handler(loop_.clone(), bus));
let messages = gst::BusStream::new(&bus)
.for_each(|msg| {
use gst::MessageView;
// And run until something quits the loop, i.e. an EOS
// or error message is received above
loop_.run();
let quit = match msg.view() {
MessageView::Eos(..) => true,
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
true
}
_ => false,
};
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
if quit {
Err(())
} else {
Ok(())
}
})
.and_then(|_| Ok(()));
let _ = ctx.block_on(messages);
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}
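When only EOS and error messages matter, the handler above can be condensed with a filtered stream. A minimal sketch, assuming a gstreamer-rs version that provides Bus::stream_filtered:

use futures::prelude::*;

async fn quit_on_eos_or_error(loop_: glib::MainLoop, bus: gst::Bus) {
    // Only EOS and error messages are delivered, so no catch-all arm is needed.
    let mut messages =
        bus.stream_filtered(&[gst::MessageType::Eos, gst::MessageType::Error]);
    while let Some(msg) = messages.next().await {
        if let gst::MessageView::Error(err) = msg.view() {
            eprintln!("Error: {}", err.error());
        }
        loop_.quit();
    }
}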

View file

@@ -1,18 +0,0 @@
#![allow(clippy::non_send_fields_in_send_ty)]
use anyhow::Result;
#[path = "../glupload.rs"]
mod glupload;
use glupload::*;
#[path = "../examples-common.rs"]
pub mod examples_common;
fn example_main() -> Result<()> {
App::new(None).and_then(main_loop)
}
fn main() -> Result<()> {
examples_common::run(example_main)
}

examples/src/bin/gtksink.rs Normal file
View file

@@ -0,0 +1,126 @@
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate glib;
extern crate gio;
use gio::prelude::*;
extern crate gtk;
use gtk::prelude::*;
use std::cell::RefCell;
use std::env;
fn create_ui(app: &gtk::Application) {
let pipeline = gst::Pipeline::new(None);
let src = gst::ElementFactory::make("videotestsrc", None).unwrap();
let (sink, widget) = if let Some(gtkglsink) = gst::ElementFactory::make("gtkglsink", None) {
let glsinkbin = gst::ElementFactory::make("glsinkbin", None).unwrap();
glsinkbin
.set_property("sink", &gtkglsink.to_value())
.unwrap();
let widget = gtkglsink.get_property("widget").unwrap();
(glsinkbin, widget.get::<gtk::Widget>().unwrap())
} else {
let sink = gst::ElementFactory::make("gtksink", None).unwrap();
let widget = sink.get_property("widget").unwrap();
(sink, widget.get::<gtk::Widget>().unwrap())
};
pipeline.add_many(&[&src, &sink]).unwrap();
src.link(&sink).unwrap();
let window = gtk::Window::new(gtk::WindowType::Toplevel);
window.set_default_size(320, 240);
let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
vbox.pack_start(&widget, true, true, 0);
let label = gtk::Label::new("Position: 00:00:00");
vbox.pack_start(&label, true, true, 5);
window.add(&vbox);
window.show_all();
app.add_window(&window);
let pipeline_weak = pipeline.downgrade();
let timeout_id = gtk::timeout_add(500, move || {
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return glib::Continue(true),
};
let position = pipeline
.query_position::<gst::ClockTime>()
.unwrap_or_else(|| 0.into());
label.set_text(&format!("Position: {:.0}", position));
glib::Continue(true)
});
let app_weak = app.downgrade();
window.connect_delete_event(move |_, _| {
let app = match app_weak.upgrade() {
Some(app) => app,
None => return Inhibit(false),
};
app.quit();
Inhibit(false)
});
let bus = pipeline.get_bus().unwrap();
let ret = pipeline.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
let app_weak = glib::SendWeakRef::from(app.downgrade());
bus.add_watch(move |_, msg| {
use gst::MessageView;
let app = match app_weak.upgrade() {
Some(app) => app,
None => return glib::Continue(false),
};
match msg.view() {
MessageView::Eos(..) => gtk::main_quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
app.quit();
}
_ => (),
};
glib::Continue(true)
});
// Pipeline reference is owned by the closure below, so will be
// destroyed once the app is destroyed
let timeout_id = RefCell::new(Some(timeout_id));
app.connect_shutdown(move |_| {
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
bus.remove_watch();
if let Some(timeout_id) = timeout_id.borrow_mut().take() {
glib::source_remove(timeout_id);
}
});
}
fn main() {
gst::init().unwrap();
gtk::init().unwrap();
let app = gtk::Application::new(None, gio::ApplicationFlags::FLAGS_NONE).unwrap();
app.connect_activate(create_ui);
let args = env::args().collect::<Vec<_>>();
app.run(&args);
}
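The downgrade()/upgrade() dance used above for both the pipeline and the app is the standard pattern for closures that must not keep an object alive. A minimal standalone sketch, assuming a recent glib-rs where timeout callbacks return glib::ControlFlow:

use gst::prelude::*;

fn poll_position(pipeline: &gst::Pipeline) {
    // Hold only a weak reference so the closure cannot keep the pipeline alive.
    let weak = pipeline.downgrade();
    glib::timeout_add_seconds(1, move || {
        // upgrade() fails once the pipeline was dropped elsewhere; returning
        // Break then removes this timeout source.
        let Some(pipeline) = weak.upgrade() else {
            return glib::ControlFlow::Break;
        };
        println!("{:?}", pipeline.query_position::<gst::ClockTime>());
        glib::ControlFlow::Continue
    });
}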

View file

@@ -0,0 +1,201 @@
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_video as gst_video;
use gst_video::prelude::*;
extern crate glib;
use glib::translate::ToGlibPtr;
extern crate gio;
use gio::prelude::*;
extern crate gtk;
use gtk::prelude::*;
extern crate gdk;
use gdk::prelude::*;
use std::env;
use std::os::raw::c_void;
use std::cell::RefCell;
use std::process;
fn create_ui(app: &gtk::Application) {
let pipeline = gst::Pipeline::new(None);
let src = gst::ElementFactory::make("videotestsrc", None).unwrap();
let sink = if cfg!(feature = "gtkvideooverlay-x11") {
gst::ElementFactory::make("xvimagesink", None).unwrap()
} else if cfg!(feature = "gtkvideooverlay-quartz") {
gst::ElementFactory::make("glimagesink", None).unwrap()
} else {
unreachable!()
};
pipeline.add_many(&[&src, &sink]).unwrap();
src.link(&sink).unwrap();
let window = gtk::Window::new(gtk::WindowType::Toplevel);
window.set_default_size(320, 240);
let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
let video_window = gtk::DrawingArea::new();
video_window.set_size_request(320, 240);
let video_overlay = sink
.clone()
.dynamic_cast::<gst_video::VideoOverlay>()
.unwrap()
.downgrade();
video_window.connect_realize(move |video_window| {
let video_overlay = match video_overlay.upgrade() {
Some(video_overlay) => video_overlay,
None => return,
};
let gdk_window = video_window.get_window().unwrap();
if !gdk_window.ensure_native() {
println!("Can't create native window for widget");
process::exit(-1);
}
let display_type_name = gdk_window.get_display().get_type().name();
if cfg!(feature = "gtkvideooverlay-x11") {
// Check if we're using X11 or ...
if display_type_name == "GdkX11Display" {
extern "C" {
pub fn gdk_x11_window_get_xid(
window: *mut glib::object::GObject,
) -> *mut c_void;
}
unsafe {
let xid = gdk_x11_window_get_xid(gdk_window.to_glib_none().0);
video_overlay.set_window_handle(xid as usize);
}
} else {
println!("Add support for display type '{}'", display_type_name);
process::exit(-1);
}
} else if cfg!(feature = "gtkvideooverlay-quartz") {
if display_type_name == "GdkQuartzDisplay" {
extern "C" {
pub fn gdk_quartz_window_get_nsview(
window: *mut glib::object::GObject,
) -> *mut c_void;
}
unsafe {
let window = gdk_quartz_window_get_nsview(gdk_window.to_glib_none().0);
video_overlay.set_window_handle(window as usize);
}
} else {
println!("Unsupported display type '{}", display_type_name);
process::exit(-1);
}
}
});
vbox.pack_start(&video_window, true, true, 0);
let label = gtk::Label::new("Position: 00:00:00");
vbox.pack_start(&label, true, true, 5);
window.add(&vbox);
window.show_all();
app.add_window(&window);
let pipeline_weak = pipeline.downgrade();
let timeout_id = gtk::timeout_add(500, move || {
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return glib::Continue(true),
};
let position = pipeline
.query_position::<gst::ClockTime>()
.unwrap_or_else(|| 0.into());
label.set_text(&format!("Position: {:.0}", position));
glib::Continue(true)
});
let app_weak = app.downgrade();
window.connect_delete_event(move |_, _| {
let app = match app_weak.upgrade() {
Some(app) => app,
None => return Inhibit(false),
};
app.quit();
Inhibit(false)
});
let bus = pipeline.get_bus().unwrap();
let ret = pipeline.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
let app_weak = glib::SendWeakRef::from(app.downgrade());
bus.add_watch(move |_, msg| {
use gst::MessageView;
let app = match app_weak.upgrade() {
Some(app) => app,
None => return glib::Continue(false),
};
match msg.view() {
MessageView::Eos(..) => gtk::main_quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
app.quit();
}
_ => (),
};
glib::Continue(true)
});
// Pipeline reference is owned by the closure below, so will be
// destroyed once the app is destroyed
let timeout_id = RefCell::new(Some(timeout_id));
app.connect_shutdown(move |_| {
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
bus.remove_watch();
if let Some(timeout_id) = timeout_id.borrow_mut().take() {
glib::source_remove(timeout_id);
}
});
}
fn main() {
#[cfg(not(unix))]
{
println!("Add support for target platform");
process::exit(-1);
}
gst::init().unwrap();
gtk::init().unwrap();
let app = gtk::Application::new(None, gio::ApplicationFlags::FLAGS_NONE).unwrap();
app.connect_activate(create_ui);
let args = env::args().collect::<Vec<_>>();
app.run(&args);
}
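Stripped of the platform plumbing, the VideoOverlay handoff itself is small. A sketch of the X11 path, assuming a recent gstreamer-video where set_window_handle is an unsafe method and the XID was obtained from a native window as above:

use gst_video::prelude::*;

// Hand a native window id (here an X11 XID) to any sink implementing
// VideoOverlay before it allocates its own output window.
fn attach_overlay(sink: &gst::Element, xid: usize) {
    let overlay = sink
        .clone()
        .dynamic_cast::<gst_video::VideoOverlay>()
        .expect("sink does not implement VideoOverlay");
    // SAFETY: the caller guarantees `xid` refers to a valid native window.
    unsafe { overlay.set_window_handle(xid) };
}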

View file

@@ -1,7 +1,4 @@
// This example demonstrates how to use GStreamer's iteration APIs.
// This is used at multiple occasions - for example to iterate an
// element's pads.
extern crate gstreamer as gst;
use gst::prelude::*;
#[path = "../examples-common.rs"]
@@ -10,28 +7,15 @@ mod examples_common
fn example_main() {
gst::init().unwrap();
// Create and use an identity element here.
// This element does nothing, really. We also never add it to a pipeline.
// We just want to iterate the identity element's pads.
let identity = gst::ElementFactory::make("identity").build().unwrap();
// Get an iterator over all pads of the identity-element.
let identity = gst::ElementFactory::make("identity", None).unwrap();
let mut iter = identity.iterate_pads();
loop {
// In an endless-loop, we use the iterator until we either reach the end
// or we hit an error.
match iter.next() {
Ok(Some(pad)) => println!("Pad: {}", pad.name()),
Ok(Some(pad)) => println!("Pad: {}", pad.get_name()),
Ok(None) => {
// We reached the end of the iterator, there are no more pads
println!("Done");
break;
}
// It is very important to handle this resync error by calling resync
// on the iterator. This error happens when the container being iterated
// changed during iteration (e.g. a pad was added while we used the
// iterator to iterate over all of an element's pads).
// After calling resync on the iterator, iteration will start from the beginning
// again. So the application should be able to handle that.
Err(gst::IteratorError::Resync) => {
println!("Iterator resync");
iter.resync();
@@ -45,7 +29,7 @@ fn example_main() {
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}
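For completeness, the full iterator contract including the plain error arm that the hunk above cuts off; names follow the modern API side (the 0.12 side uses get_name):

use gst::prelude::*;

fn print_pads(element: &gst::Element) {
    let mut iter = element.iterate_pads();
    loop {
        match iter.next() {
            Ok(Some(pad)) => println!("Pad: {}", pad.name()),
            // End of iteration: all pads were seen.
            Ok(None) => break,
            // The element's pad list changed while iterating; start over.
            Err(gst::IteratorError::Resync) => iter.resync(),
            // A real error: give up on this iterator.
            Err(gst::IteratorError::Error) => break,
        }
    }
}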

View file

@@ -1,54 +1,37 @@
// This is a simplified rust-reimplementation of the gst-launch-<version>
// cli tool. It has no own parameters and simply parses the cli arguments
// as launch syntax.
// If parsing succeeds, the pipeline is run until the stream ends or an error occurs.
use std::{env, process};
extern crate gstreamer as gst;
use gst::prelude::*;
use std::env;
use std::process;
#[path = "../examples-common.rs"]
mod examples_common;
fn example_main() {
// Get a string containing the passed pipeline launch syntax
let pipeline_str = env::args().collect::<Vec<String>>()[1..].join(" ");
gst::init().unwrap();
// Let GStreamer create a pipeline from the parsed launch syntax on the cli.
// In comparison to the launch_glib_main example, this uses the advanced launch-syntax
// parsing API of GStreamer. The function returns a Result, handing us the pipeline if
// parsing and creation succeeded, and detailed error information if something
// went wrong. The error is passed as gst::ParseError. In this example, we separately
// handle the NoSuchElement error, which GStreamer uses to notify us about elements
// used within the launch syntax that are not available (not installed).
// GUIs especially should handle this case, to tell users that they need to
// install the corresponding GStreamer plugins.
let mut context = gst::ParseContext::new();
let pipeline = match gst::parse::launch_full(
&pipeline_str,
Some(&mut context),
gst::ParseFlags::empty(),
) {
Ok(pipeline) => pipeline,
Err(err) => {
if let Some(gst::ParseError::NoSuchElement) = err.kind::<gst::ParseError>() {
println!("Missing element(s): {:?}", context.missing_elements());
} else {
println!("Failed to parse pipeline: {err}");
let pipeline =
match gst::parse_launch_full(&pipeline_str, Some(&mut context), gst::ParseFlags::NONE) {
Ok(pipeline) => pipeline,
Err(err) => {
if let Some(gst::ParseError::NoSuchElement) = err.kind::<gst::ParseError>() {
println!("Missing element(s): {:?}", context.get_missing_elements());
} else {
println!("Failed to parse pipeline: {}", err);
}
process::exit(-1)
}
};
let bus = pipeline.get_bus().unwrap();
process::exit(-1)
}
};
let bus = pipeline.bus().unwrap();
let ret = pipeline.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@@ -56,9 +39,9 @@ fn example_main() {
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
break;
}
@@ -66,13 +49,12 @@ fn example_main() {
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}
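For comparison, the minimal variant without a ParseContext. parse::launch returns a generic gst::Element that usually needs a downcast to gst::Pipeline; a sketch against the modern API (the 0.12 side calls this gst::parse_launch):

use gst::prelude::*;

fn build_pipeline(launch: &str) -> Result<gst::Pipeline, anyhow::Error> {
    gst::init()?;
    let pipeline = gst::parse::launch(launch)?
        .downcast::<gst::Pipeline>()
        .map_err(|_| anyhow::anyhow!("top-level element is not a pipeline"))?;
    Ok(pipeline)
}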

View file

@@ -1,72 +1,61 @@
// This is a simplified rust-reimplementation of the gst-launch-<version>
// cli tool. It has no own parameters and simply parses the cli arguments
// as launch syntax.
// When the parsing succeeded, the pipeline is run until it exits.
// Main difference between this example and the launch example is the use of
// GLib's main loop to operate GStreamer's bus. This makes it possible to also handle other
// things from the main loop (timeouts, UI events, socket events, ...) instead
// of just messages from GStreamer's bus.
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate glib;
use std::env;
use gst::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
fn example_main() {
// Get a string containing the passed pipeline launch syntax
let pipeline_str = env::args().collect::<Vec<String>>()[1..].join(" ");
gst::init().unwrap();
// As mentioned above, we use GLib's main loop to operate GStreamer's bus.
let main_loop = glib::MainLoop::new(None, false);
// Let GStreamer create a pipeline from the parsed launch syntax on the cli.
let pipeline = gst::parse::launch(&pipeline_str).unwrap();
let bus = pipeline.bus().unwrap();
let pipeline = gst::parse_launch(&pipeline_str).unwrap();
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let ret = pipeline.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
let main_loop_clone = main_loop.clone();
//bus.add_signal_watch();
//bus.connect_message(None, move |_, msg| {
let _bus_watch = bus
.add_watch(move |_, msg| {
use gst::MessageView;
//bus.connect_message(move |_, msg| {
bus.add_watch(move |_, msg| {
use gst::MessageView;
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => main_loop.quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
main_loop.quit();
}
_ => (),
};
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => main_loop.quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
main_loop.quit();
}
_ => (),
};
glib::ControlFlow::Continue
})
.expect("Failed to add bus watch");
glib::Continue(true)
});
main_loop.run();
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
bus.remove_watch();
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}
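One detail of the new-side code above: in recent gstreamer-rs, add_watch returns a guard whose Drop removes the watch, replacing the old explicit bus.remove_watch(). A minimal sketch of that pattern:

use gst::prelude::*;

fn run_until_eos(pipeline: &gst::Pipeline, main_loop: &glib::MainLoop) {
    let bus = pipeline.bus().unwrap();
    let ml = main_loop.clone();
    // The watch stays active for as long as this guard is alive.
    let _watch = bus
        .add_watch(move |_, msg| {
            if let gst::MessageView::Eos(..) = msg.view() {
                ml.quit();
            }
            glib::ControlFlow::Continue
        })
        .expect("Failed to add bus watch");
    main_loop.run();
    // `_watch` is dropped here, removing the watch.
}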

View file

@@ -1,359 +0,0 @@
// This example demonstrates how to draw an overlay on a video stream using
// Direct2D/DirectWrite/WIC and the overlay composition element.
// {videotestsrc} - {overlaycomposition} - {capsfilter} - {videoconvert} - {autovideosink}
// The capsfilter element allows us to dictate the video resolution we want for the
// videotestsrc and the overlaycomposition element.
use std::sync::{Arc, Mutex};
use byte_slice_cast::*;
use anyhow::Error;
use derive_more::{Display, Error};
use gst::prelude::*;
use windows::{
Foundation::Numerics::*,
Win32::{
Graphics::{
Direct2D::{Common::*, *},
DirectWrite::*,
Dxgi::Common::*,
Imaging::*,
},
System::Com::*,
},
};
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
}
struct DrawingContext {
// Factory for creating render target
d2d_factory: ID2D1Factory,
// Used to create WIC bitmap surface
wic_factory: IWICImagingFactory,
// text layout holding text information (string, font, size, etc)
text_layout: IDWriteTextLayout,
// Holds the rendered image
bitmap: Option<IWICBitmap>,
// Bound to the bitmap and used for the actual Direct2D rendering
render_target: Option<ID2D1RenderTarget>,
info: Option<gst_video::VideoInfo>,
}
// Required for IWICBitmap
unsafe impl Send for DrawingContext {}
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
let pipeline = gst::Pipeline::default();
// The videotestsrc supports multiple test patterns. In this example, we will use the
// pattern with a white ball moving around the video's center point.
let src = gst::ElementFactory::make("videotestsrc")
.property_from_str("pattern", "ball")
.build()?;
let overlay = gst::ElementFactory::make("overlaycomposition").build()?;
let caps = gst_video::VideoCapsBuilder::new()
.width(800)
.height(800)
.framerate((30, 1).into())
.build();
let capsfilter = gst::ElementFactory::make("capsfilter")
.property("caps", &caps)
.build()?;
let videoconvert = gst::ElementFactory::make("videoconvert").build()?;
let sink = gst::ElementFactory::make("autovideosink").build()?;
pipeline.add_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
gst::Element::link_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
// Most Direct2D/DirectWrite APIs (including factory methods) are marked as
// "unsafe", but they shouldn't fail in practice
let drawer = unsafe {
let d2d_factory =
D2D1CreateFactory::<ID2D1Factory>(D2D1_FACTORY_TYPE_MULTI_THREADED, None).unwrap();
let dwrite_factory =
DWriteCreateFactory::<IDWriteFactory>(DWRITE_FACTORY_TYPE_SHARED).unwrap();
let text_format = dwrite_factory
.CreateTextFormat(
windows::core::w!("Arial"),
None,
DWRITE_FONT_WEIGHT_BOLD,
DWRITE_FONT_STYLE_NORMAL,
DWRITE_FONT_STRETCH_NORMAL,
32f32,
windows::core::w!("en-us"),
)
.unwrap();
let text_layout = dwrite_factory
.CreateTextLayout(
windows::core::w!("GStreamer").as_wide(),
&text_format,
// Size will be updated later on "caps-changed" signal
800f32,
800f32,
)
.unwrap();
// Top (default) and center alignment
text_layout
.SetTextAlignment(DWRITE_TEXT_ALIGNMENT_CENTER)
.unwrap();
let wic_factory: IWICImagingFactory =
CoCreateInstance(&CLSID_WICImagingFactory, None, CLSCTX_ALL).unwrap();
Arc::new(Mutex::new(DrawingContext {
d2d_factory,
wic_factory,
text_layout,
bitmap: None,
render_target: None,
info: None,
}))
};
overlay.connect_closure(
"draw",
false,
glib::closure!(@strong drawer => move |_overlay: &gst::Element,
sample: &gst::Sample| {
use std::f64::consts::PI;
let drawer = drawer.lock().unwrap();
let buffer = sample.buffer().unwrap();
let timestamp = buffer.pts().unwrap();
let info = drawer.info.as_ref().unwrap();
let text_layout = &drawer.text_layout;
let bitmap = drawer.bitmap.as_ref().unwrap();
let render_target = drawer.render_target.as_ref().unwrap();
let global_angle = 360. * (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
/ (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
let center_x = (info.width() / 2) as f32;
let center_y = (info.height() / 2) as f32;
let top_margin = (info.height() / 20) as f32;
unsafe {
// Begin drawing
render_target.BeginDraw();
// Clear background
render_target.Clear(Some(&D2D1_COLOR_F {
r: 0f32,
g: 0f32,
b: 0f32,
a: 0f32,
}));
// This loop will render the string "GStreamer" 10 times in a circle
for i in 0..10 {
let angle = (360. * f64::from(i)) / 10.0;
let red = ((1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0) as f32;
let text_brush = render_target
.CreateSolidColorBrush(
&D2D1_COLOR_F {
r: red,
g: 0f32,
b: 1f32 - red,
a: 1f32,
},
None,
)
.unwrap();
let angle = (angle + global_angle) as f32;
let matrix = Matrix3x2::rotation(angle, center_x, center_y);
render_target.SetTransform(&matrix);
render_target.DrawTextLayout(
D2D_POINT_2F { x: 0f32, y: top_margin },
text_layout,
&text_brush,
D2D1_DRAW_TEXT_OPTIONS_NONE,
);
}
// EndDraw may fail for various reasons.
// Ignore any error in this example
let _ = render_target.EndDraw(None, None);
// Make sure all operations are completed before copying the
// bitmap to the buffer
let _ = render_target.Flush(None::<*mut u64>, None::<*mut u64>);
}
let mut buffer = gst::Buffer::with_size((info.width() * info.height() * 4) as usize).unwrap();
{
let buffer_mut = buffer.get_mut().unwrap();
let mut map = buffer_mut.map_writable().unwrap();
let dst = map.as_mut_slice_of::<u8>().unwrap();
unsafe {
// Bitmap size is equal to the background image size.
// Copy entire memory
bitmap.CopyPixels(std::ptr::null(), info.width() * 4, dst).unwrap();
}
}
gst_video::VideoMeta::add_full(
buffer.get_mut().unwrap(),
gst_video::VideoFrameFlags::empty(),
gst_video::VideoFormat::Bgra,
info.width(),
info.height(),
&[0],
&[(info.width() * 4) as i32],
)
.unwrap();
// Turn the buffer into a VideoOverlayRectangle, then place
// that into a VideoOverlayComposition and return it.
//
// A VideoOverlayComposition can take a Vec of such rectangles
// spaced around the video frame, but we're just outputting 1
// here
let rect = gst_video::VideoOverlayRectangle::new_raw(
&buffer,
0,
0,
info.width(),
info.height(),
gst_video::VideoOverlayFormatFlags::PREMULTIPLIED_ALPHA,
);
gst_video::VideoOverlayComposition::new(Some(&rect))
.unwrap()
}),
);
// Add a signal handler to the overlay's "caps-changed" signal. This could e.g.
// be called when the sink that we render to does not support resizing the image
// itself - but the user just changed the window-size. The element after the overlay
// will then change its caps and we use the notification about this change to
// resize our canvas.
// Another case where this might happen is when our video is a network
// stream that dynamically changes resolution when enough bandwidth is available.
overlay.connect_closure(
"caps-changed",
false,
glib::closure!(move |_overlay: &gst::Element,
caps: &gst::Caps,
_width: u32,
_height: u32| {
let mut drawer = drawer.lock().unwrap();
let info = gst_video::VideoInfo::from_caps(caps).unwrap();
unsafe {
// Update text layout to be identical to new video resolution
drawer.text_layout.SetMaxWidth(info.width() as f32).unwrap();
drawer
.text_layout
.SetMaxHeight(info.height() as f32)
.unwrap();
// Create new WIC bitmap with PBGRA format (pre-multiplied BGRA)
let bitmap = drawer
.wic_factory
.CreateBitmap(
info.width(),
info.height(),
&GUID_WICPixelFormat32bppPBGRA,
WICBitmapCacheOnDemand,
)
.unwrap();
let render_target = drawer
.d2d_factory
.CreateWicBitmapRenderTarget(
&bitmap,
&D2D1_RENDER_TARGET_PROPERTIES {
r#type: D2D1_RENDER_TARGET_TYPE_DEFAULT,
pixelFormat: D2D1_PIXEL_FORMAT {
format: DXGI_FORMAT_B8G8R8A8_UNORM,
alphaMode: D2D1_ALPHA_MODE_PREMULTIPLIED,
},
// zero means default DPI
dpiX: 0f32,
dpiY: 0f32,
usage: D2D1_RENDER_TARGET_USAGE_NONE,
minLevel: D2D1_FEATURE_LEVEL_DEFAULT,
},
)
.unwrap();
drawer.render_target = Some(render_target);
drawer.bitmap = Some(bitmap);
}
drawer.info = Some(info);
}),
);
Ok(pipeline)
}
fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
let bus = pipeline
.bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
pipeline.set_state(gst::State::Null)?;
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
}
.into());
}
_ => (),
}
}
pipeline.set_state(gst::State::Null)?;
Ok(())
}
fn main() {
// WIC requires COM initialization
unsafe {
CoInitializeEx(None, COINIT_MULTITHREADED).unwrap();
}
match create_pipeline().and_then(main_loop) {
Ok(r) => r,
Err(e) => eprintln!("Error! {}", e),
}
unsafe {
CoUninitialize();
}
}
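The tail of the draw handler generalizes beyond Direct2D: any premultiplied BGRA buffer carrying a VideoMeta can be wrapped into a composition. A condensed helper under that assumption, mirroring the calls above:

// `buffer` must already carry a VideoMeta describing a premultiplied-alpha
// BGRA image of the given size (as added via VideoMeta::add_full above).
fn composition_from_bgra(
    buffer: &gst::Buffer,
    width: u32,
    height: u32,
) -> gst_video::VideoOverlayComposition {
    let rect = gst_video::VideoOverlayRectangle::new_raw(
        buffer,
        0, // render position x
        0, // render position y
        width,
        height,
        gst_video::VideoOverlayFormatFlags::PREMULTIPLIED_ALPHA,
    );
    gst_video::VideoOverlayComposition::new(Some(&rect)).unwrap()
}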

View file

@@ -1,304 +0,0 @@
// This example demonstrates how to draw an overlay on a video stream using
// cairo and the overlay composition element.
// Additionally, this example uses functionality of the pango library, which handles
// text layout. The pangocairo crate is a convenient wrapper combining both libraries
// into a single interface.
// {videotestsrc} - {overlaycomposition} - {capsfilter} - {videoconvert} - {autovideosink}
// The capsfilter element allows us to dictate the video resolution we want for the
// videotestsrc and the overlaycomposition element.
use std::{
ops,
sync::{Arc, Mutex},
};
use anyhow::Error;
use derive_more::{Display, Error};
use gst::prelude::*;
use pango::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
}
struct DrawingContext {
layout: LayoutWrapper,
info: Option<gst_video::VideoInfo>,
}
#[derive(Debug)]
struct LayoutWrapper(pango::Layout);
impl ops::Deref for LayoutWrapper {
type Target = pango::Layout;
fn deref(&self) -> &pango::Layout {
assert_eq!(self.0.ref_count(), 1);
&self.0
}
}
// SAFETY: We ensure that there are never multiple references to the layout.
unsafe impl Send for LayoutWrapper {}
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
let pipeline = gst::Pipeline::default();
// The videotestsrc supports multiple test patterns. In this example, we will use the
// pattern with a white ball moving around the video's center point.
let src = gst::ElementFactory::make("videotestsrc")
.property_from_str("pattern", "ball")
.build()?;
let overlay = gst::ElementFactory::make("overlaycomposition").build()?;
// Plug in a capsfilter element that will force the videotestsrc and the overlay to work
// with images of the size 800x800, and framerate of 15 fps, since my laptop struggles
// rendering it at the default 30 fps
let caps = gst_video::VideoCapsBuilder::new()
.width(800)
.height(800)
.framerate((15, 1).into())
.build();
let capsfilter = gst::ElementFactory::make("capsfilter")
.property("caps", &caps)
.build()?;
let videoconvert = gst::ElementFactory::make("videoconvert").build()?;
let sink = gst::ElementFactory::make("autovideosink").build()?;
pipeline.add_many([&src, &overlay, &capsfilter, &videoconvert, &sink])?;
gst::Element::link_many([&src, &overlay, &capsfilter, &videoconvert, &sink])?;
// The PangoFontMap represents the set of fonts available for a particular rendering system.
let fontmap = pangocairo::FontMap::new();
// Create a new pango layouting context for the fontmap.
let context = fontmap.create_context();
// Create a pango layout object. This object is a string of text we want to layout.
// It is wrapped in a LayoutWrapper (defined above) to be able to send it across threads.
let layout = LayoutWrapper(pango::Layout::new(&context));
// Select the text content and the font we want to use for the piece of text.
let font_desc = pango::FontDescription::from_string("Sans Bold 26");
layout.set_font_description(Some(&font_desc));
layout.set_text("GStreamer");
// The following is a context struct (containing the pango layout and the configured video info).
// We have to wrap it in an Arc (or Rc) to get reference counting, that is: to be able to have
// shared ownership of it in multiple different places (the two signal handlers here).
// We have to wrap it in a Mutex because Rust's type-system can't know that both signals are
// only ever called from a single thread (the streaming thread). It would be enough to have
// something that is Send in theory but that's not how signal handlers are generated unfortunately.
// The Mutex (or, if we didn't need the Sync bound, a RefCell) implements
// interior mutability (see the Rust docs). Via this we can get a mutable reference to the contained
// data, which is checked at runtime for uniqueness (blocking in the case of a mutex, panicking in the
// case of a refcell) instead of at compile time (like with normal references).
let drawer = Arc::new(Mutex::new(DrawingContext { layout, info: None }));
// Connect to the overlaycomposition element's "draw" signal, which is emitted for
// each videoframe piped through the element. The signal handler needs to
// return a gst_video::VideoOverlayComposition to be drawn on the frame
//
// Signals connected with the connect(<name>, ...) API get their arguments
// passed as an array of glib::Value. For documentation about the actual arguments
// it is always a good idea to check the element's signals using either
// gst-inspect or the online documentation.
//
// In this case, the signal passes the gst::Element and a gst::Sample with
// the current buffer
overlay.connect_closure(
"draw",
false,
glib::closure!(@strong drawer => move |_overlay: &gst::Element,
sample: &gst::Sample| {
use std::f64::consts::PI;
let drawer = drawer.lock().unwrap();
let buffer = sample.buffer().unwrap();
let timestamp = buffer.pts().unwrap();
let info = drawer.info.as_ref().unwrap();
let layout = &drawer.layout;
let angle = 2.0 * PI * (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
/ (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
/* Create a Cairo image surface to draw into and the context around it. */
let surface = cairo::ImageSurface::create(
cairo::Format::ARgb32,
info.width() as i32,
info.height() as i32,
)
.unwrap();
let cr = cairo::Context::new(&surface).expect("Failed to create cairo context");
cr.save().expect("Failed to save state");
cr.set_operator(cairo::Operator::Clear);
cr.paint().expect("Failed to clear background");
cr.restore().expect("Failed to restore state");
// The image we draw (the text) will be static, but we will change the
// transformation on the drawing context, which rotates and shifts everything
// that we draw afterwards. Like this, we have no complicated calculations
// in the actual drawing below.
// Calling multiple transformation methods after each other will apply the
// new transformation on top. If you repeat the cr.rotate(angle) line below
// this a second time, everything in the canvas will rotate twice as fast.
cr.translate(
f64::from(info.width()) / 2.0,
f64::from(info.height()) / 2.0,
);
cr.rotate(angle);
// This loop will render the string "GStreamer" 10 times in a circle
for i in 0..10 {
// Cairo, like most rendering frameworks, uses a stack for transformations.
// With save(), we push our current transformation onto this stack - allowing us
// to make temporary changes, render something, and then return to the
// previous transformation.
cr.save().expect("Failed to save state");
let angle = (360. * f64::from(i)) / 10.0;
let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
cr.set_source_rgb(red, 0.0, 1.0 - red);
cr.rotate(angle * PI / 180.0);
// Update the text layout. This function only updates pango's internal state,
// so that e.g. after a 90 degree rotation it knows that what was previously going
// to end up as a 200x100 rectangle would now be 100x200.
pangocairo::functions::update_layout(&cr, layout);
let (width, _height) = layout.size();
// Using width and height of the text, we can properly position it within
// our canvas.
cr.move_to(
-(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
-(f64::from(info.height())) / 2.0,
);
// After telling the layout object where to draw itself, we actually tell
// it to draw itself into our cairo context.
pangocairo::functions::show_layout(&cr, layout);
// Here we go one step up in our stack of transformations, removing any
// changes we made to them since the last call to cr.save();
cr.restore().expect("Failed to restore state");
}
/* Drop the Cairo context to release the additional reference to the data and
* then take ownership of the data. This only works if we have the one and only
* reference to the image surface */
drop(cr);
let stride = surface.stride();
let data = surface.take_data().unwrap();
/* Create an RGBA buffer, and add a video meta that the videooverlaycomposition expects */
let mut buffer = gst::Buffer::from_mut_slice(data);
gst_video::VideoMeta::add_full(
buffer.get_mut().unwrap(),
gst_video::VideoFrameFlags::empty(),
gst_video::VideoFormat::Bgra,
info.width(),
info.height(),
&[0],
&[stride],
)
.unwrap();
/* Turn the buffer into a VideoOverlayRectangle, then place
* that into a VideoOverlayComposition and return it.
*
* A VideoOverlayComposition can take a Vec of such rectangles
* spaced around the video frame, but we're just outputting 1
* here */
let rect = gst_video::VideoOverlayRectangle::new_raw(
&buffer,
0,
0,
info.width(),
info.height(),
gst_video::VideoOverlayFormatFlags::PREMULTIPLIED_ALPHA,
);
gst_video::VideoOverlayComposition::new(Some(&rect))
.unwrap()
}),
);
// Add a signal handler to the overlay's "caps-changed" signal. This could e.g.
// be called when the sink that we render to does not support resizing the image
// itself - but the user just changed the window-size. The element after the overlay
// will then change its caps and we use the notification about this change to
// resize our canvas's size.
// Another possibility for when this might happen is, when our video is a network
// stream that dynamically changes resolution when enough bandwidth is available.
overlay.connect_closure(
"caps-changed",
false,
glib::closure!(move |_overlay: &gst::Element,
caps: &gst::Caps,
_width: u32,
_height: u32| {
let mut drawer = drawer.lock().unwrap();
drawer.info = Some(gst_video::VideoInfo::from_caps(caps).unwrap());
}),
);
Ok(pipeline)
}
fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
let bus = pipeline
.bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
pipeline.set_state(gst::State::Null)?;
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
}
.into());
}
_ => (),
}
}
pipeline.set_state(gst::State::Null)?;
Ok(())
}
fn example_main() {
match create_pipeline().and_then(main_loop) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}
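The Arc/Mutex reasoning spelled out in the comments above is independent of GStreamer and can be seen in miniature:

use std::sync::{Arc, Mutex};

fn main() {
    // Arc gives shared ownership across threads; Mutex gives runtime-checked
    // exclusive access, replacing what the borrow checker does at compile time.
    let state = Arc::new(Mutex::new(0u32));
    let state2 = Arc::clone(&state);
    std::thread::spawn(move || *state2.lock().unwrap() += 1)
        .join()
        .unwrap();
    assert_eq!(*state.lock().unwrap(), 1);
}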

View file

@@ -1,85 +1,56 @@
// This example demonstrates the use of GStreamer's pad probe APIs.
// Probes are callbacks that can be installed by the application and will notify
// the application about the states of the dataflow. Those are mostly used for
// changing pipelines dynamically at runtime or for inspecting/modifying buffers or events
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_audio as gst_audio;
// |-[probe]
// /
// {audiotestsrc} - {fakesink}
#![allow(clippy::question_mark)]
extern crate byte_slice_cast;
use byte_slice_cast::*;
use std::i16;
use byte_slice_cast::*;
use gst::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
fn example_main() {
gst::init().unwrap();
// Parse the pipeline we want to probe from a static in-line string.
// Here we give our audiotestsrc a name, so we can retrieve that element
// from the resulting pipeline.
let pipeline = gst::parse::launch(&format!(
let pipeline = gst::parse_launch(&format!(
"audiotestsrc name=src ! audio/x-raw,format={},channels=1 ! fakesink",
gst_audio::AUDIO_FORMAT_S16
gst_audio::AUDIO_FORMAT_S16.to_string()
))
.unwrap();
let pipeline = pipeline.dynamic_cast::<gst::Pipeline>().unwrap();
// Get the audiotestsrc element from the pipeline that GStreamer
// created for us while parsing the launch syntax above.
let src = pipeline.by_name("src").unwrap();
// Get the audiotestsrc's src-pad.
let src_pad = src.static_pad("src").unwrap();
// Add a probe handler on the audiotestsrc's src-pad.
// This handler gets called for every buffer that passes the pad we probe.
let src = pipeline.get_by_name("src").unwrap();
let src_pad = src.get_static_pad("src").unwrap();
src_pad.add_probe(gst::PadProbeType::BUFFER, |_, probe_info| {
// Interpret the data sent over the pad as one buffer
let Some(buffer) = probe_info.buffer() else {
return gst::PadProbeReturn::Ok;
};
if let Some(gst::PadProbeData::Buffer(ref buffer)) = probe_info.data {
let map = buffer.map_readable().unwrap();
// At this point, buffer is only a reference to an existing memory region somewhere.
// When we want to access its content, we have to map it while requesting the required
// mode of access (read, read/write).
// This type of abstraction is necessary, because the buffer in question might not be
// on the machine's main memory itself, but rather in the GPU's memory.
// So mapping the buffer makes the underlying memory region accessible to us.
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
let map = buffer.map_readable().unwrap();
let samples = if let Ok(samples) = map.as_slice_of::<i16>() {
samples
} else {
return gst::PadProbeReturn::Ok;
};
// We know what format the data in the memory region has, since we requested
// it by setting the caps in the launch syntax above. So what we do here is interpret the
// memory region we mapped as an array of signed 16 bit integers.
let samples = if let Ok(samples) = map.as_slice_of::<i16>() {
samples
} else {
return gst::PadProbeReturn::Ok;
};
// For buffer (= chunk of samples), we calculate the root mean square:
let sum: f64 = samples
.iter()
.map(|sample| {
let f = f64::from(*sample) / f64::from(i16::MAX);
f * f
})
.sum();
let rms = (sum / (samples.len() as f64)).sqrt();
println!("rms: {rms}");
let sum: f64 = samples
.iter()
.map(|sample| {
let f = f64::from(*sample) / f64::from(i16::MAX);
f * f
})
.sum();
let rms = (sum / (samples.len() as f64)).sqrt();
println!("rms: {}", rms);
}
gst::PadProbeReturn::Ok
});
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let ret = pipeline.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
let bus = pipeline.bus().unwrap();
for msg in bus.iter_timed(gst::ClockTime::NONE) {
let bus = pipeline.get_bus().unwrap();
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@@ -87,9 +58,9 @@ fn example_main() {
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
break;
}
@@ -97,13 +68,12 @@ fn example_main() {
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}
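The RMS computation inside the probe, isolated with the same normalization by i16::MAX:

fn rms_i16(samples: &[i16]) -> f64 {
    // Mean of the squared, normalized samples, then the square root.
    let sum: f64 = samples
        .iter()
        .map(|&s| {
            let f = f64::from(s) / f64::from(i16::MAX);
            f * f
        })
        .sum();
    (sum / samples.len() as f64).sqrt()
}

fn main() {
    // A full-scale square wave has an RMS of 1.0.
    assert!((rms_i16(&[i16::MAX, -i16::MAX]) - 1.0).abs() < 1e-9);
}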

View file

@@ -1,39 +1,46 @@
// This example demonstrates how to overlay a video using the cairo
// library. For this, the cairooverlay element is used on a video stream.
// Additionally, this example uses functionality of the pango library, which handles
// text layout. The pangocairo crate is a convenient wrapper combining both libraries
// into a single interface.
// The drawing surface which the cairooverlay element creates internally can then
// be drawn on as usual using the cairo library.
// The operated pipeline looks like this:
extern crate glib;
// {videotestsrc} - {cairooverlay} - {capsfilter} - {videoconvert} - {autovideosink}
// The capsfilter element allows us to dictate the video resolution we want for the
// videotestsrc and the cairooverlay element.
use std::{
ops,
sync::{Arc, Mutex},
};
use anyhow::Error;
use derive_more::{Display, Error};
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate cairo;
extern crate gstreamer_video as gst_video;
extern crate pango;
use pango::prelude::*;
extern crate pangocairo;
use std::error::Error as StdError;
use std::ops;
use std::sync::{Arc, Mutex};
extern crate failure;
use failure::Error;
#[macro_use]
extern crate failure_derive;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[derive(Debug, Fail)]
#[fail(display = "Missing element {}", _0)]
struct MissingElement(&'static str);
#[derive(Debug, Fail)]
#[fail(
display = "Received error from {}: {} (debug: {:?})",
src, error, debug
)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
#[cause]
cause: glib::Error,
}
struct DrawingContext {
layout: LayoutWrapper,
layout: glib::SendUniqueCell<LayoutWrapper>,
info: Option<gst_video::VideoInfo>,
}
@@ -44,192 +51,148 @@ impl ops::Deref for LayoutWrapper {
type Target = pango::Layout;
fn deref(&self) -> &pango::Layout {
assert_eq!(self.0.ref_count(), 1);
&self.0
}
}
// SAFETY: We ensure that there are never multiple references to the layout.
unsafe impl Send for LayoutWrapper {}
unsafe impl glib::SendUnique for LayoutWrapper {
fn is_unique(&self) -> bool {
self.0.ref_count() == 1
}
}
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("videotestsrc")
// The videotestsrc supports multiple test patterns. In this example, we will use the
// pattern with a white ball moving around the video's center point.
.property_from_str("pattern", "ball")
.build()?;
let overlay = gst::ElementFactory::make("cairooverlay").build()?;
let pipeline = gst::Pipeline::new(None);
let src =
gst::ElementFactory::make("videotestsrc", None).ok_or(MissingElement("videotestsrc"))?;
let overlay =
gst::ElementFactory::make("cairooverlay", None).ok_or(MissingElement("cairooverlay"))?;
let capsfilter =
gst::ElementFactory::make("capsfilter", None).ok_or(MissingElement("capsfilter"))?;
let videoconvert =
gst::ElementFactory::make("videoconvert", None).ok_or(MissingElement("videoconvert"))?;
let sink =
gst::ElementFactory::make("autovideosink", None).ok_or(MissingElement("autovideosink"))?;
// Plug in a capsfilter element that will force the videotestsrc and the cairooverlay to work
// with images of the size 800x800.
let caps = gst_video::VideoCapsBuilder::new()
.width(800)
.height(800)
pipeline.add_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
gst::Element::link_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
let caps = gst::Caps::builder("video/x-raw")
.field("width", &800i32)
.field("height", &800i32)
.build();
let capsfilter = gst::ElementFactory::make("capsfilter")
.property("caps", &caps)
.build()?;
capsfilter.set_property("caps", &caps).unwrap();
let videoconvert = gst::ElementFactory::make("videoconvert").build()?;
let sink = gst::ElementFactory::make("autovideosink").build()?;
src.set_property_from_str("pattern", "ball");
pipeline.add_many([&src, &overlay, &capsfilter, &videoconvert, &sink])?;
gst::Element::link_many([&src, &overlay, &capsfilter, &videoconvert, &sink])?;
// The PangoFontMap represents the set of fonts available for a particular rendering system.
let fontmap = pangocairo::FontMap::new();
// Create a new pango layouting context for the fontmap.
let context = fontmap.create_context();
// Create a pango layout object. This object is a string of text we want to layout.
// It is wrapped in a LayoutWrapper (defined above) to be able to send it across threads.
let fontmap = pangocairo::FontMap::new().unwrap();
let context = fontmap.create_context().unwrap();
let layout = LayoutWrapper(pango::Layout::new(&context));
// Select the text content and the font we want to use for the piece of text.
let font_desc = pango::FontDescription::from_string("Sans Bold 26");
layout.set_font_description(Some(&font_desc));
layout.set_font_description(&font_desc);
layout.set_text("GStreamer");
// The following is a context struct (containing the pango layout and the configured video info).
// We have to wrap it in an Arc (or Rc) to get reference counting, that is: to be able to have
// shared ownership of it in multiple different places (the two signal handlers here).
// We have to wrap it in a Mutex because Rust's type-system can't know that both signals are
// only ever called from a single thread (the streaming thread). It would be enough to have
// something that is Send in theory but that's not how signal handlers are generated unfortunately.
// The Mutex (or, if we didn't need the Sync bound, a RefCell) implements
// interior mutability (see the Rust docs). Via this we can get a mutable reference to the contained
// data, which is checked at runtime for uniqueness (blocking in the case of a mutex, panicking in the
// case of a refcell) instead of at compile time (like with normal references).
let drawer = Arc::new(Mutex::new(DrawingContext { layout, info: None }));
let drawer = Arc::new(Mutex::new(DrawingContext {
layout: glib::SendUniqueCell::new(layout).unwrap(),
info: None,
}));
let drawer_clone = drawer.clone();
// Connect to the cairooverlay element's "draw" signal, which is emitted for
// each videoframe piped through the element. Here we have the possibility to
// draw on top of the frame (overlay it), using the cairo render api.
// Signals connected with the connect(<name>, ...) API get their arguments
// passed as an array of glib::Value. For documentation about the actual arguments
// it is always a good idea to check the element's signals using either
// gst-inspect, or the online documentation.
overlay.connect("draw", false, move |args| {
use std::f64::consts::PI;
overlay
.connect("draw", false, move |args| {
use std::f64::consts::PI;
let drawer = &drawer_clone;
let drawer = drawer.lock().unwrap();
let drawer = &drawer_clone;
let drawer = drawer.lock().unwrap();
// Get the signal's arguments
let _overlay = args[0].get::<gst::Element>().unwrap();
// This is the cairo context. This is the root of all of cairo's
// drawing functionality.
let cr = args[1].get::<cairo::Context>().unwrap();
let timestamp = args[2].get::<gst::ClockTime>().unwrap();
let _duration = args[3].get::<gst::ClockTime>().unwrap();
let _overlay = args[0].get::<gst::Element>().unwrap();
let cr = args[1].get::<cairo::Context>().unwrap();
let timestamp = args[2].get::<gst::ClockTime>().unwrap();
let _duration = args[3].get::<gst::ClockTime>().unwrap();
let info = drawer.info.as_ref().unwrap();
let layout = &drawer.layout;
let info = drawer.info.as_ref().unwrap();
let layout = drawer.layout.borrow();
let angle = 2.0 * PI * (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
/ (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
let angle = 2.0
* PI
* ((timestamp % (10 * gst::SECOND)).unwrap() as f64
/ (10.0 * gst::SECOND_VAL as f64));
// The image we draw (the text) will be static, but we will change the
// transformation on the drawing context, which rotates and shifts everything
// that we draw afterwards. Like this, we have no complicated calculations
// in the actual drawing below.
// Calling multiple transformation methods after each other will apply the
// new transformation on top. If you repeat the cr.rotate(angle) line below
// this a second time, everything in the canvas will rotate twice as fast.
cr.translate(
f64::from(info.width()) / 2.0,
f64::from(info.height()) / 2.0,
);
cr.rotate(angle);
cr.translate(info.width() as f64 / 2.0, info.height() as f64 / 2.0);
cr.rotate(angle);
// This loop will render the string "GStreamer" 10 times in a circle
for i in 0..10 {
// Cairo, like most rendering frameworks, uses a stack for transformations.
// With save(), we push our current transformation onto this stack - allowing us
// to make temporary changes, render something, and then return to the
// previous transformation.
cr.save().expect("Failed to save state");
for i in 0..10 {
cr.save();
let angle = (360. * f64::from(i)) / 10.0;
let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
cr.set_source_rgb(red, 0.0, 1.0 - red);
cr.rotate(angle * PI / 180.0);
let angle = (360. * i as f64) / 10.0;
let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
cr.set_source_rgb(red, 0.0, 1.0 - red);
cr.rotate(angle * PI / 180.0);
// Update the text layout. This function only updates pango's internal state,
// so that e.g. after a 90 degree rotation it knows that what was previously going
// to end up as a 200x100 rectangle would now be 100x200.
pangocairo::functions::update_layout(&cr, layout);
let (width, _height) = layout.size();
// Using width and height of the text, we can properly position it within
// our canvas.
cr.move_to(
-(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
-(f64::from(info.height())) / 2.0,
);
// After telling the layout object where to draw itself, we actually tell
// it to draw itself into our cairo context.
pangocairo::functions::show_layout(&cr, layout);
pangocairo::functions::update_layout(&cr, &layout);
let (width, _height) = layout.get_size();
cr.move_to(
-(width as f64 / pango::SCALE as f64) / 2.0,
-(info.height() as f64) / 2.0,
);
pangocairo::functions::show_layout(&cr, &layout);
// Here we go one step up in our stack of transformations, removing any
// changes we made to them since the last call to cr.save();
cr.restore().expect("Failed to restore state");
}
cr.restore();
}
None
});
None
})
.unwrap();
// Add a signal handler to the overlay's "caps-changed" signal. This could e.g.
// be called when the sink that we render to does not support resizing the image
// itself - but the user just changed the window-size. The element after the overlay
// will then change its caps and we use the notification about this change to
// resize our canvas's size.
// Another possibility for when this might happen is, when our video is a network
// stream that dynamically changes resolution when enough bandwidth is available.
overlay.connect("caps-changed", false, move |args| {
let _overlay = args[0].get::<gst::Element>().unwrap();
let caps = args[1].get::<gst::Caps>().unwrap();
let drawer_clone = drawer.clone();
overlay
.connect("caps-changed", false, move |args| {
let _overlay = args[0].get::<gst::Element>().unwrap();
let caps = args[1].get::<gst::Caps>().unwrap();
let mut drawer = drawer.lock().unwrap();
drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
let drawer = &drawer_clone;
let mut drawer = drawer.lock().unwrap();
drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
None
});
None
})
.unwrap();
Ok(pipeline)
}
fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
pipeline.set_state(gst::State::Playing).into_result()?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
pipeline.set_state(gst::State::Null)?;
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
}
.into());
pipeline.set_state(gst::State::Null).into_result()?;
Err(ErrorMessage {
src: err
.get_src()
.map(|s| s.get_path_string())
.unwrap_or_else(|| String::from("None")),
error: err.get_error().description().into(),
debug: err.get_debug(),
cause: err.get_error(),
})?;
}
_ => (),
}
}
pipeline.set_state(gst::State::Null)?;
pipeline.set_state(gst::State::Null).into_result()?;
Ok(())
}
@@ -237,12 +200,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
fn example_main() {
match create_pipeline().and_then(main_loop) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}
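The timestamp-to-angle mapping both sides of this diff compute can be isolated as well. A sketch using the modern ClockTime API (the 0.12 side divides by gst::SECOND_VAL instead):

use std::f64::consts::PI;

// Map a buffer timestamp onto [0, 2π), completing one full revolution per
// `period` (the examples use 10 * gst::ClockTime::SECOND).
fn angle_at(timestamp: gst::ClockTime, period: gst::ClockTime) -> f64 {
    2.0 * PI * (timestamp % period).nseconds() as f64 / period.nseconds() as f64
}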

View file

@@ -1,67 +0,0 @@
// This example shows how to use the GstPlay API.
// The GstPlay API is a convenience API to allow implement playback applications
// without having to write too much code.
// Most of the tasks a play needs to support (such as seeking and switching
// audio / subtitle streams or changing the volume) are all supported by simple
// one-line function calls on the GstPlay.
use std::env;
use anyhow::Error;
#[path = "../examples-common.rs"]
mod examples_common;
use gst_play::{Play, PlayMessage, PlayVideoRenderer};
fn main_loop(uri: &str) -> Result<(), Error> {
gst::init()?;
let play = Play::new(None::<PlayVideoRenderer>);
play.set_uri(Some(uri));
play.play();
let mut result = Ok(());
for msg in play.message_bus().iter_timed(gst::ClockTime::NONE) {
match PlayMessage::parse(&msg) {
Ok(PlayMessage::EndOfStream) => {
play.stop();
break;
}
Ok(PlayMessage::Error { error, details: _ }) => {
result = Err(error);
play.stop();
break;
}
Ok(_) => (),
Err(_) => unreachable!(),
}
}
// Set the message bus to flushing to ensure that all pending messages are dropped and there
// are no further references to the play instance.
play.message_bus().set_flushing(true);
result.map_err(|e| e.into())
}
fn example_main() {
let args: Vec<_> = env::args().collect();
let uri: &str = if args.len() == 2 {
args[1].as_ref()
} else {
println!("Usage: play uri");
std::process::exit(-1)
};
match main_loop(uri) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}
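The "simple one-line function calls" the header comment mentions look like this in practice. A sketch against the gst_play bindings; the chosen values are arbitrary:

use gst_play::Play;

fn tweak(play: &Play) {
    play.set_volume(0.5); // half volume
    play.seek(gst::ClockTime::from_seconds(30)); // jump to 0:30
    play.set_audio_track_enabled(false); // disable audio playback
}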

View file

@@ -1,18 +1,10 @@
// This example demonstrates GStreamer's playbin element.
// This element takes an arbitrary URI as parameter, and if there is a source
// element within GStreamer that supports this URI, the playbin will try
// to automatically create a pipeline that properly plays this media source.
// For this, the playbin internally relies on more bin elements, like the
// autovideosink and the decodebin.
// Essentially, this element is a single-element pipeline able to play
// any format from any uri-addressable source that gstreamer supports.
// Much of the playbin's behavior can be controlled by so-called flags, as well
// as the playbin's properties and signals.
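// A hedged sketch (not part of the original example): besides flags, playbin
// is steered through ordinary GObject properties and signals. Hypothetical
// helper, assuming the current gstreamer-rs property API:
fn demo_playbin_controls(playbin: &gst::Element) {
    // "volume" (f64) and "mute" (bool) are plain properties on playbin.
    playbin.set_property("volume", 0.5f64);
    playbin.set_property("mute", false);
    // "about-to-finish" fires shortly before playback ends; it is commonly
    // used to set the next "uri" property for gapless playback.
    playbin.connect("about-to-finish", false, |_values| None);
}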
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate glib;
use std::env;
use gst::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
@@ -27,84 +19,56 @@ fn example_main() {
std::process::exit(-1)
};
// Create a new playbin element, and tell it what uri to play back.
let playbin = gst::ElementFactory::make("playbin")
.property("uri", uri)
.build()
let playbin = gst::ElementFactory::make("playbin", None).unwrap();
playbin
.set_property("uri", &glib::Value::from(uri))
.unwrap();
// For flags handling
// With flags, one can configure playbin's behavior such as whether it
// should play back contained video streams, or if it should render subtitles.
// let flags = playbin.property_value("flags");
// let flags_class = FlagsClass::with_type(flags.type_()).unwrap();
// let flags = playbin.get_property("flags").unwrap();
// let flags_class = FlagsClass::new(flags.type_()).unwrap();
// let flags = flags_class.builder_with_value(flags).unwrap()
// .unset_by_nick("text")
// .unset_by_nick("video")
// .build()
// .unwrap();
// playbin.set_property_from_value("flags", &flags);
// playbin.set_property("flags", &flags).unwrap();
// The playbin also provides any kind of metadata that it found in the played stream.
// For this, the playbin provides signals notifying about changes in the metadata.
// Doing this with a signal makes sense for multiple reasons.
// - The metadata is only found after the pipeline has been started
// - Live streams (such as internet radios) update this metadata during the stream
// Note that this signal will be emitted from the streaming threads usually,
// not the application's threads!
playbin.connect("audio-tags-changed", false, |values| {
// The metadata of any of the contained audio streams changed
// In the case of a live-stream from an internet radio, this could for example
// mark the beginning of a new track, or a new DJ.
let playbin = values[0]
.get::<glib::Object>()
.expect("playbin \"audio-tags-changed\" signal values[0]");
// This gets the index of the stream that changed. This is necessary, since
// there could e.g. be multiple audio streams (English, Spanish, ...).
let idx = values[1]
.get::<i32>()
.expect("playbin \"audio-tags-changed\" signal values[1]");
playbin
.connect("audio-tags-changed", false, |values| {
let playbin = values[0].get::<glib::Object>().unwrap();
let idx = values[1].get::<i32>().unwrap();
println!("audio tags of audio stream {idx} changed:");
println!("audio tags of audio stream {} changed:", idx);
// We were only notified about the change of metadata. If we want to do
// something with it, we first need to actually query the metadata from the playbin.
// We do this by emitting the get-audio-tags action-signal on playbin.
// Sending an action-signal to an element essentially is a function call on the element.
// It is done that way because elements do not have their own function API; they
// rely on GStreamer and GLib's API. The only way an element can communicate with an
// application is via properties, signals or action signals (or custom messages, events, queries).
// So what the following code does is essentially ask playbin to hand us the tag
// list it already stores internally for this stream index.
let tags = playbin.emit_by_name::<Option<gst::TagList>>("get-audio-tags", &[&idx]);
let tags = playbin
.emit("get-audio-tags", &[&idx.to_value()])
.unwrap()
.unwrap();
let tags = tags.get::<gst::TagList>().unwrap();
if let Some(tags) = tags {
if let Some(artist) = tags.get::<gst::tags::Artist>() {
println!(" Artist: {}", artist.get());
println!(" Artist: {}", artist.get().unwrap());
}
if let Some(title) = tags.get::<gst::tags::Title>() {
println!(" Title: {}", title.get());
println!(" Title: {}", title.get().unwrap());
}
if let Some(album) = tags.get::<gst::tags::Album>() {
println!(" Album: {}", album.get());
println!(" Album: {}", album.get().unwrap());
}
}
None
});
None
})
.unwrap();
// The playbin element itself is a pipeline, so it can be used as one, despite being
// created from an element factory.
let bus = playbin.bus().unwrap();
let bus = playbin.get_bus().unwrap();
playbin
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let ret = playbin.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
for msg in bus.iter_timed(gst::ClockTime::NONE) {
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@@ -112,35 +76,22 @@ fn example_main() {
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
break;
}
MessageView::StateChanged(state_changed) =>
// We are only interested in state-changed messages from playbin
{
if state_changed.src().map(|s| s == &playbin).unwrap_or(false)
&& state_changed.current() == gst::State::Playing
{
// Generate a dot graph of the pipeline to GST_DEBUG_DUMP_DOT_DIR if defined
let bin_ref = playbin.downcast_ref::<gst::Bin>().unwrap();
bin_ref.debug_to_dot_file(gst::DebugGraphDetails::all(), "PLAYING");
}
}
_ => (),
}
}
playbin
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
let ret = playbin.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}


@@ -1,18 +1,18 @@
// This example shows how to use the GstPlayer API.
// The GstPlayer API is a convenience API for implementing playback applications
// without having to write too much code.
// Most of the tasks a player needs to support (such as seeking and switching
// audio / subtitle streams or changing the volume) are all supported by simple
// one-line function calls on the GstPlayer.
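// A hedged sketch (not part of the original example), mirroring the GstPlay
// case above: a hypothetical helper using gst_player convenience calls;
// exact signatures may vary between versions.
fn demo_player_controls(player: &gst_player::Player) {
    // Seek one minute into the stream.
    player.seek(gst::ClockTime::from_seconds(60));
    // Play back at double speed.
    player.set_rate(2.0);
}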
use std::{
env,
sync::{Arc, Mutex},
};
use anyhow::Error;
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_player as gst_player;
extern crate glib;
use std::env;
use std::sync::{Arc, Mutex};
extern crate failure;
#[allow(unused_imports)]
use failure::Error;
#[allow(unused_imports)]
#[path = "../examples-common.rs"]
mod examples_common;
@@ -24,18 +24,15 @@ fn main_loop(uri: &str) -> Result<(), Error> {
let dispatcher = gst_player::PlayerGMainContextSignalDispatcher::new(None);
let player = gst_player::Player::new(
None::<gst_player::PlayerVideoRenderer>,
Some(dispatcher.upcast::<gst_player::PlayerSignalDispatcher>()),
None,
Some(&dispatcher.upcast::<gst_player::PlayerSignalDispatcher>()),
);
// Tell the player what uri to play.
player.set_uri(Some(uri));
player.set_property("uri", &glib::Value::from(uri))?;
let error = Arc::new(Mutex::new(Ok(())));
let main_loop_clone = main_loop.clone();
// Connect to the player's "end-of-stream" signal, which will tell us when the
// currently played media stream reached its end.
player.connect_end_of_stream(move |player| {
let main_loop = &main_loop_clone;
player.stop();
@@ -44,8 +41,6 @@ fn main_loop(uri: &str) -> Result<(), Error> {
let main_loop_clone = main_loop.clone();
let error_clone = Arc::clone(&error);
// Connect to the player's "error" signal, which will inform us about any
// errors (such as failing to retrieve an http stream).
player.connect_error(move |player, err| {
let main_loop = &main_loop_clone;
let error = &error_clone;
@@ -76,12 +71,12 @@ fn example_main() {
match main_loop(uri) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}


@@ -1,132 +1,98 @@
// This example demonstrates how to use GStreamer's query functionality.
// These are a way to query information from either elements or pads.
// Such information could for example be the current position within
// the stream (i.e. the playing time). Queries can traverse the pipeline
// (both up and downstream). This functionality is essential, since most
// queries can only be answered by specific elements in a pipeline (such as the
// stream's duration, which often can only be answered by the demuxer).
// Since gstreamer has many elements that themselves contain other elements that
// we don't know of, we can simply send a query for the duration into the
// pipeline and the query is passed along until an element feels capable
// of answering.
// For convenience, the API has a set of pre-defined queries, but also
// allows custom queries (which can be defined and used by your own elements).
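// A hedged sketch (not part of the original example): for the common cases,
// gstreamer-rs also provides convenience wrappers that build, send, and parse
// the query in one call (assuming a reasonably recent API version):
fn demo_convenience_queries(pipeline: &gst::Element) {
    // Equivalent to constructing a Position/Duration query, sending it with
    // pipeline.query(), and reading back the result, as done further below.
    let pos = pipeline.query_position::<gst::ClockTime>();
    let dur = pipeline.query_duration::<gst::ClockTime>();
    println!("{:?} / {:?}", pos, dur);
}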
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate glib;
use std::env;
use gst::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
fn example_main() {
// Get a string containing the passed pipeline launch syntax
let pipeline_str = env::args().collect::<Vec<String>>()[1..].join(" ");
gst::init().unwrap();
let main_loop = glib::MainLoop::new(None, false);
// Let GStreamer create a pipeline from the parsed launch syntax on the cli.
let pipeline = gst::parse::launch(&pipeline_str).unwrap();
let bus = pipeline.bus().unwrap();
let pipeline = gst::parse_launch(&pipeline_str).unwrap();
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let ret = pipeline.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
let main_loop_clone = main_loop.clone();
// Need to move a new reference into the closure.
// !!ATTENTION!!:
// It might seem appealing to use pipeline.clone() here, because that greatly
// simplifies the code within the callback. What this actually does, however, is create
// a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
// Storing this strong reference within the callback (we are moving it in!), while the
// callback is in turn stored in another strong reference on the pipeline, creates a
// reference cycle.
// DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
let pipeline_weak = pipeline.downgrade();
// Add a timeout to the main loop. This closure will be executed
// at an interval of 1 second.
let timeout_id = glib::timeout_add_seconds(1, move || {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let Some(pipeline) = pipeline_weak.upgrade() else {
return glib::ControlFlow::Break;
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return glib::Continue(true),
};
//let pos = pipeline.query_position(gst::Format::Time).unwrap_or(-1);
//let dur = pipeline.query_duration(gst::Format::Time).unwrap_or(-1);
let pos: Option<gst::ClockTime> = {
// Create a new position query and send it to the pipeline.
// This will traverse all elements in the pipeline, until one feels
// capable of answering the query.
let mut q = gst::query::Position::new(gst::Format::Time);
let pos = {
let mut q = gst::Query::new_position(gst::Format::Time);
if pipeline.query(&mut q) {
Some(q.result())
Some(q.get_result())
} else {
None
}
}
.and_then(|pos| pos.try_into().ok())
.and_then(|pos| pos.try_into_time().ok())
.unwrap();
let dur: Option<gst::ClockTime> = {
// Create a new duration query and send it to the pipeline.
// This will traverse all elements in the pipeline, until one feels
// capable of answering the query.
let mut q = gst::query::Duration::new(gst::Format::Time);
let dur = {
let mut q = gst::Query::new_duration(gst::Format::Time);
if pipeline.query(&mut q) {
Some(q.result())
Some(q.get_result())
} else {
None
}
}
.and_then(|dur| dur.try_into().ok())
.and_then(|dur| dur.try_into_time().ok())
.unwrap();
println!("{} / {}", pos.display(), dur.display());
println!("{} / {}", pos, dur);
glib::ControlFlow::Continue
glib::Continue(true)
});
// Need to move a new reference into the closure.
let main_loop_clone = main_loop.clone();
//bus.add_signal_watch();
//bus.connect_message(None, move |_, msg| {
let _bus_watch = bus
.add_watch(move |_, msg| {
use gst::MessageView;
//bus.connect_message(move |_, msg| {
bus.add_watch(move |_, msg| {
use gst::MessageView;
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => main_loop.quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
main_loop.quit();
}
_ => (),
};
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => main_loop.quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
main_loop.quit();
}
_ => (),
};
glib::ControlFlow::Continue
})
.expect("Failed to add bus watch");
glib::Continue(true)
});
main_loop.run();
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
timeout_id.remove();
bus.remove_watch();
glib::source_remove(timeout_id);
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}


@@ -1,63 +1,90 @@
use std::env;
#[macro_use]
extern crate gstreamer as gst;
use gst::prelude::*;
use gst::{element_error, prelude::*};
extern crate glib;
use std::env;
use std::error::Error as StdError;
#[path = "../examples-common.rs"]
mod examples_common;
use anyhow::Error;
use derive_more::{Display, Error};
extern crate failure;
use failure::Error;
#[derive(Debug, Display, Error)]
#[display(fmt = "No such pad {_0} in {_1}")]
struct NoSuchPad(#[error(not(source))] &'static str, String);
#[macro_use]
extern crate failure_derive;
#[derive(Debug, Display, Error)]
#[display(fmt = "Unknown payload type {_0}")]
struct UnknownPT(#[error(not(source))] u32);
#[derive(Debug, Fail)]
#[fail(display = "Missing element {}", _0)]
struct MissingElement(&'static str);
#[derive(Debug, Display, Error)]
#[display(fmt = "Usage: {_0} (play | record) DROP_PROBABILITY")]
struct UsageError(#[error(not(source))] String);
#[derive(Debug, Fail)]
#[fail(display = "No such pad {} in {}", _0, _1)]
struct NoSuchPad(&'static str, String);
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[derive(Debug, Fail)]
#[fail(display = "Unknown payload type {}", _0)]
struct UnknownPT(u32);
#[derive(Debug, Fail)]
#[fail(display = "Usage: {} (play | record) DROP_PROBABILITY", _0)]
struct UsageError(String);
#[derive(Debug, Fail)]
#[fail(
display = "Received error from {}: {} (debug: {:?})",
src, error, debug
)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
#[cause]
cause: glib::Error,
}
fn static_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.static_pad(pad_name) {
fn make_element<'a, P: Into<Option<&'a str>>>(
factory_name: &'static str,
element_name: P,
) -> Result<gst::Element, Error> {
match gst::ElementFactory::make(factory_name, element_name.into()) {
Some(elem) => Ok(elem),
None => Err(Error::from(MissingElement(factory_name))),
}
}
fn get_static_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.get_static_pad(pad_name) {
Some(pad) => Ok(pad),
None => {
let element_name = element.name();
Err(Error::from(NoSuchPad(pad_name, element_name.to_string())))
let element_name = element.get_name();
Err(Error::from(NoSuchPad(pad_name, element_name)))
}
}
}
fn request_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.request_pad_simple(pad_name) {
fn get_request_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.get_request_pad(pad_name) {
Some(pad) => Ok(pad),
None => {
let element_name = element.name();
Err(Error::from(NoSuchPad(pad_name, element_name.to_string())))
let element_name = element.get_name();
Err(Error::from(NoSuchPad(pad_name, element_name)))
}
}
}
fn connect_rtpbin_srcpad(src_pad: &gst::Pad, sink: &gst::Element) -> Result<(), Error> {
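    // rtpbin's dynamically added source pads are named
    // "recv_rtp_src_<session>_<ssrc>_<pt>", so after splitting the name on '_'
    // the payload type is the sixth token (index 5).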
let name = src_pad.name();
let name = src_pad.get_name();
let split_name = name.split('_');
let split_name = split_name.collect::<Vec<&str>>();
let pt = split_name[5].parse::<u32>()?;
match pt {
96 => {
let sinkpad = static_pad(sink, "sink")?;
src_pad.link(&sinkpad)?;
let sinkpad = get_static_pad(sink, "sink")?;
src_pad.link(&sinkpad).into_result()?;
Ok(())
}
_ => Err(Error::from(UnknownPT(pt))),
@@ -65,11 +92,14 @@ fn connect_rtpbin_srcpad(src_pad: &gst::Pad, sink: &gst::Element) -> Result<(),
}
fn make_fec_decoder(rtpbin: &gst::Element, sess_id: u32) -> Result<gst::Element, Error> {
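    // ULPFEC (RFC 5109): the decoder recovers lost RTP packets from FEC
    // packets (payload type 100 here) and needs access to rtpbin's
    // per-session packet storage, which we fetch via an action signal.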
let internal_storage = rtpbin.emit_by_name::<glib::Object>("get-internal-storage", &[&sess_id]);
let fecdec = gst::ElementFactory::make("rtpulpfecdec")
.property("storage", &internal_storage)
.property("pt", 100u32)
.build()?;
let fecdec = make_element("rtpulpfecdec", None)?;
let internal_storage = rtpbin
.emit("get-internal-storage", &[&sess_id.to_value()])
.unwrap()
.unwrap();
fecdec.set_property("storage", &internal_storage.to_value())?;
fecdec.set_property("pt", &100u32.to_value())?;
Ok(fecdec)
}
@@ -85,55 +115,34 @@ fn example_main() -> Result<(), Error> {
let drop_probability = args[2].parse::<f32>()?;
let pipeline = gst::Pipeline::default();
let pipeline = gst::Pipeline::new(None);
let src = make_element("udpsrc", None)?;
let netsim = make_element("netsim", None)?;
let rtpbin = make_element("rtpbin", None)?;
let depay = make_element("rtpvp8depay", None)?;
let dec = make_element("vp8dec", None)?;
let conv = make_element("videoconvert", None)?;
let scale = make_element("videoscale", None)?;
let filter = make_element("capsfilter", None)?;
let rtp_caps = gst::Caps::builder("application/x-rtp")
.field("clock-rate", 90000i32)
.build();
let video_caps = gst_video::VideoCapsBuilder::new()
.width(1920)
.height(1080)
.build();
let src = gst::ElementFactory::make("udpsrc")
.property("address", "127.0.0.1")
.property("caps", &rtp_caps)
.build()?;
let netsim = gst::ElementFactory::make("netsim")
.property("drop-probability", drop_probability)
.build()?;
let rtpbin = gst::ElementFactory::make("rtpbin")
.property("do-lost", true)
.build()?;
let depay = gst::ElementFactory::make("rtpvp8depay").build()?;
let dec = gst::ElementFactory::make("vp8dec").build()?;
let conv = gst::ElementFactory::make("videoconvert").build()?;
let scale = gst::ElementFactory::make("videoscale").build()?;
let filter = gst::ElementFactory::make("capsfilter")
.property("caps", &video_caps)
.build()?;
pipeline.add_many([&src, &netsim, &rtpbin, &depay, &dec, &conv, &scale, &filter])?;
gst::Element::link_many([&depay, &dec, &conv, &scale, &filter])?;
pipeline.add_many(&[&src, &netsim, &rtpbin, &depay, &dec, &conv, &scale, &filter])?;
gst::Element::link_many(&[&depay, &dec, &conv, &scale, &filter])?;
match args[1].as_str() {
"play" => {
let sink = gst::ElementFactory::make("autovideosink").build()?;
let sink = make_element("autovideosink", None)?;
pipeline.add(&sink)?;
filter.link(&sink)?;
}
"record" => {
let enc = gst::ElementFactory::make("x264enc")
.property_from_str("tune", "zerolatency")
.build()?;
let mux = gst::ElementFactory::make("matroskamux").build()?;
let sink = gst::ElementFactory::make("filesink")
.property("location", "out.mkv")
.build()?;
let enc = make_element("x264enc", None)?;
let mux = make_element("matroskamux", None)?;
let sink = make_element("filesink", None)?;
pipeline.add_many([&enc, &mux, &sink])?;
gst::Element::link_many([&filter, &enc, &mux, &sink])?;
pipeline.add_many(&[&enc, &mux, &sink])?;
gst::Element::link_many(&[&filter, &enc, &mux, &sink])?;
sink.set_property("location", &"out.mkv".to_value())?;
enc.set_property_from_str("tune", "zerolatency");
eprintln!("Recording to out.mkv");
}
_ => return Err(Error::from(UsageError(args[0].clone()))),
@@ -142,51 +151,51 @@ fn example_main() -> Result<(), Error> {
src.link(&netsim)?;
rtpbin.connect("new-storage", false, |values| {
let storage = values[1]
.get::<gst::Element>()
.expect("rtpbin \"new-storage\" signal values[1]");
storage.set_property("size-time", 250_000_000u64);
let storage = values[1].get::<gst::Element>().expect("Invalid argument");
storage
.set_property("size-time", &250_000_000u64.to_value())
.unwrap();
None
});
})?;
rtpbin.connect("request-pt-map", false, |values| {
let pt = values[2]
.get::<u32>()
.expect("rtpbin \"new-storage\" signal values[2]");
let pt = values[2].get::<u32>().expect("Invalid argument");
match pt {
100 => Some(
gst::Caps::builder("application/x-rtp")
.field("media", "video")
.field("clock-rate", 90000i32)
.field("is-fec", true)
.build()
.to_value(),
gst::Caps::new_simple(
"application/x-rtp",
&[
("media", &"video"),
("clock-rate", &90000i32),
("is-fec", &true),
],
)
.to_value(),
),
96 => Some(
gst::Caps::builder("application/x-rtp")
.field("media", "video")
.field("clock-rate", 90000i32)
.field("encoding-name", "VP8")
.build()
.to_value(),
gst::Caps::new_simple(
"application/x-rtp",
&[
("media", &"video"),
("clock-rate", &90000i32),
("encoding-name", &"VP8"),
],
)
.to_value(),
),
_ => None,
}
});
})?;
rtpbin.connect("request-fec-decoder", false, |values| {
let rtpbin = values[0]
.get::<gst::Element>()
.expect("rtpbin \"request-fec-decoder\" signal values[0]");
let sess_id = values[1]
.get::<u32>()
.expect("rtpbin \"request-fec-decoder\" signal values[1]");
let rtpbin = values[0].get::<gst::Element>().expect("Invalid argument");
let sess_id = values[1].get::<u32>().expect("Invalid argument");
match make_fec_decoder(&rtpbin, sess_id) {
Ok(elem) => Some(elem.to_value()),
Err(err) => {
element_error!(
gst_element_error!(
rtpbin,
gst::LibraryError::Failed,
("Failed to make FEC decoder"),
@@ -195,74 +204,90 @@ fn example_main() -> Result<(), Error> {
None
}
}
});
})?;
let srcpad = static_pad(&netsim, "src")?;
let sinkpad = request_pad(&rtpbin, "recv_rtp_sink_0")?;
srcpad.link(&sinkpad)?;
let srcpad = get_static_pad(&netsim, "src")?;
let sinkpad = get_request_pad(&rtpbin, "recv_rtp_sink_0")?;
srcpad.link(&sinkpad).into_result()?;
let depay_weak = depay.downgrade();
rtpbin.connect_pad_added(move |rtpbin, src_pad| {
let Some(depay) = depay_weak.upgrade() else {
return;
let depay = match depay_weak.upgrade() {
Some(depay) => depay,
None => return,
};
match connect_rtpbin_srcpad(src_pad, &depay) {
match connect_rtpbin_srcpad(&src_pad, &depay) {
Ok(_) => (),
Err(err) => {
element_error!(
gst_element_error!(
rtpbin,
gst::LibraryError::Failed,
("Failed to link srcpad"),
["{}", err]
);
()
}
}
});
let rtp_caps = gst::Caps::new_simple("application/x-rtp", &[("clock-rate", &90000i32)]);
let video_caps =
gst::Caps::new_simple("video/x-raw", &[("width", &1920i32), ("height", &1080i32)]);
src.set_property("address", &"127.0.0.1".to_value())?;
src.set_property("caps", &rtp_caps.to_value())?;
netsim.set_property("drop-probability", &drop_probability.to_value())?;
rtpbin.set_property("do-lost", &true.to_value())?;
filter.set_property("caps", &video_caps.to_value())?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let ret = pipeline.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
for msg in bus.iter_timed(gst::ClockTime::NONE) {
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
.get_src()
.map(|s| s.get_path_string())
.unwrap_or_else(|| String::from("None")),
error: err.get_error().description().into(),
debug: err.get_debug(),
cause: err.get_error(),
}
.into());
}
MessageView::StateChanged(s) => {
if let Some(element) = msg.src() {
if element == &pipeline && s.current() == gst::State::Playing {
MessageView::StateChanged(s) => match msg.get_src() {
Some(element) => {
if element == pipeline && s.get_current() == gst::State::Playing {
eprintln!("PLAYING");
pipeline.debug_to_dot_file(gst::DebugGraphDetails::all(), "client-playing");
gst::debug_bin_to_dot_file(
&pipeline,
gst::DebugGraphDetails::all(),
"client-playing",
);
}
}
}
None => (),
},
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
Ok(())
}
@@ -270,6 +295,6 @@ fn example_main() -> Result<(), Error> {
fn main() {
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}


@@ -1,62 +1,90 @@
use gst::{element_error, prelude::*};
#[macro_use]
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate glib;
use std::error::Error as StdError;
#[path = "../examples-common.rs"]
mod examples_common;
use std::env;
use anyhow::Error;
use derive_more::{Display, Error};
extern crate failure;
use failure::Error;
#[derive(Debug, Display, Error)]
#[display(fmt = "No such pad {_0} in {_1}")]
#[macro_use]
extern crate failure_derive;
#[derive(Debug, Fail)]
#[fail(display = "Missing element {}", _0)]
struct MissingElement(&'static str);
#[derive(Debug, Fail)]
#[fail(display = "No such pad {} in {}", _0, _1)]
struct NoSuchPad(&'static str, String);
#[derive(Debug, Display, Error)]
#[display(fmt = "Usage: {_0} URI FEC_PERCENTAGE")]
struct UsageError(#[error(not(source))] String);
#[derive(Debug, Fail)]
#[fail(display = "Usage: {} URI FEC_PERCENTAGE", _0)]
struct UsageError(String);
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[derive(Debug, Fail)]
#[fail(
display = "Received error from {}: {} (debug: {:?})",
src, error, debug
)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
#[cause]
cause: glib::Error,
}
fn static_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.static_pad(pad_name) {
fn make_element<'a, P: Into<Option<&'a str>>>(
factory_name: &'static str,
element_name: P,
) -> Result<gst::Element, Error> {
match gst::ElementFactory::make(factory_name, element_name.into()) {
Some(elem) => Ok(elem),
None => Err(Error::from(MissingElement(factory_name))),
}
}
fn get_static_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.get_static_pad(pad_name) {
Some(pad) => Ok(pad),
None => {
let element_name = element.name();
Err(Error::from(NoSuchPad(pad_name, element_name.to_string())))
let element_name = element.get_name();
Err(Error::from(NoSuchPad(pad_name, element_name)))
}
}
}
fn request_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.request_pad_simple(pad_name) {
fn get_request_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.get_request_pad(pad_name) {
Some(pad) => Ok(pad),
None => {
let element_name = element.name();
Err(Error::from(NoSuchPad(pad_name, element_name.to_string())))
let element_name = element.get_name();
Err(Error::from(NoSuchPad(pad_name, element_name)))
}
}
}
fn connect_decodebin_pad(src_pad: &gst::Pad, sink: &gst::Element) -> Result<(), Error> {
let sinkpad = static_pad(sink, "sink")?;
src_pad.link(&sinkpad)?;
let sinkpad = get_static_pad(&sink, "sink")?;
src_pad.link(&sinkpad).into_result()?;
Ok(())
}
fn make_fec_encoder(fec_percentage: u32) -> Result<gst::Element, Error> {
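    // The matching ULPFEC encoder: "percentage" controls how much FEC
    // overhead is produced, "multipacket" protects across multiple RTP
    // packets, and the FEC stream uses payload type 100.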
let fecenc = gst::ElementFactory::make("rtpulpfecenc")
.property("pt", 100u32)
.property("multipacket", true)
.property("percentage", fec_percentage)
.build()?;
let fecenc = make_element("rtpulpfecenc", None)?;
fecenc.set_property("pt", &100u32.to_value())?;
fecenc.set_property("multipacket", &true.to_value())?;
fecenc.set_property("percentage", &fec_percentage.to_value())?;
Ok(fecenc)
}
@@ -73,33 +101,17 @@ fn example_main() -> Result<(), Error> {
let uri = &args[1];
let fec_percentage = args[2].parse::<u32>()?;
let video_caps = gst::Caps::builder("video/x-raw").build();
let pipeline = gst::Pipeline::new(None);
let src = make_element("uridecodebin", None)?;
let conv = make_element("videoconvert", None)?;
let q1 = make_element("queue", None)?;
let enc = make_element("vp8enc", None)?;
let q2 = make_element("queue", None)?;
let pay = make_element("rtpvp8pay", None)?;
let rtpbin = make_element("rtpbin", None)?;
let sink = make_element("udpsink", None)?;
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("uridecodebin")
.property_from_str("pattern", "ball")
.property("expose-all-streams", false)
.property("caps", video_caps)
.property("uri", uri)
.build()?;
let conv = gst::ElementFactory::make("videoconvert").build()?;
let q1 = gst::ElementFactory::make("queue").build()?;
let enc = gst::ElementFactory::make("vp8enc")
.property("keyframe-max-dist", 30i32)
.property("threads", 12i32)
.property("cpu-used", -16i32)
.property("deadline", 1i64)
.property_from_str("error-resilient", "default")
.build()?;
let q2 = gst::ElementFactory::make("queue").build()?;
let pay = gst::ElementFactory::make("rtpvp8pay").build()?;
let rtpbin = gst::ElementFactory::make("rtpbin").build()?;
let sink = gst::ElementFactory::make("udpsink")
.property("host", "127.0.0.1")
.property("sync", true)
.build()?;
pipeline.add_many([&src, &conv, &q1, &enc, &q2, &pay, &rtpbin, &sink])?;
pipeline.add_many(&[&src, &conv, &q1, &enc, &q2, &pay, &rtpbin, &sink])?;
conv.link(&q1)?;
q1.link(&enc)?;
@@ -107,14 +119,12 @@ fn example_main() -> Result<(), Error> {
pay.link(&q2)?;
rtpbin.connect("request-fec-encoder", false, move |values| {
let rtpbin = values[0]
.get::<gst::Element>()
.expect("rtpbin \"request-fec-encoder\" signal values[0]");
let rtpbin = values[0].get::<gst::Element>().expect("Invalid argument");
match make_fec_encoder(fec_percentage) {
Ok(elem) => Some(elem.to_value()),
Err(err) => {
element_error!(
gst_element_error!(
rtpbin,
gst::LibraryError::Failed,
("Failed to make FEC encoder"),
@@ -123,73 +133,92 @@ fn example_main() -> Result<(), Error> {
None
}
}
});
})?;
let srcpad = static_pad(&q2, "src")?;
let sinkpad = request_pad(&rtpbin, "send_rtp_sink_0")?;
srcpad.link(&sinkpad)?;
let srcpad = get_static_pad(&q2, "src")?;
let sinkpad = get_request_pad(&rtpbin, "send_rtp_sink_0")?;
srcpad.link(&sinkpad).into_result()?;
let srcpad = static_pad(&rtpbin, "send_rtp_src_0")?;
let sinkpad = static_pad(&sink, "sink")?;
srcpad.link(&sinkpad)?;
let srcpad = get_static_pad(&rtpbin, "send_rtp_src_0")?;
let sinkpad = get_static_pad(&sink, "sink")?;
srcpad.link(&sinkpad).into_result()?;
src.connect_pad_added(
move |decodebin, src_pad| match connect_decodebin_pad(src_pad, &conv) {
let convclone = conv.clone();
src.connect_pad_added(move |decodebin, src_pad| {
match connect_decodebin_pad(&src_pad, &convclone) {
Ok(_) => (),
Err(err) => {
element_error!(
gst_element_error!(
decodebin,
gst::LibraryError::Failed,
("Failed to link decodebin srcpad"),
["{}", err]
);
()
}
},
);
}
});
let video_caps = gst::Caps::new_simple("video/x-raw", &[]);
src.set_property_from_str("pattern", "ball");
sink.set_property("host", &"127.0.0.1".to_value())?;
sink.set_property("sync", &true.to_value())?;
enc.set_property("keyframe-max-dist", &30i32.to_value())?;
enc.set_property("threads", &12i32.to_value())?;
enc.set_property("cpu-used", &(-16i32).to_value())?;
enc.set_property("deadline", &1i64.to_value())?;
enc.set_property_from_str("error-resilient", "default");
src.set_property("expose-all-streams", &false.to_value())?;
src.set_property("caps", &video_caps.to_value())?;
src.set_property("uri", &uri.to_value())?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let ret = pipeline.set_state(gst::State::Playing);
assert_ne!(ret, gst::StateChangeReturn::Failure);
for msg in bus.iter_timed(gst::ClockTime::NONE) {
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
.get_src()
.map(|s| s.get_path_string())
.unwrap_or_else(|| String::from("None")),
error: err.get_error().description().into(),
debug: err.get_debug(),
cause: err.get_error(),
}
.into());
}
MessageView::StateChanged(s) => {
if let Some(element) = msg.src() {
if element == &pipeline && s.current() == gst::State::Playing {
MessageView::StateChanged(s) => match msg.get_src() {
Some(element) => {
if element == pipeline && s.get_current() == gst::State::Playing {
eprintln!("PLAYING");
pipeline.debug_to_dot_file(gst::DebugGraphDetails::all(), "server-playing");
gst::debug_bin_to_dot_file(
&pipeline,
gst::DebugGraphDetails::all(),
"server-playing",
);
}
}
}
None => (),
},
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
let ret = pipeline.set_state(gst::State::Null);
assert_ne!(ret, gst::StateChangeReturn::Failure);
Ok(())
}
@@ -197,6 +226,6 @@ fn example_main() -> Result<(), Error> {
fn main() {
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}


@@ -1,223 +0,0 @@
// This example demonstrates how to set up a rtsp server using GStreamer
// and extending the default auth module behaviour by subclassing RTSPAuth
// For this, the example configures a videotestsrc launch pipeline to be used
// by the RTSP server for providing data
#![allow(clippy::non_send_fields_in_send_ty)]
use anyhow::Error;
use derive_more::{Display, Error};
use gst_rtsp_server::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Could not get mount points")]
struct NoMountPoints;
fn main_loop() -> Result<(), Error> {
let main_loop = glib::MainLoop::new(None, false);
let server = gst_rtsp_server::RTSPServer::new();
// We create our custom auth module.
// The job of the auth module is to authenticate users and to authorize
// access to, and construction of, media factories.
let auth = auth::Auth::default();
server.set_auth(Some(&auth));
// Much like HTTP servers, RTSP servers have multiple endpoints that
// provide different streams. Here, we ask our server to give
// us a reference to its list of endpoints, so we can add our
// test endpoint.
let mounts = server.mount_points().ok_or(NoMountPoints)?;
// Next, we create a factory for the endpoint we want to create.
// The job of the factory is to create a new pipeline for each client that
// connects, or (if configured to do so) to reuse an existing pipeline.
let factory = gst_rtsp_server::RTSPMediaFactory::new();
// Here we tell the media factory the media we want to serve.
// This is done in the launch syntax. When the first client connects,
// the factory will use this syntax to create a new pipeline instance.
factory.set_launch("( videotestsrc ! vp8enc ! rtpvp8pay name=pay0 )");
// This setting specifies whether each connecting client gets the output
// of a new instance of the pipeline, or whether all connected clients share
// the output of the same pipeline.
// If you want to stream a fixed video you have stored on the server to any
// client, you would not set this to shared here (since every client wants
// to start at the beginning of the video). But if you want to distribute
// a live source, you will probably want to set this to shared, to save
// computing and memory capacity on the server.
factory.set_shared(true);
// Now we add a new mount-point and tell the RTSP server to serve the content
// provided by the factory we configured above, when a client connects to
// this specific path.
mounts.add_factory("/test", factory);
// Attach the server to our main context.
// A main context is the thing where other stuff is registering itself for its
// events (e.g. sockets, GStreamer bus, ...) and the main loop is something that
// polls the main context for its events and dispatches them to whoever is
// interested in them. In this example, we only have one, so we can
// leave the context parameter empty; it will automatically select
// the default one.
let id = server.attach(None)?;
println!(
"Stream ready at rtsp://127.0.0.1:{}/test",
server.bound_port()
);
println!("user admin/password can access stream");
println!("user demo/demo passes authentication but receives 404");
println!("other users do not pass pass authentication and receive 401");
// Start the mainloop. From this point on, the server will start to serve
// our quality content to connecting clients.
main_loop.run();
id.remove();
Ok(())
}
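// Hedged usage note (not part of the original example): a client can supply
// the basic-auth credentials directly in the RTSP URI, e.g. with gst-launch-1.0
// (the port is whatever bound_port() printed above; 8554 is the usual default):
//
//     gst-launch-1.0 playbin uri=rtsp://admin:password@127.0.0.1:8554/test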
// Our custom auth module
mod auth {
// In the imp submodule we include the actual implementation
mod imp {
use gst_rtsp::{RTSPHeaderField, RTSPStatusCode};
use gst_rtsp_server::{prelude::*, subclass::prelude::*, RTSPContext};
// This is the private data of our auth
#[derive(Default)]
pub struct Auth;
impl Auth {
// Simulate external auth validation and user extraction
// authorized users are admin/password and demo/demo
fn external_auth(&self, auth: &str) -> Option<String> {
if let Ok(decoded) = data_encoding::BASE64.decode(auth.as_bytes()) {
if let Ok(decoded) = std::str::from_utf8(&decoded) {
let tokens = decoded.split(':').collect::<Vec<_>>();
if tokens == vec!["admin", "password"] || tokens == vec!["demo", "demo"] {
return Some(tokens[0].into());
}
}
}
None
}
// Simulate external role check
// admin user can construct and access media factory
fn external_access_check(&self, user: &str) -> bool {
user == "admin"
}
}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for Auth {
const NAME: &'static str = "RsRTSPAuth";
type Type = super::Auth;
type ParentType = gst_rtsp_server::RTSPAuth;
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for Auth {}
// Implementation of gst_rtsp_server::RTSPAuth virtual methods
impl RTSPAuthImpl for Auth {
fn authenticate(&self, ctx: &RTSPContext) -> bool {
// authenticate should always be called with a valid context request
let req = ctx
.request()
.expect("Context without request. Should not happen !");
if let Some(auth_credentials) = req.parse_auth_credentials().first() {
if let Some(authorization) = auth_credentials.authorization() {
if let Some(user) = self.external_auth(authorization) {
// Update context token with authenticated username
ctx.set_token(
gst_rtsp_server::RTSPToken::builder()
.field("user", user)
.build(),
);
return true;
}
}
}
false
}
fn check(&self, ctx: &RTSPContext, role: &glib::GString) -> bool {
// We only check media factory access
if !role.starts_with("auth.check.media.factory") {
return true;
}
if ctx.token().is_none() {
// If we do not have a context token yet, check if there are any auth credentials in request
if !self.authenticate(ctx) {
// If there were no credentials, send a "401 Unauthorized" response
if let Some(resp) = ctx.response() {
resp.init_response(RTSPStatusCode::Unauthorized, ctx.request());
resp.add_header(
RTSPHeaderField::WwwAuthenticate,
"Basic realm=\"CustomRealm\"",
);
if let Some(client) = ctx.client() {
client.send_message(resp, ctx.session());
}
}
return false;
}
}
if let Some(token) = ctx.token() {
// If we already have a user token...
if self.external_access_check(&token.string("user").unwrap_or_default()) {
// grant access if user may access factory
return true;
} else {
// send a "404 Not Found" response if user may not access factory
if let Some(resp) = ctx.response() {
resp.init_response(RTSPStatusCode::NotFound, ctx.request());
if let Some(client) = ctx.client() {
client.send_message(resp, ctx.session());
}
}
}
}
false
}
}
}
// This here defines the public interface of our auth and implements
// the corresponding traits so that it behaves like any other RTSPAuth
glib::wrapper! {
pub struct Auth(ObjectSubclass<imp::Auth>) @extends gst_rtsp_server::RTSPAuth;
}
impl Default for Auth {
// Creates a new instance of our auth
fn default() -> Self {
glib::Object::new()
}
}
}
fn example_main() -> Result<(), Error> {
gst::init()?;
main_loop()
}
fn main() {
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
}
}


@@ -1,25 +1,34 @@
// This example demonstrates how to set up a rtsp server using GStreamer.
// While the "rtsp-server" example is about streaming media to connecting
// clients, this example is mainly about recording media that clients
// send to the server. For this, the launch syntax pipeline that is passed
// to this example's cli is spawned and the client's media is streamed into it.
extern crate failure;
extern crate gio;
extern crate glib;
#[macro_use]
extern crate failure_derive;
extern crate gstreamer as gst;
extern crate gstreamer_rtsp as gst_rtsp;
extern crate gstreamer_rtsp_server as gst_rtsp_server;
extern crate gstreamer_rtsp_server_sys as ffi;
use failure::Error;
use std::env;
use std::ptr;
use anyhow::Error;
use derive_more::{Display, Error};
use glib::translate::*;
use gst_rtsp::*;
use gst_rtsp_server::prelude::*;
use gst_rtsp_server::*;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Could not get mount points")]
#[derive(Debug, Fail)]
#[fail(display = "Could not get mount points")]
struct NoMountPoints;
#[derive(Debug, Display, Error)]
#[display(fmt = "Usage: {_0} LAUNCH_LINE")]
struct UsageError(#[error(not(source))] String);
#[derive(Debug, Fail)]
#[fail(display = "Usage: {} LAUNCH_LINE", _0)]
struct UsageError(String);
fn main_loop() -> Result<(), Error> {
let args: Vec<_> = env::args().collect();
@@ -28,29 +37,14 @@ fn main_loop() -> Result<(), Error> {
return Err(Error::from(UsageError(args[0].clone())));
}
// Mostly analogous to the rtsp-server example, the server is created
// and the factory for our test mount is configured.
let main_loop = glib::MainLoop::new(None, false);
let server = gst_rtsp_server::RTSPServer::new();
// Much like HTTP servers, RTSP servers have multiple endpoints that
// provide or take different streams. Here, we ask our server to give
// us a reference to its list of endpoints, so we can add our
// test endpoint.
let mounts = server.mount_points().ok_or(NoMountPoints)?;
// Next, we create a factory for the endpoint we want to create.
// The job of the factory is to create a new pipeline for each client that
// connects, or (if configured to do so) to reuse an existing pipeline.
let factory = gst_rtsp_server::RTSPMediaFactory::new();
// Here we configure a method of authentication that we want the
// server to require from clients.
let auth = gst_rtsp_server::RTSPAuth::new();
let token = gst_rtsp_server::RTSPToken::builder()
.field(gst_rtsp_server::RTSP_TOKEN_MEDIA_FACTORY_ROLE, "user")
.build();
let basic = gst_rtsp_server::RTSPAuth::make_basic("user", "password");
// For proper authentication, we want to use encryption. And there's no
// encryption without a certificate!
let cert = gio::TlsCertificate::from_pem(
let server = RTSPServer::new();
let factory = RTSPMediaFactory::new();
let mounts = server.get_mount_points().ok_or(NoMountPoints)?;
let auth = RTSPAuth::new();
let token = RTSPToken::new(&[(*RTSP_TOKEN_MEDIA_FACTORY_ROLE, &"user")]);
let basic = RTSPAuth::make_basic("user", "password");
let cert = gio::TlsCertificate::new_from_pem(
"-----BEGIN CERTIFICATE-----\
MIICJjCCAY+gAwIBAgIBBzANBgkqhkiG9w0BAQUFADCBhjETMBEGCgmSJomT8ixk\
ARkWA0NPTTEXMBUGCgmSJomT8ixkARkWB0VYQU1QTEUxHjAcBgNVBAsTFUNlcnRp\
@@ -76,58 +70,40 @@ fn main_loop() -> Result<(), Error> {
W535W8UBbEg=-----END PRIVATE KEY-----",
)?;
// This declares that the user "user" (once authenticated) has a role that
// allows them to access and construct media factories.
factory.add_role_from_structure(
&gst::Structure::builder("user")
.field(gst_rtsp_server::RTSP_PERM_MEDIA_FACTORY_ACCESS, true)
.field(gst_rtsp_server::RTSP_PERM_MEDIA_FACTORY_CONSTRUCT, true)
.build(),
);
// Bindable versions were added in b1f515178a363df0322d7adbd5754e1f6e2083c9
unsafe {
ffi::gst_rtsp_media_factory_add_role(
factory.to_glib_none().0,
"user".to_glib_none().0,
RTSP_PERM_MEDIA_FACTORY_ACCESS.to_glib_none().0,
<bool as StaticType>::static_type().to_glib() as *const u8,
true.to_glib() as *const u8,
RTSP_PERM_MEDIA_FACTORY_CONSTRUCT.as_ptr() as *const u8,
<bool as StaticType>::static_type().to_glib() as *const u8,
true.to_glib() as *const u8,
ptr::null_mut::<u8>(),
);
}
auth.set_tls_certificate(Some(&cert));
auth.set_tls_certificate(&cert);
auth.add_basic(basic.as_str(), &token);
// Here, we tell the RTSP server about the authentication method we
// configured above.
server.set_auth(Some(&auth));
server.set_auth(&auth);
factory.set_launch(args[1].as_str());
// Tell the RTSP server that we want to work in RECORD mode (clients send
// data to us).
factory.set_transport_mode(gst_rtsp_server::RTSPTransportMode::RECORD);
// The RTSP protocol allows a couple of different profiles for the actual
// data-transmission protocol. With this, we can limit the selection
// from which connecting clients have to choose.
// SAVP/SAVPF are via SRTP (encrypted); that's what the S is for.
// The F at the end is for feedback (an extension that allows more bidirectional
// feedback between sender and receiver). AV is just Audio/Video, P is Profile :)
// The default, old RTP profile is AVP
factory.set_profiles(gst_rtsp::RTSPProfile::SAVP | gst_rtsp::RTSPProfile::SAVPF);
factory.set_transport_mode(RTSPTransportMode::RECORD);
factory.set_profiles(RTSPProfile::SAVP | RTSPProfile::SAVPF);
// Now we add a new mount-point and tell the RTSP server to use the factory
// we configured beforehand. This factory will take on the job of creating
// a pipeline, which will receive the incoming data of connected clients.
mounts.add_factory("/test", factory);
mounts.add_factory("/test", &factory);
// Attach the server to our main context.
// A main context is the thing where other stuff is registering itself for its
// events (e.g. sockets, GStreamer bus, ...) and the main loop is something that
// polls the main context for its events and dispatches them to whoever is
// interested in them. In this example, we only have one, so we can
// leave the context parameter empty; it will automatically select
// the default one.
let id = server.attach(None)?;
let id = server.attach(None);
println!(
"Stream ready at rtsps://127.0.0.1:{}/test",
server.bound_port()
server.get_bound_port()
);
// Start the mainloop. From this point on, the server will start to take
// incoming connections from clients.
main_loop.run();
id.remove();
glib::source_remove(id);
Ok(())
}
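// Hedged usage note (not part of the original example): rtspclientsink (from
// the gst-rtsp-server plugins) is the element for publishing a stream to a
// RECORD endpoint like this one; since this server requires SAVP/SAVPF over
// TLS, the client additionally needs matching certificate/SRTP configuration.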
@@ -140,6 +116,6 @@ fn example_main() -> Result<(), Error> {
fn main() {
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}


@@ -1,379 +0,0 @@
// This example demonstrates how to set up a rtsp server using GStreamer
// and extending the behaviour by subclassing RTSPMediaFactory and RTSPMedia.
// For this, the example creates a videotestsrc pipeline manually to be used
// by the RTSP server for providing data, and adds a custom attribute to the
// SDP provided to the client.
//
// It also comes with a custom RTSP server/client subclass for hooking into
// the client machinery and printing some status.
#![allow(clippy::non_send_fields_in_send_ty)]
use anyhow::Error;
use derive_more::{Display, Error};
use gst_rtsp_server::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Could not get mount points")]
struct NoMountPoints;
fn main_loop() -> Result<(), Error> {
let main_loop = glib::MainLoop::new(None, false);
let server = server::Server::default();
let mounts = mount_points::MountPoints::default();
server.set_mount_points(Some(&mounts));
// Much like HTTP servers, RTSP servers have multiple endpoints that
// provide different streams. Here, we ask our server to give
// us a reference to its list of endpoints, so we can add our
// test endpoint.
let mounts = server.mount_points().ok_or(NoMountPoints)?;
// Next, we create our custom factory for the endpoint we want to create.
// The job of the factory is to create a new pipeline for each client that
// connects, or (if configured to do so) to reuse an existing pipeline.
let factory = media_factory::Factory::default();
// This setting specifies whether each connecting client gets the output
// of a new instance of the pipeline, or whether all connected clients share
// the output of the same pipeline.
// If you want to stream a fixed video you have stored on the server to any
// client, you would not set this to shared here (since every client wants
// to start at the beginning of the video). But if you want to distribute
// a live source, you will probably want to set this to shared, to save
// computing and memory capacity on the server.
factory.set_shared(true);
// Now we add a new mount-point and tell the RTSP server to serve the content
// provided by the factory we configured above, when a client connects to
// this specific path.
mounts.add_factory("/test", factory);
// Attach the server to our main context.
// A main context is the thing where other stuff is registering itself for its
// events (e.g. sockets, GStreamer bus, ...) and the main loop is something that
// polls the main context for its events and dispatches them to whoever is
// interested in them. In this example, we only have one, so we can
// leave the context parameter empty; it will automatically select
// the default one.
let id = server.attach(None)?;
println!(
"Stream ready at rtsp://127.0.0.1:{}/test",
server.bound_port()
);
// Start the mainloop. From this point on, the server will start to serve
// our quality content to connecting clients.
main_loop.run();
id.remove();
Ok(())
}
// Our custom media factory that creates a media input manually
mod media_factory {
use gst_rtsp_server::subclass::prelude::*;
use super::*;
// In the imp submodule we include the actual implementation
mod imp {
use super::*;
// This is the private data of our factory
#[derive(Default)]
pub struct Factory {}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for Factory {
const NAME: &'static str = "RsRTSPMediaFactory";
type Type = super::Factory;
type ParentType = gst_rtsp_server::RTSPMediaFactory;
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for Factory {
fn constructed(&self) {
self.parent_constructed();
let factory = self.obj();
// All media created by this factory are our custom media type. This would
// not require a media factory subclass and can also be called on the normal
// RTSPMediaFactory.
factory.set_media_gtype(super::media::Media::static_type());
}
}
// Implementation of gst_rtsp_server::RTSPMediaFactory virtual methods
impl RTSPMediaFactoryImpl for Factory {
fn create_element(&self, _url: &gst_rtsp::RTSPUrl) -> Option<gst::Element> {
// Create a simple VP8 videotestsrc input
let bin = gst::Bin::default();
let src = gst::ElementFactory::make("videotestsrc")
// Configure the videotestsrc as a live source
.property("is-live", true)
.build()
.unwrap();
let enc = gst::ElementFactory::make("vp8enc")
// Produce encoded data as fast as possible
.property("deadline", 1i64)
.build()
.unwrap();
// The names of the payloaders must be payX
let pay = gst::ElementFactory::make("rtpvp8pay")
.name("pay0")
.build()
.unwrap();
bin.add_many([&src, &enc, &pay]).unwrap();
gst::Element::link_many([&src, &enc, &pay]).unwrap();
Some(bin.upcast())
}
}
}
// This here defines the public interface of our factory and implements
// the corresponding traits so that it behaves like any other RTSPMediaFactory
glib::wrapper! {
pub struct Factory(ObjectSubclass<imp::Factory>) @extends gst_rtsp_server::RTSPMediaFactory;
}
impl Default for Factory {
// Creates a new instance of our factory
fn default() -> Factory {
glib::Object::new()
}
}
}
// Our custom media subclass that adds a custom attribute to the SDP returned by DESCRIBE
mod media {
use gst_rtsp_server::subclass::prelude::*;
// In the imp submodule we include the actual implementation
mod imp {
use super::*;
// This is the private data of our media
#[derive(Default)]
pub struct Media {}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for Media {
const NAME: &'static str = "RsRTSPMedia";
type Type = super::Media;
type ParentType = gst_rtsp_server::RTSPMedia;
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for Media {}
// Implementation of gst_rtsp_server::RTSPMedia virtual methods
impl RTSPMediaImpl for Media {
fn setup_sdp(
&self,
sdp: &mut gst_sdp::SDPMessageRef,
info: &gst_rtsp_server::subclass::SDPInfo,
) -> Result<(), gst::LoggableError> {
self.parent_setup_sdp(sdp, info)?;
sdp.add_attribute("my-custom-attribute", Some("has-a-value"));
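// In the DESCRIBE response this shows up as an extra SDP line:
//     a=my-custom-attribute:has-a-value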
Ok(())
}
}
}
// This here defines the public interface of our factory and implements
// the corresponding traits so that it behaves like any other RTSPMedia
glib::wrapper! {
pub struct Media(ObjectSubclass<imp::Media>) @extends gst_rtsp_server::RTSPMedia;
}
}
// Our custom RTSP server subclass that reports when clients are connecting and uses
// our custom RTSP client subclass for each client
mod server {
use gst_rtsp_server::subclass::prelude::*;
use super::*;
// In the imp submodule we include the actual implementation
mod imp {
use super::*;
// This is the private data of our server
#[derive(Default)]
pub struct Server {}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for Server {
const NAME: &'static str = "RsRTSPServer";
type Type = super::Server;
type ParentType = gst_rtsp_server::RTSPServer;
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for Server {}
// Implementation of gst_rtsp_server::RTSPServer virtual methods
impl RTSPServerImpl for Server {
fn create_client(&self) -> Option<gst_rtsp_server::RTSPClient> {
let server = self.obj();
let client = super::client::Client::default();
// Duplicated from the default implementation
client.set_session_pool(server.session_pool().as_ref());
client.set_mount_points(server.mount_points().as_ref());
client.set_auth(server.auth().as_ref());
client.set_thread_pool(server.thread_pool().as_ref());
Some(client.upcast())
}
fn client_connected(&self, client: &gst_rtsp_server::RTSPClient) {
self.parent_client_connected(client);
println!("Client {client:?} connected");
}
}
}
// This here defines the public interface of our factory and implements
// the corresponding traits so that it behaves like any other RTSPServer
glib::wrapper! {
pub struct Server(ObjectSubclass<imp::Server>) @extends gst_rtsp_server::RTSPServer;
}
impl Default for Server {
// Creates a new instance of our server
fn default() -> Server {
glib::Object::new()
}
}
}
// Our custom RTSP client subclass.
mod client {
use gst_rtsp_server::subclass::prelude::*;
// In the imp submodule we include the actual implementation
mod imp {
use super::*;
// This is the private data of our client
#[derive(Default)]
pub struct Client {}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for Client {
const NAME: &'static str = "RsRTSPClient";
type Type = super::Client;
type ParentType = gst_rtsp_server::RTSPClient;
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for Client {}
// Implementation of gst_rtsp_server::RTSPClient virtual methods
impl RTSPClientImpl for Client {
fn closed(&self) {
let client = self.obj();
self.parent_closed();
println!("Client {client:?} closed");
}
fn describe_request(&self, ctx: &gst_rtsp_server::RTSPContext) {
self.parent_describe_request(ctx);
let request_uri = ctx.uri().unwrap().request_uri();
println!("Describe request for uri: {request_uri:?}");
}
}
}
// This defines the public interface of our client and implements
// the corresponding traits so that it behaves like any other RTSPClient
glib::wrapper! {
pub struct Client(ObjectSubclass<imp::Client>) @extends gst_rtsp_server::RTSPClient;
}
impl Default for Client {
// Creates a new instance of our client
fn default() -> Client {
glib::Object::new()
}
}
}
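// Our custom mount points subclass that logs the path looked up for each request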
mod mount_points {
use gst_rtsp_server::subclass::prelude::*;
mod imp {
use super::*;
// This is the private data of our mount points
#[derive(Default)]
pub struct MountPoints {}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for MountPoints {
const NAME: &'static str = "RsRTSPMountPoints";
type Type = super::MountPoints;
type ParentType = gst_rtsp_server::RTSPMountPoints;
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for MountPoints {}
// Implementation of gst_rtsp_server::RTSPMountPoints virtual methods
impl RTSPMountPointsImpl for MountPoints {
fn make_path(&self, url: &gst_rtsp::RTSPUrl) -> Option<glib::GString> {
println!("Make path called for {url:?}");
self.parent_make_path(url)
}
}
}
glib::wrapper! {
pub struct MountPoints(ObjectSubclass<imp::MountPoints>) @extends gst_rtsp_server::RTSPMountPoints;
}
impl Default for MountPoints {
// Creates a new instance of our mount points
fn default() -> Self {
glib::Object::new()
}
}
}
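// Note: these subclasses get wired together in main_loop() earlier in this
// file. A rough sketch of that wiring (not the literal code from this example;
// the factory module name and the set_media_gtype() call are assumptions):
//
//   let server = server::Server::default();
//   let mounts = mount_points::MountPoints::default();
//   server.set_mount_points(Some(&mounts));
//   let factory = media_factory::Factory::default();
//   factory.set_media_gtype(media::Media::static_type());
//   mounts.add_factory("/test", factory);
//   let _id = server.attach(None)?;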
fn example_main() -> Result<(), Error> {
gst::init()?;
main_loop()
}
fn main() {
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
}
}


@ -1,24 +1,28 @@
// This example demonstrates how to set up an RTSP server using GStreamer.
// For this, the example parses an arbitrary pipeline in launch syntax
// from the cli and provides this pipeline's output as a stream, served
// using GStreamer's RTSP server.
use std::env;
use anyhow::Error;
use derive_more::{Display, Error};
extern crate gstreamer as gst;
extern crate gstreamer_rtsp_server as gst_rtsp_server;
use gst_rtsp_server::prelude::*;
extern crate glib;
extern crate failure;
use failure::Error;
#[macro_use]
extern crate failure_derive;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Could not get mount points")]
#[derive(Debug, Fail)]
#[fail(display = "Could not get mount points")]
struct NoMountPoints;
#[derive(Debug, Display, Error)]
#[display(fmt = "Usage: {_0} LAUNCH_LINE")]
struct UsageError(#[error(not(source))] String);
#[derive(Debug, Fail)]
#[fail(display = "Usage: {} LAUNCH_LINE", _0)]
struct UsageError(String);
fn main_loop() -> Result<(), Error> {
let args: Vec<_> = env::args().collect();
@ -29,54 +33,24 @@ fn main_loop() -> Result<(), Error> {
let main_loop = glib::MainLoop::new(None, false);
let server = gst_rtsp_server::RTSPServer::new();
// Much like HTTP servers, RTSP servers have multiple endpoints that
// provide different streams. Here, we ask our server to give
// us a reference to its list of endpoints, so we can add our
// test endpoint, providing the pipeline from the cli.
let mounts = server.mount_points().ok_or(NoMountPoints)?;
// Next, we create a factory for the endpoint we want to provide.
// The job of the factory is to create a new pipeline for each client that
// connects, or (if configured to do so) to reuse an existing pipeline.
let factory = gst_rtsp_server::RTSPMediaFactory::new();
// Here we tell the media factory the media we want to serve.
// This is done in the launch syntax. When the first client connects,
// the factory will use this syntax to create a new pipeline instance.
let mounts = server.get_mount_points().ok_or(NoMountPoints)?;
factory.set_launch(args[1].as_str());
// This setting specifies whether each connecting client gets the output
// of a new instance of the pipeline, or whether all connected clients share
// the output of the same pipeline.
// If you want to stream a fixed video you have stored on the server to any
// client, you would not set this to shared here (since every client wants
// to start at the beginning of the video). But if you want to distribute
// a live source, you will probably want to set this to shared, to save
// computing and memory capacity on the server.
factory.set_shared(true);
// Now we add a new mount-point and tell the RTSP server to serve the content
// provided by the factory we configured above, when a client connects to
// this specific path.
mounts.add_factory("/test", factory);
mounts.add_factory("/test", &factory);
// Attach the server to our main context.
// A main context is where other components register themselves for their
// events (e.g. sockets, GStreamer bus, ...) and the main loop is what
// polls the main context for those events and dispatches them to whoever is
// interested in them. In this example, we only have one, so we can
// leave the context parameter empty; it will automatically select
// the default one.
let id = server.attach(None)?;
let id = server.attach(None);
println!(
"Stream ready at rtsp://127.0.0.1:{}/test",
server.bound_port()
server.get_bound_port()
);
// Start the mainloop. From this point on, the server will start to serve
// our quality content to connecting clients.
main_loop.run();
id.remove();
glib::source_remove(id);
Ok(())
}
@ -89,6 +63,6 @@ fn example_main() -> Result<(), Error> {
fn main() {
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}


@ -1,334 +0,0 @@
// This example demonstrates subclassing, implementing an audio filter and providing some Rust API
// on it. It operates the following pipeline:
//
// {audiotestsrc} - {our filter} - {audioconvert} - {autoaudiosink}
//
// Our filter can only handle F32 mono and acts as a FIR filter. The filter impulse response /
// coefficients are provided via Rust API on the filter as a Vec<f32>.
#![allow(clippy::non_send_fields_in_send_ty)]
use anyhow::Error;
use derive_more::{Display, Error};
use gst::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
// Our custom FIR filter element is defined in this module
mod fir_filter {
use byte_slice_cast::*;
use gst_base::subclass::prelude::*;
use once_cell::sync::Lazy;
// The debug category we use below for our filter
pub static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"rsfirfilter",
gst::DebugColorFlags::empty(),
Some("Rust FIR Filter"),
)
});
// In the imp submodule we include the actual implementation
mod imp {
use std::{collections::VecDeque, sync::Mutex};
use super::*;
// This is the private data of our filter
#[derive(Default)]
pub struct FirFilter {
pub(super) coeffs: Mutex<Vec<f32>>,
history: Mutex<VecDeque<f32>>,
}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for FirFilter {
const NAME: &'static str = "RsFirFilter";
type Type = super::FirFilter;
type ParentType = gst_base::BaseTransform;
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for FirFilter {}
impl GstObjectImpl for FirFilter {}
// Implementation of gst::Element virtual methods
impl ElementImpl for FirFilter {
// The element specific metadata. This information is what is visible from
// gst-inspect-1.0 and can also be programmatically retrieved from the gst::Registry
// after initial registration without having to load the plugin in memory.
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: std::sync::OnceLock<gst::subclass::ElementMetadata> =
std::sync::OnceLock::new();
Some(ELEMENT_METADATA.get_or_init(|| {
gst::subclass::ElementMetadata::new(
"FIR Filter",
"Filter/Effect/Audio",
"A FIR audio filter",
"Sebastian Dröge <sebastian@centricular.com>",
)
}))
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: std::sync::OnceLock<Vec<gst::PadTemplate>> =
std::sync::OnceLock::new();
PAD_TEMPLATES.get_or_init(|| {
// Create pad templates for our sink and source pad. These are later used for
// actually creating the pads and beforehand already provide information to
// GStreamer about all possible pads that could exist for this type.
// On both pads we can only handle F32 mono at any sample rate.
let caps = gst_audio::AudioCapsBuilder::new_interleaved()
.format(gst_audio::AUDIO_FORMAT_F32)
.channels(1)
.build();
vec![
// The src pad template must be named "src" for basetransform
// and specifies a pad that is always there
gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
)
.unwrap(),
// The sink pad template must be named "sink" for basetransform
// and specifies a pad that is always there
gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&caps,
)
.unwrap(),
]
})
}
}
// Implementation of gst_base::BaseTransform virtual methods
impl BaseTransformImpl for FirFilter {
// Configure basetransform so that we are always running in-place,
// don't passthrough on same caps and also never call transform_ip
// in passthrough mode (which does not matter for us here).
//
// Given how our processing is implemented, in-place transformation
// is simpler.
const MODE: gst_base::subclass::BaseTransformMode =
gst_base::subclass::BaseTransformMode::AlwaysInPlace;
const PASSTHROUGH_ON_SAME_CAPS: bool = false;
const TRANSFORM_IP_ON_PASSTHROUGH: bool = false;
// Returns the size of one processing unit (i.e. a frame in our case) corresponding
// to the given caps. This is used for allocating a big enough output buffer and
// sanity checking the input buffer size, among other things.
fn unit_size(&self, caps: &gst::Caps) -> Option<usize> {
let audio_info = gst_audio::AudioInfo::from_caps(caps).ok();
audio_info.map(|info| info.bpf() as usize)
}
// Called when shutting down the element so we can release all stream-related state
// There's also start(), which is called whenever starting the element again
fn stop(&self) -> Result<(), gst::ErrorMessage> {
// Drop state
self.history.lock().unwrap().clear();
gst::info!(CAT, imp: self, "Stopped");
Ok(())
}
// Does the actual transformation of the input buffer to the output buffer
fn transform_ip(
&self,
buf: &mut gst::BufferRef,
) -> Result<gst::FlowSuccess, gst::FlowError> {
// Get coefficients and return directly if we have none
let coeffs = self.coeffs.lock().unwrap();
if coeffs.is_empty() {
gst::trace!(CAT, imp: self, "No coefficients set -- passthrough");
return Ok(gst::FlowSuccess::Ok);
}
// Try mapping the input buffer as writable
let mut data = buf.map_writable().map_err(|_| {
gst::element_imp_error!(
self,
gst::CoreError::Failed,
["Failed to map input buffer readable"]
);
gst::FlowError::Error
})?;
// And reinterpret it as a slice of f32
let samples = data.as_mut_slice_of::<f32>().map_err(|err| {
gst::element_imp_error!(
self,
gst::CoreError::Failed,
["Failed to cast input buffer as f32 slice: {}", err]
);
gst::FlowError::Error
})?;
let mut history = self.history.lock().unwrap();
gst::trace!(
CAT,
imp: self,
"Transforming {} samples with filter of length {}",
samples.len(),
coeffs.len()
);
// Now calculate the output for each sample by doing convolution
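// Each output sample is the dot product of the most recent input samples
// with the coefficients: y[n] = sum over k of coeffs[k] * x[n - k], where
// missing history at the start of the stream is treated as zeros (the
// repeat(0.0) padding below).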
for sample in samples.iter_mut() {
history.push_front(*sample);
history.truncate(coeffs.len());
let val = history
.iter()
.copied()
.chain(std::iter::repeat(0.0))
.zip(coeffs.iter())
.map(|(x, a)| x * a)
.sum();
*sample = val;
}
Ok(gst::FlowSuccess::Ok)
}
}
}
// This defines the public interface of our element and implements
// the corresponding traits so that it behaves like any other gst::Element
glib::wrapper! {
pub struct FirFilter(ObjectSubclass<imp::FirFilter>) @extends gst_base::BaseTransform, gst::Element, gst::Object;
}
impl FirFilter {
// Creates a new instance of our filter with the given name
pub fn new(name: Option<&str>) -> FirFilter {
glib::Object::builder().property("name", name).build()
}
// Sets the coefficients by getting access to the private
// struct and simply setting them
pub fn set_coeffs(&self, coeffs: Vec<f32>) {
let imp = self.imp();
*imp.coeffs.lock().unwrap() = coeffs;
}
}
}
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
}
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
// Create our pipeline with the custom element
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("audiotestsrc")
.property_from_str("wave", "white-noise")
.build()?;
let filter = fir_filter::FirFilter::new(None);
let conv = gst::ElementFactory::make("audioconvert").build()?;
let sink = gst::ElementFactory::make("autoaudiosink").build()?;
pipeline.add_many([&src, filter.upcast_ref(), &conv, &sink])?;
src.link(&filter)?;
filter.link(&conv)?;
conv.link(&sink)?;
// Create a windowed sinc lowpass filter at 1/64 sample rate,
// i.e. 689Hz for 44.1kHz sample rate
let w = 2.0 * std::f32::consts::PI / 64.0;
let len = 9;
let mut kernel = (0..len)
.map(|i| {
let v = if i == (len - 1) / 2 {
w
} else {
let p = i as f32 - (len - 1) as f32 / 2.0;
(w * p).sin() / p
};
// Hamming window
let win =
0.54 - 0.46 * (2.0 * std::f32::consts::PI * i as f32 / (len as f32 - 1.0)).cos();
v * win
})
.collect::<Vec<f32>>();
// Normalize
let sum: f32 = kernel.iter().sum();
for v in kernel.iter_mut() {
*v /= sum;
}
filter.set_coeffs(kernel);
Ok(pipeline)
}
fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
let bus = pipeline
.bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
pipeline.set_state(gst::State::Null)?;
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
}
.into());
}
_ => (),
}
}
pipeline.set_state(gst::State::Null)?;
Ok(())
}
fn example_main() {
match create_pipeline().and_then(main_loop) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}


@ -1,188 +0,0 @@
// In the imp submodule we include the actual implementation
use std::{collections::VecDeque, sync::Mutex};
use glib::prelude::*;
use gst_audio::subclass::prelude::*;
use once_cell::sync::Lazy;
use byte_slice_cast::*;
use atomic_refcell::AtomicRefCell;
// The debug category we use below for our filter
pub static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"rsiirfilter",
gst::DebugColorFlags::empty(),
Some("Rust IIR Filter"),
)
});
#[derive(Default)]
// This is the state of our filter
struct State {
a: Vec<f64>,
b: Vec<f64>,
x: VecDeque<f64>,
y: VecDeque<f64>,
}
// This is the private data of our filter
#[derive(Default)]
pub struct IirFilter {
coeffs: Mutex<Option<(Vec<f64>, Vec<f64>)>>,
state: AtomicRefCell<State>,
}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for IirFilter {
const NAME: &'static str = "RsIirFilter";
const ABSTRACT: bool = true;
type Type = super::IirFilter;
type ParentType = gst_audio::AudioFilter;
type Class = super::Class;
// Here we set default implementations for all the virtual methods.
// This is mandatory for all virtual methods that are not `Option`s.
fn class_init(class: &mut Self::Class) {
class.set_rate = |obj, rate| obj.imp().set_rate_default(rate);
}
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for IirFilter {}
impl GstObjectImpl for IirFilter {}
// Implementation of gst::Element virtual methods
impl ElementImpl for IirFilter {}
// Implementation of gst_base::BaseTransform virtual methods
impl BaseTransformImpl for IirFilter {
// Configure basetransform so that we are always running in-place,
// don't passthrough on same caps and also never call transform_ip
// in passthrough mode (which does not matter for us here).
//
// Given how our processing is implemented, in-place transformation
// is simpler.
const MODE: gst_base::subclass::BaseTransformMode =
gst_base::subclass::BaseTransformMode::AlwaysInPlace;
const PASSTHROUGH_ON_SAME_CAPS: bool = false;
const TRANSFORM_IP_ON_PASSTHROUGH: bool = false;
fn start(&self) -> Result<(), gst::ErrorMessage> {
self.parent_start()?;
*self.state.borrow_mut() = State::default();
Ok(())
}
fn stop(&self) -> Result<(), gst::ErrorMessage> {
self.parent_stop()?;
*self.state.borrow_mut() = State::default();
Ok(())
}
fn transform_ip(&self, buf: &mut gst::BufferRef) -> Result<gst::FlowSuccess, gst::FlowError> {
let mut state = self.state.borrow_mut();
// Update coefficients if new coefficients were set
{
let mut coeffs = self.coeffs.lock().unwrap();
if let Some((a, b)) = coeffs.take() {
state.x.clear();
state.y.clear();
if !a.is_empty() {
state.y.resize(a.len() - 1, 0.0);
}
if !b.is_empty() {
state.x.resize(b.len() - 1, 0.0);
}
state.a = a;
state.b = b;
}
}
if state.a.is_empty() || state.b.is_empty() {
return Ok(gst::FlowSuccess::Ok);
}
let mut map = buf.map_writable().map_err(|_| {
gst::error!(CAT, imp: self, "Failed to map buffer writable");
gst::FlowError::Error
})?;
let samples = map.as_mut_slice_of::<f32>().unwrap();
assert!(state.b.len() - 1 == state.x.len());
assert!(state.a.len() - 1 == state.y.len());
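// The loop below evaluates the standard IIR difference equation
// (Direct Form I):
//
//   y[n] = (b[0]*x[n] + b[1]*x[n-1] + ... + b[M]*x[n-M]
//           - a[1]*y[n-1] - ... - a[N]*y[n-N]) / a[0]
//
// with state.x holding the previous M input samples and state.y the
// previous N output samples.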
for sample in samples.iter_mut() {
let mut val = state.b[0] * *sample as f64;
for (b, x) in Iterator::zip(state.b.iter().skip(1), state.x.iter()) {
val += b * x;
}
for (a, y) in Iterator::zip(state.a.iter().skip(1), state.y.iter()) {
val -= a * y;
}
val /= state.a[0];
let _ = state.x.pop_back().unwrap();
state.x.push_front(*sample as f64);
let _ = state.y.pop_back().unwrap();
state.y.push_front(val);
*sample = val as f32;
}
Ok(gst::FlowSuccess::Ok)
}
}
impl AudioFilterImpl for IirFilter {
fn allowed_caps() -> &'static gst::Caps {
static CAPS: std::sync::OnceLock<gst::Caps> = std::sync::OnceLock::new();
CAPS.get_or_init(|| {
// On both pads we can only handle F32 mono at any sample rate.
gst_audio::AudioCapsBuilder::new_interleaved()
.format(gst_audio::AUDIO_FORMAT_F32)
.channels(1)
.build()
})
}
fn setup(&self, info: &gst_audio::AudioInfo) -> Result<(), gst::LoggableError> {
self.parent_setup(info)?;
gst::debug!(CAT, imp: self, "Rate changed to {}", info.rate());
let obj = self.obj();
(obj.class().as_ref().set_rate)(&obj, info.rate());
Ok(())
}
}
/// Wrappers for public methods and associated helper functions.
impl IirFilter {
pub(super) fn set_coeffs(&self, a: Vec<f64>, b: Vec<f64>) {
gst::debug!(CAT, imp: self, "Setting coefficients a: {a:?}, b: {b:?}");
*self.coeffs.lock().unwrap() = Some((a, b));
}
}
/// Default virtual method implementations.
impl IirFilter {
fn set_rate_default(&self, _rate: u32) {}
}


@ -1,86 +0,0 @@
use gst::{prelude::*, subclass::prelude::*};
use gst_audio::subclass::prelude::*;
mod imp;
// This defines the public interface of our element and implements
// the corresponding traits so that it behaves like any other gst::Element
//
// GObject
// ╰──GstObject
// ╰──GstElement
// ╰──GstBaseTransform
// ╰──GstAudioFilter
// ╰──IirFilter
glib::wrapper! {
pub struct IirFilter(ObjectSubclass<imp::IirFilter>) @extends gst_audio::AudioFilter, gst_base::BaseTransform, gst::Element, gst::Object;
}
/// Trait containing extension methods for `IirFilter`.
pub trait IirFilterExt: IsA<IirFilter> {
// Sets the coefficients by getting access to the private struct and simply setting them
fn set_coeffs(&self, a: Vec<f64>, b: Vec<f64>) {
self.upcast_ref::<IirFilter>().imp().set_coeffs(a, b)
}
}
impl<O: IsA<IirFilter>> IirFilterExt for O {}
/// Trait to implement in `IirFilter` subclasses.
pub trait IirFilterImpl: AudioFilterImpl {
/// Called whenever the sample rate is changing.
fn set_rate(&self, rate: u32) {
self.parent_set_rate(rate);
}
}
/// Trait containing extension methods for `IirFilterImpl`, specifically methods for chaining
/// up to the parent implementation of virtual methods.
pub trait IirFilterImplExt: IirFilterImpl {
fn parent_set_rate(&self, rate: u32) {
unsafe {
let data = Self::type_data();
let parent_class = &*(data.as_ref().parent_class() as *mut Class);
(parent_class.set_rate)(self.obj().unsafe_cast_ref(), rate)
}
}
}
impl<T: IirFilterImpl> IirFilterImplExt for T {}
/// Class struct for `IirFilter`.
#[repr(C)]
pub struct Class {
parent: <<imp::IirFilter as ObjectSubclass>::ParentType as ObjectType>::GlibClassType,
set_rate: fn(&IirFilter, rate: u32),
}
unsafe impl ClassStruct for Class {
type Type = imp::IirFilter;
}
/// This allows directly using `Class` as e.g. `gst_audio::AudioFilterClass` or
/// `gst_base::BaseTransformClass` without having to cast.
impl std::ops::Deref for Class {
type Target = glib::Class<<<Self as ClassStruct>::Type as ObjectSubclass>::ParentType>;
fn deref(&self) -> &Self::Target {
unsafe { &*(&self.parent as *const _ as *const _) }
}
}
/// Overrides the virtual methods with the actual implementation of the subclass as is provided by
/// the subclass' implementation of the `Impl` trait.
unsafe impl<T: IirFilterImpl> IsSubclassable<T> for IirFilter {
fn class_init(class: &mut glib::Class<Self>) {
Self::parent_class_init::<T>(class);
let class = class.as_mut();
class.set_rate = |obj, rate| unsafe {
let imp = obj.unsafe_cast_ref::<T::Type>().imp();
imp.set_rate(rate);
};
}
}


@ -1,170 +0,0 @@
// In the imp submodule we include the actual implementation
use std::sync::Mutex;
use glib::prelude::*;
use gst::prelude::*;
use gst_audio::subclass::prelude::*;
use crate::iirfilter::{IirFilterExt, IirFilterImpl};
// These are the property values of our filter
pub struct Settings {
cutoff: f32,
}
impl Default for Settings {
fn default() -> Self {
Settings { cutoff: 0.0 }
}
}
// This is the state of our filter
#[derive(Default)]
pub struct State {
rate: Option<u32>,
}
// This is the private data of our filter
#[derive(Default)]
pub struct Lowpass {
settings: Mutex<Settings>,
state: Mutex<State>,
}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for Lowpass {
const NAME: &'static str = "RsLowpass";
type Type = super::Lowpass;
type ParentType = crate::iirfilter::IirFilter;
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for Lowpass {
fn properties() -> &'static [glib::ParamSpec] {
static PROPERTIES: std::sync::OnceLock<Vec<glib::ParamSpec>> = std::sync::OnceLock::new();
PROPERTIES.get_or_init(|| {
vec![glib::ParamSpecFloat::builder("cutoff")
.nick("Cutoff")
.blurb("Cutoff frequency in Hz")
.default_value(Settings::default().cutoff)
.minimum(0.0)
.mutable_playing()
.build()]
})
}
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
match pspec.name() {
"cutoff" => {
self.settings.lock().unwrap().cutoff = value.get().unwrap();
self.calculate_coeffs();
}
_ => unimplemented!(),
};
}
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
match pspec.name() {
"cutoff" => self.settings.lock().unwrap().cutoff.to_value(),
_ => unimplemented!(),
}
}
}
impl GstObjectImpl for Lowpass {}
// Implementation of gst::Element virtual methods
impl ElementImpl for Lowpass {
// The element specific metadata. This information is what is visible from
// gst-inspect-1.0 and can also be programmatically retrieved from the gst::Registry
// after initial registration without having to load the plugin in memory.
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: std::sync::OnceLock<gst::subclass::ElementMetadata> =
std::sync::OnceLock::new();
Some(ELEMENT_METADATA.get_or_init(|| {
gst::subclass::ElementMetadata::new(
"Lowpass Filter",
"Filter/Effect/Audio",
"A Lowpass audio filter",
"Sebastian Dröge <sebastian@centricular.com>",
)
}))
}
}
// Implementation of gst_base::BaseTransform virtual methods
impl BaseTransformImpl for Lowpass {
const MODE: gst_base::subclass::BaseTransformMode =
<<crate::iirfilter::IirFilter as glib::object::ObjectSubclassIs>::Subclass>::MODE;
const PASSTHROUGH_ON_SAME_CAPS: bool =
<<crate::iirfilter::IirFilter as glib::object::ObjectSubclassIs>::Subclass>::PASSTHROUGH_ON_SAME_CAPS;
const TRANSFORM_IP_ON_PASSTHROUGH: bool =
<<crate::iirfilter::IirFilter as glib::object::ObjectSubclassIs>::Subclass>::TRANSFORM_IP_ON_PASSTHROUGH;
fn start(&self) -> Result<(), gst::ErrorMessage> {
self.parent_start()?;
*self.state.lock().unwrap() = State::default();
Ok(())
}
}
// Implementation of gst_audio::AudioFilter virtual methods
impl AudioFilterImpl for Lowpass {}
// Implementation of IirFilter virtual methods
impl IirFilterImpl for Lowpass {
fn set_rate(&self, rate: u32) {
// Could call
// self.parent_set_rate(rate);
// here but chaining up is not necessary if the base class doesn't require that
// or if the behaviour of the parent class should be completely overridden.
self.state.lock().unwrap().rate = Some(rate);
self.calculate_coeffs();
}
}
impl Lowpass {
fn calculate_coeffs(&self) {
use std::f64;
let Some(rate) = self.state.lock().unwrap().rate else {
return;
};
let cutoff = self.settings.lock().unwrap().cutoff;
// See Audio EQ Cookbook
// https://www.w3.org/TR/audio-eq-cookbook
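// For the lowpass biquad, the cookbook gives (before the 1/a0
// normalization applied below):
//   b0 = (1 - cos(omega)) / 2
//   b1 =  1 - cos(omega)
//   b2 = (1 - cos(omega)) / 2
//   a0 = 1 + alpha,  a1 = -2 * cos(omega),  a2 = 1 - alpha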
let cutoff = cutoff as f64 / rate as f64;
let omega = 2.0 * f64::consts::PI * cutoff;
let q = 1.0;
let alpha = f64::sin(omega) / (2.0 * q);
let mut b = vec![
(1.0 - f64::cos(omega)) / 2.0,
1.0 - f64::cos(omega),
(1.0 - f64::cos(omega)) / 2.0,
];
let mut a = vec![1.0 + alpha, -2.0 * f64::cos(omega), 1.0 - alpha];
let a0 = a[0];
for a in &mut a {
*a /= a0;
}
for b in &mut b {
*b /= a0;
}
self.obj().set_coeffs(a, b);
}
}


@ -1,15 +0,0 @@
mod imp;
// This defines the public interface of our element and implements
// the corresponding traits so that it behaves like any other gst::Element
//
// GObject
// ╰──GstObject
// ╰──GstElement
// ╰──GstBaseTransform
// ╰──GstAudioFilter
// ╰──IirFilter
// ╰──Lowpass
glib::wrapper! {
pub struct Lowpass(ObjectSubclass<imp::Lowpass>) @extends crate::iirfilter::IirFilter, gst_audio::AudioFilter, gst_base::BaseTransform, gst::Element, gst::Object;
}


@ -1,66 +0,0 @@
// This example implements a base class IirFilter and a subclass Lowpass of it.
//
// The example shows how to provide and implement virtual methods, and how to provide non-virtual
// methods on the base class.
use gst::prelude::*;
mod iirfilter;
mod lowpass;
#[path = "../../examples-common.rs"]
mod examples_common;
fn example_main() {
gst::init().unwrap();
let pipeline = gst::Pipeline::new();
let src = gst::ElementFactory::make("audiotestsrc")
.property_from_str("wave", "white-noise")
.build()
.unwrap();
let filter = glib::Object::builder::<lowpass::Lowpass>()
.property("cutoff", 4000.0f32)
.build();
let conv = gst::ElementFactory::make("audioconvert").build().unwrap();
let sink = gst::ElementFactory::make("autoaudiosink").build().unwrap();
pipeline
.add_many([&src, filter.as_ref(), &conv, &sink])
.unwrap();
gst::Element::link_many([&src, filter.as_ref(), &conv, &sink]).unwrap();
let bus = pipeline.bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
break;
}
_ => (),
}
}
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}


@ -1,56 +1,49 @@
// This example demonstrates how to set and store metadata using
// GStreamer. Some elements support setting tags on a media stream.
// An example would be id3v2mux. The element signals this by implementing
// the GstTagSetter interface. You can query any element implementing this
// interface from the pipeline, and then tell the returned implementation
// of GstTagSetter what tags to apply to the media stream.
// This example's pipeline creates a new flac file from the audiotestsrc
// that the example application will add tags to using GstTagSetter.
// The operated pipeline looks like this:
// {audiotestsrc} - {flacenc} - {filesink}
// For example, in pipelines that transcode a multimedia file, the input
// already has tags. For cases like this, GstTagSetter has a merge
// setting, which the application can configure to tell the element
// implementing the interface whether to merge newly applied tags into the
// already existing ones, whether to replace existing ones entirely, etc.
// (More modes of operation are possible, see: gst::TagMergeMode)
// This merge mode can also be supplied to any method that adds new tags.
use anyhow::{anyhow, Error};
use derive_more::{Display, Error};
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate glib;
use std::error::Error as StdError;
extern crate failure;
use failure::Error;
#[macro_use]
extern crate failure_derive;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Missing element {_0}")]
struct MissingElement(#[error(not(source))] String);
#[derive(Debug, Fail)]
#[fail(display = "Missing element {}", _0)]
struct MissingElement(String);
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[derive(Debug, Fail)]
#[fail(
display = "Received error from {}: {} (debug: {:?})",
src, error, debug
)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
#[cause]
cause: glib::Error,
}
fn example_main() -> Result<(), Error> {
gst::init()?;
// Parse the pipeline we want to probe from a static in-line string.
let mut context = gst::ParseContext::new();
let pipeline = match gst::parse::launch_full(
let pipeline = match gst::parse_launch_full(
"audiotestsrc wave=white-noise num-buffers=100 ! flacenc ! filesink location=test.flac",
Some(&mut context),
gst::ParseFlags::empty(),
gst::ParseFlags::NONE,
) {
Ok(pipeline) => pipeline,
Err(err) => {
if let Some(gst::ParseError::NoSuchElement) = err.kind::<gst::ParseError>() {
return Err(MissingElement(context.missing_elements().join(",")).into());
return Err(MissingElement(context.get_missing_elements().join(",")).into());
} else {
return Err(err.into());
}
@ -59,60 +52,53 @@ fn example_main() -> Result<(), Error> {
let pipeline = pipeline
.downcast::<gst::Pipeline>()
.map_err(|_| anyhow!("Generated pipeline is no pipeline"))?;
.map_err(|_| failure::err_msg("Generated pipeline is no pipeline"))?;
// Query the pipeline for elements implementing the GstTagsetter interface.
// In our case, this will return the flacenc element.
let tagsetter = pipeline
.by_interface(gst::TagSetter::static_type())
.ok_or_else(|| anyhow!("No TagSetter found"))?;
.get_by_interface(gst::TagSetter::static_type())
.ok_or_else(|| failure::err_msg("No TagSetter found"))?;
let tagsetter = tagsetter
.dynamic_cast::<gst::TagSetter>()
.map_err(|_| anyhow!("No TagSetter found"))?;
.map_err(|_| failure::err_msg("No TagSetter found"))?;
// Tell the element implementing the GstTagsetter interface how to handle already existing
// metadata.
tagsetter.set_tag_merge_mode(gst::TagMergeMode::KeepAll);
// Set the "title" tag to "Special randomized white-noise".
// The second parameter gst::TagMergeMode::Append tells the tagsetter to append this title
// if there already is one.
tagsetter
.add_tag::<gst::tags::Title>(&"Special randomized white-noise", gst::TagMergeMode::Append);
tagsetter.add::<gst::tags::Title>(&"Special randomized white-noise", gst::TagMergeMode::Append);
let bus = pipeline.bus().unwrap();
let bus = pipeline.get_bus().unwrap();
pipeline.set_state(gst::State::Playing)?;
pipeline.set_state(gst::State::Playing).into_result()?;
for msg in bus.iter_timed(gst::ClockTime::NONE) {
while let Some(msg) = bus.timed_pop(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
}
.into());
Err(ErrorMessage {
src: err
.get_src()
.map(|s| s.get_path_string())
.unwrap_or_else(|| String::from("None")),
error: err.get_error().description().into(),
debug: err.get_debug(),
cause: err.get_error(),
})?;
break;
}
_ => (),
}
}
pipeline.set_state(gst::State::Null)?;
pipeline.set_state(gst::State::Null).into_result()?;
Ok(())
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}

Some files were not shown because too many files have changed in this diff.