Compare commits

..

3 commits
main ... 0.16.0

Author SHA1 Message Date
Sebastian Dröge
f266e6cefc Update docs 2020-07-06 14:38:47 +03:00
Sebastian Dröge
9511b19c0a Update versions from git to releases 2020-07-06 14:37:02 +03:00
Sebastian Dröge
2f69fe81f2 Update CHANGELOG.md for 0.16.0 2020-07-06 14:03:45 +03:00
1444 changed files with 444522 additions and 215864 deletions

1
.gitignore vendored
View file

@ -1,2 +1,3 @@
target/
**/*.rs.bk
Cargo.lock

View file

@ -1,8 +1,8 @@
# We use https://gitlab.freedesktop.org/freedesktop/ci-templates
# to build the images used by the ci.
#
# Here is how to properly update those images:
# - new Rust stable version: update GST_RS_IMG_TAG and update Rust version
# Here is how to properly update thoses images:
# - new Rust stable version: update GST_RS_IMG_TAG
# - add dependencies: update FDO_DISTRIBUTION_PACKAGES and update GST_RS_IMG_TAG
# - update GStreamer version: update the tag in ci/install-gst.sh and update GST_RS_IMG_TAG
#
@ -11,16 +11,13 @@
# - setting it to the current date and the version suffix to 0
# - incrementing the version suffix
#
# Same for GST_RS_IMG_WINDOWS_TAG. There's a separate tag for it to cater for
# image-only updates that only affect Windows or only Linux.
#
# After each update commit your changes and push to your personal repo.
# After each update commit your changes and push to your personnal repo.
# After review and ci approval merge the branch as usual.
#
# Updating the nightly image should be done by simply running a scheduled ci
# pipeline on the upstream repo with the $UPDATE_NIGHTLY variable defined.
.templates_sha: &templates_sha 6a40df92957c8ce9ee741aaccc5daaaf70545b1e
.templates_sha: &templates_sha 322bf2b8f29b6491caeb13861201e96969ddc169
include:
- project: 'freedesktop/ci-templates'
@ -29,48 +26,10 @@ include:
- local: "ci/images_template.yml"
workflow:
rules:
- if: $CI_PIPELINE_SOURCE == "schedule"
- if: $CI_MERGE_REQUEST_IID
# don't create a pipeline if its a commit pipeline, on a branch and that branch has
# open merge requests (bc we will get a MR build instead)
- if: $CI_OPEN_MERGE_REQUESTS
when: never
- if: $CI_COMMIT_TAG
- if: $CI_COMMIT_BRANCH
default:
interruptible: true
# Auto-retry jobs in case of infra failures
retry:
max: 1
when:
- 'runner_system_failure'
- 'stuck_or_timeout_failure'
- 'scheduler_failure'
- 'api_failure'
variables:
FDO_UPSTREAM_REPO: gstreamer/gstreamer-rs
# DIY CI-templates like setup for windows
WINDOWS_RUST_MINIMUM_IMAGE: "$CI_REGISTRY_IMAGE/windows:$GST_RS_IMG_WINDOWS_TAG-main-$GST_RS_MSRV"
WINDOWS_RUST_MINIMUM_UPSTREAM_IMAGE: "$CI_REGISTRY/$FDO_UPSTREAM_REPO/windows:$GST_RS_IMG_WINDOWS_TAG-main-$GST_RS_MSRV"
WINDOWS_RUST_STABLE_IMAGE: "$CI_REGISTRY_IMAGE/windows:$GST_RS_IMG_WINDOWS_TAG-main-$GST_RS_STABLE"
WINDOWS_RUST_STABLE_UPSTREAM_IMAGE: "$CI_REGISTRY/$FDO_UPSTREAM_REPO/windows:$GST_RS_IMG_WINDOWS_TAG-main-$GST_RS_STABLE"
RUST_DOCS_FLAGS: "--cfg docsrs --extern-html-root-url=muldiv=https://docs.rs/muldiv/1.0.0/muldiv/ -Z unstable-options --generate-link-to-definition"
NAMESPACE: gstreamer
# format is <branch>=<name>
# the name is used in the URL
# latest release must be at the top
# (only relevant on main branch)
RELEASES:
0.23=0.23
stages:
- "trigger"
- "container-base"
- "container-final"
- "lint"
@ -78,533 +37,318 @@ stages:
- "extras"
- "deploy"
# This is an empty job that is used to trigger the pipeline.
trigger:
image: alpine:latest
stage: 'trigger'
.debian:10:
variables:
GIT_STRATEGY: none
GIT_SUBMODULE_STRATEGY: "none"
tags: [ 'placeholder-job' ]
script:
- echo "Trigger job done, now running the pipeline."
rules:
- if: $CI_PIPELINE_SOURCE == "schedule"
# If the MR is assigned to the Merge bot, trigger the pipeline automatically
- if: '$CI_MERGE_REQUEST_ASSIGNEES == "gstreamer-merge-bot"'
# Require explicit action to trigger tests post merge
- if: '$CI_PROJECT_NAMESPACE == "gstreamer" && $CI_COMMIT_BRANCH == "main"'
when: 'manual'
# When the assignee isn't the merge bot, require an explicit action to trigger the pipeline
# to avoid wasting CI resources
- if: '$CI_MERGE_REQUEST_ASSIGNEES != "gstreamer-merge-bot"'
when: 'manual'
allow_failure: false
.debian:12:
needs: []
variables:
FDO_DISTRIBUTION_VERSION: 'bookworm-slim'
FDO_DISTRIBUTION_VERSION: 10
FDO_DISTRIBUTION_TAG: '$RUST_VERSION-$GST_RS_IMG_TAG'
# Only stuff inside the repo directory can be cached
# Override the CARGO_HOME variable to force its location
CARGO_HOME: "${CI_PROJECT_DIR}/.cargo_home"
before_script:
- source ./ci/env.sh
- mkdir .cargo && echo -e "[net]\ngit-fetch-with-cli = true" > .cargo/config.toml
- mkdir .cargo && echo -e "[net]\ngit-fetch-with-cli = true" > .cargo/config
.debian:12-base:
extends: .debian:12
.debian:10-base:
extends: .debian:10
variables:
FDO_DISTRIBUTION_TAG: 'base-$GST_RS_IMG_TAG'
.debian:12-stable:
extends: .debian:12
.debian:10-stable:
extends: .debian:10
variables:
RUST_IMAGE_FULL: "1"
FDO_DISTRIBUTION_TAG: '$GST_RS_STABLE-$GST_RS_IMG_TAG'
FDO_DISTRIBUTION_EXEC: 'bash ci/install-rust.sh $GST_RS_STABLE $RUST_IMAGE_FULL'
RUST_VERSION: "stable"
.debian:12-msrv:
extends: .debian:12
.debian:10-1-40:
extends: .debian:10
variables:
FDO_DISTRIBUTION_TAG: '$GST_RS_MSRV-$GST_RS_IMG_TAG'
FDO_DISTRIBUTION_EXEC: 'bash ci/install-rust.sh $GST_RS_MSRV $RUST_IMAGE_FULL'
RUST_VERSION: "1.40.0"
.debian:12-nightly:
extends: .debian:12
.debian:10-nightly:
extends: .debian:10
variables:
FDO_DISTRIBUTION_TAG: 'nightly-$GST_RS_IMG_TAG'
FDO_DISTRIBUTION_EXEC: 'bash ci/install-rust.sh nightly $RUST_IMAGE_FULL'
RUST_VERSION: "nightly"
.build-base-image:
extends:
- .fdo.container-build@debian
stage: container-base
variables:
FDO_DISTRIBUTION_PACKAGES: >-
build-essential curl python3-setuptools libglib2.0-dev libxml2-dev
libdrm-dev libegl1-mesa-dev libgl1-mesa-dev libgbm-dev libgles2-mesa-dev
libgl1-mesa-dri libegl-dev libgl1-mesa-glx libwayland-egl1-mesa xz-utils
libssl-dev git wget ca-certificates ninja-build python3-pip flex bison
libglib2.0-dev libx11-dev libx11-xcb-dev libsoup2.4-dev libvorbis-dev
libogg-dev libtheora-dev libmatroska-dev libvpx-dev libopus-dev
libgraphene-1.0-dev libjpeg-dev libwayland-dev wayland-protocols
python3-gi libavcodec-dev libavformat-dev libavutil-dev libavfilter-dev
libswscale-dev yasm libx264-dev libfontconfig-dev libfreetype-dev
libxkbcommon-dev libxi-dev libxcb-render0-dev libxcb-shm0-dev
libxcb1-dev libxext-dev libxrender-dev libxrandr-dev libxcursor-dev
libxdamage-dev libxfixes-dev libxinerama-dev libgudev-1.0-dev
libpango1.0-dev libcairo2-dev libjson-glib-dev libgdk-pixbuf-2.0-dev
libtiff-dev libpng-dev libjpeg-dev libepoxy-dev libsass-dev sassc
libcsound64-dev llvm clang nasm libsodium-dev libwebp-dev
libflac-dev libmysofa-dev libgtk-4-dev
FDO_DISTRIBUTION_EXEC: >-
bash ci/install-gst.sh &&
bash ci/install-dav1d.sh &&
pip3 install --break-system-packages git+http://gitlab.freedesktop.org/freedesktop/ci-templates
FDO_DISTRIBUTION_PACKAGES: "build-essential curl python3-setuptools liborc-0.4-dev libglib2.0-dev libxml2-dev libgtk-3-dev libegl1-mesa libgles2-mesa libgl1-mesa-dri libgl1-mesa-glx libwayland-egl1-mesa xz-utils libssl-dev git wget ca-certificates ninja-build python3-pip flex bison libglib2.0-dev"
FDO_DISTRIBUTION_EXEC: 'bash ci/install-gst.sh && pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates'
.build-final-image:
extends:
- .fdo.container-build@debian
stage: container-final
variables:
FDO_BASE_IMAGE: '$CI_REGISTRY_IMAGE/debian/bookworm-slim:base-$GST_RS_IMG_TAG'
FDO_BASE_IMAGE: '$CI_REGISTRY_IMAGE/debian/10:base-$GST_RS_IMG_TAG'
FDO_DISTRIBUTION_EXEC: 'bash ci/install-rust.sh $RUST_VERSION'
build-base:
extends:
- .build-base-image
- .debian:12-base
- .debian:10-base
build-stable:
needs: ["build-base"]
extends:
- .build-final-image
- .debian:12-stable
- .debian:10-stable
build-msrv:
needs: ["build-base"]
build-1-40:
extends:
- .build-final-image
- .debian:12-msrv
- .debian:10-1-40
build-nightly:
needs: ["build-base"]
extends:
- .build-final-image
- .debian:12-nightly
- .debian:10-nightly
update-nightly:
extends: build-nightly
rules:
- if: $UPDATE_NIGHTLY == "1"
only:
variables:
- $UPDATE_NIGHTLY == "1"
variables:
FDO_FORCE_REBUILD: 1
.dist-debian-container:
extends:
- .fdo.distribution-image@debian
cache:
key: "gst"
paths:
- "${CARGO_HOME}"
after_script:
- rm -rf target
.img-stable:
extends:
- .debian:12-stable
- .dist-debian-container
- .debian:10-stable
.img-msrv:
.img-1-40:
extends:
- .debian:12-msrv
- .dist-debian-container
- .debian:10-1-40
.img-nightly:
extends:
- .debian:12-nightly
- .dist-debian-container
- .debian:10-nightly
.cargo_test_var: &cargo_test
- ./ci/run-cargo-test.sh
# GST_PLUGINS_RS_TOKEN is a variable of type 'Var' defined in gstreamer-rs CI
# settings and containing a gst-plugins-rs pipeline trigger token
.plugins-update:
stage: deploy
script:
- |
# FDO_DISTRIBUTION_IMAGE still has indirections
- echo $FDO_DISTRIBUTION_IMAGE
- DISTRO_IMAGE=$(eval echo ${FDO_DISTRIBUTION_IMAGE})
- echo $DISTRO_IMAGE
# retrieve the infos from the registry
- JSON_IMAGE=$(skopeo inspect docker://$DISTRO_IMAGE)
- IMAGE_PIPELINE_ID=$(echo $JSON_IMAGE | jq -r '.Labels["fdo.pipeline_id"]')
- echo $IMAGE_PIPELINE_ID
- echo $CI_PIPELINE_ID
- |
if [[ x"$IMAGE_PIPELINE_ID" == x"$CI_PIPELINE_ID" ]]; then
echo "Image has been updated, notify gst-plugins-rs"
curl -X POST -F "token=$GST_PLUGINS_RS_TOKEN" -F "ref=master" -F "variables[UPDATE_IMG]=$UPDATE_IMG" https://gitlab.freedesktop.org/api/v4/projects/1400/trigger/pipeline
else
echo "Image has not been updated, ignore"
fi
rules:
- if: '$CI_COMMIT_REF_NAME == "master" && $CI_PROJECT_PATH == "gstreamer/gstreamer-rs"'
# Those jobs need to use another image as ours doesn't have 'skopeo'
# and it's not easily installable in Debian stable for now.
plugins-update-stable:
extends:
- .plugins-update
- .img-stable
image: registry.freedesktop.org/freedesktop/ci-templates/buildah:2020-03-04
variables:
UPDATE_IMG: "stable"
plugins-update-1-40:
extends:
- .plugins-update
- .img-1-40
image: registry.freedesktop.org/freedesktop/ci-templates/buildah:2020-03-04
variables:
UPDATE_IMG: "1-40"
plugins-update-nightly:
extends:
- .plugins-update
- .img-nightly
image: registry.freedesktop.org/freedesktop/ci-templates/buildah:2020-03-04
variables:
UPDATE_IMG: "nightly"
.cargo test:
stage: "test"
script:
- *cargo_test
- rustc --version
# First build and test all the crates with their relevant features
# Keep features in sync with below
- |
for crate in gstreamer*; do
if [ -n "$ALL_FEATURES" ]; then
if [ $crate = "gstreamer" ]; then
FEATURES=ser_de,v1_18
elif [ $crate = "gstreamer-gl" ]; then
FEATURES=egl,x11,wayland,v1_18
else
FEATURES=v1_18
fi
test msrv:
cargo build --color=always --manifest-path $crate/Cargo.toml --features=$FEATURES
G_DEBUG=fatal_warnings cargo test --color=always --manifest-path $crate/Cargo.toml --features=$FEATURES
else
cargo build --color=always --manifest-path $crate/Cargo.toml
G_DEBUG=fatal_warnings cargo test --color=always --manifest-path $crate/Cargo.toml
fi
done
# If we do a build with all features then also build the
# tutorials/examples with all features
- |
if [ -n "$ALL_FEATURES" ]; then
cargo build --color=always --manifest-path examples/Cargo.toml --bins --examples --all-features
cargo build --color=always --manifest-path tutorials/Cargo.toml --bins --examples --all-features
fi
test 1.40:
extends:
- '.cargo test'
- .img-msrv
needs:
- job: 'trigger'
artifacts: false
- job: 'build-msrv'
artifacts: false
- .img-1-40
test stable:
extends:
- '.cargo test'
- .img-stable
needs:
- job: 'trigger'
artifacts: false
- job: 'build-stable'
artifacts: false
test stable all-features:
variables:
ALL_FEATURES: 'yes'
EXAMPLES_TUTORIALS: 'yes'
extends:
- '.cargo test'
- .img-stable
needs:
- job: 'trigger'
artifacts: false
- job: 'build-stable'
artifacts: false
test nightly:
allow_failure: true
extends:
- '.cargo test'
- .img-nightly
needs:
- job: 'trigger'
artifacts: false
- job: 'build-nightly'
artifacts: false
test nightly all-features:
allow_failure: true
variables:
ALL_FEATURES: 'yes'
EXAMPLES_TUTORIALS: 'yes'
extends:
- '.cargo test'
- .img-nightly
needs:
- job: 'trigger'
artifacts: false
- job: 'build-nightly'
artifacts: false
.cargo test sys:
stage: "test"
script:
- ./ci/run-sys-cargo-test.sh
test stable sys:
extends:
- '.cargo test sys'
- .img-stable
needs:
- job: 'trigger'
artifacts: false
- job: 'build-stable'
artifacts: false
test msrv sys:
extends:
- '.cargo test sys'
- .img-msrv
needs:
- job: 'trigger'
artifacts: false
- job: 'build-msrv'
artifacts: false
test nightly sys:
extends:
- '.cargo test sys'
- .img-nightly
needs:
- job: 'trigger'
artifacts: false
- job: 'build-nightly'
artifacts: false
rustfmt:
extends: .img-stable
stage: "lint"
tags: [ 'placeholder-job' ]
variables:
GIT_SUBMODULE_STRATEGY: "none"
script:
- cargo fmt --version
- cargo fmt -- --color=always --check
needs:
- job: 'build-stable'
artifacts: false
check commits:
extends: .img-stable
stage: "lint"
tags: [ 'placeholder-job' ]
variables:
GIT_SUBMODULE_STRATEGY: "none"
script:
- ci-fairy check-commits --textwidth 0 --no-signed-off-by
needs:
- job: 'build-stable'
artifacts: false
typos:
extends: .img-stable
stage: "lint"
tags: [ 'placeholder-job' ]
variables:
GIT_SUBMODULE_STRATEGY: "none"
script:
- typos
needs:
- job: 'build-stable'
artifacts: false
clippy:
extends: .img-stable
stage: 'extras'
variables:
CLIPPY_LINTS: -D warnings -W unknown-lints
needs:
- job: 'trigger'
artifacts: false
- job: 'build-stable'
artifacts: false
script:
- ./ci/run-clippy.sh
- cargo clippy --version
# Keep features in sync with above
- |
for crate in gstreamer*; do
if [ $crate = "gstreamer" ]; then
FEATURES=ser_de,v1_18
elif [ $crate = "gstreamer-gl" ]; then
FEATURES=egl,x11,wayland,v1_18
else
FEATURES=v1_18
fi
cargo clippy --color=always --manifest-path $crate/Cargo.toml --features=$FEATURES --all-targets -- -A clippy::redundant_pattern_matching -A clippy::single_match -A clippy::cast_lossless -A clippy::missing_safety_doc -D warnings
done
# And also run over all the examples/tutorials
- |
cargo clippy --color=always --manifest-path examples/Cargo.toml --all-targets --all-features -- -A clippy::redundant_pattern_matching -A clippy::single_match -A clippy::cast_lossless -A clippy::missing_safety_doc -D warnings
cargo clippy --color=always --manifest-path tutorials/Cargo.toml --all-targets --all-features -- -A clippy::redundant_pattern_matching -A clippy::single_match -A clippy::cast_lossless -A clippy::missing_safety_doc -D warnings
deny:
extends: .img-stable
stage: 'extras'
needs:
- job: 'build-stable'
artifacts: false
rules:
- if: $CI_PIPELINE_SOURCE == "schedule"
only:
- schedules
script:
- cargo update --color=always
- cargo deny --color=always --workspace --all-features check all
- cargo deny check
gir-checks:
variables:
GIT_SUBMODULE_STRATEGY: recursive
extends: .img-stable
stage: 'extras'
tags:
- "gstreamer"
needs:
- job: 'build-stable'
artifacts: false
script:
- git submodule update --checkout
- python3 ci/gir-checks.py
- git clone --depth 1 https://github.com/gtk-rs/checker
- cd checker && echo '[workspace]' >> Cargo.toml
- cargo build --release
- |
cargo run --release -- \
--gir-file ../Gir_GstApp.toml ../gstreamer-app \
--gir-file ../Gir_GstAudio.toml ../gstreamer-audio/ \
--gir-file ../Gir_GstBase.toml ../gstreamer-base \
--gir-file ../Gir_GstCheck.toml ../gstreamer-check/ \
--gir-file ../Gir_GstEditingServices.toml ../gstreamer-editing-services/ \
--gir-file ../Gir_GstGL.toml ../gstreamer-gl/ \
--gir-file ../Gir_GstNet.toml ../gstreamer-net/ \
--gir-file ../Gir_GstPbutils.toml ../gstreamer-pbutils/ \
--gir-file ../Gir_GstPlayer.toml ../gstreamer-player/ \
--gir-file ../Gir_GstRtp.toml ../gstreamer-rtp/ \
--gir-file ../Gir_GstRtspServer.toml ../gstreamer-rtsp-server/ \
--gir-file ../Gir_GstRtsp.toml ../gstreamer-rtsp/ \
--gir-file ../Gir_GstSdp.toml ../gstreamer-sdp/ \
--gir-file ../Gir_Gst.toml ../gstreamer/ \
--gir-file ../Gir_GstVideo.toml ../gstreamer-video/ \
--gir-file ../Gir_GstWebRTC.toml ../gstreamer-webrtc/
- cd ..
- |
for crate in gstreamer*; do
echo '-->' $crate
(cd $crate && ../checker/check_init_asserts)
done
outdated:
extends: .img-stable
stage: 'extras'
variables:
GIT_SUBMODULE_STRATEGY: "none"
needs:
- job: 'build-stable'
artifacts: false
rules:
- if: $CI_PIPELINE_SOURCE == "schedule"
script:
- cargo update --color=always
# Ignore derive_more until we can depend on Rust 1.75 or newer
- cargo outdated --color=always --root-deps-only --exit-code 1 -v --ignore derive_more
coverage:
allow_failure: true
extends:
- '.cargo test'
- .img-stable
stage: 'extras'
needs:
- job: 'trigger'
artifacts: false
- job: 'build-stable'
artifacts: false
variables:
ALL_FEATURES: 'yes'
RUSTFLAGS: "-Cinstrument-coverage"
LLVM_PROFILE_FILE: "gstreamer-rs-%p-%m.profraw"
only:
- schedules
script:
- *cargo_test
# generate html and cobertura report for gitlab integration
- mkdir -p coverage
- grcov . --binary-path ./target/debug/ -s . -t html,cobertura --branch --ignore-not-existing --ignore "*target*" --ignore "*/sys/*" --ignore "examples/*" --ignore "tutorials/*" --ignore "*/build.rs" -o ./coverage/
# output coverage summary for gitlab parsing.
# TODO: use grcov once https://github.com/mozilla/grcov/issues/556 is fixed
- grep % coverage/html/index.html | head -1 ; true
artifacts:
paths:
- 'coverage'
reports:
coverage_report:
coverage_format: cobertura
path: "coverage/cobertura.xml"
- cargo outdated --root-deps-only --exit-code 1 -v
doc-stripping:
variables:
GIT_SUBMODULE_STRATEGY: recursive
extends: .img-nightly
tags:
- "gstreamer"
stage: 'extras'
needs:
- job: 'build-nightly'
artifacts: false
script:
- git submodule update --checkout
- PATH=~/.cargo/bin/:$PATH ./generator.py --gir-files-directories gir-files gst-gir-files --embed-docs
- PATH=~/.cargo/bin/:$PATH ./generator.py --gir-files-directories gir-files gst-gir-files --strip-docs
- git diff --quiet || (echo 'Files changed after running `rustdoc-stripper -s`, make sure all documentation is protected with `// rustdoc-stripper-ignore-next`!'; git diff; false)
regen-check:
variables:
GIT_SUBMODULE_STRATEGY: recursive
extends: .img-nightly
tags:
- "gstreamer"
stage: 'extras'
needs:
- job: 'build-nightly'
artifacts: false
script:
- git submodule update --checkout
- PATH=~/.cargo/bin/:$PATH ./generator.py --gir-files-directories gir-files gst-gir-files --yes
- git diff --quiet || (echo 'Files changed after running `generator.py`, make sure all submodules and generated files are in the correct version!'; git diff; false)
docs:
variables:
GIT_SUBMODULE_STRATEGY: recursive
extends: .img-nightly
stage: 'extras'
tags:
- "gstreamer"
needs:
- job: 'build-nightly'
artifacts: false
script:
- git submodule update --checkout
- curl --proto '=https' --tlsv1.2 -sSf -o gir-rustdoc.py
https://gitlab.gnome.org/World/Rust/gir-rustdoc/-/raw/main/gir-rustdoc.py
- chmod +x gir-rustdoc.py
- PATH=~/.cargo/bin/:$PATH ./generator.py --gir-files-directories gir-files gst-gir-files --embed-docs --no-fmt
- |
RUSTDOCFLAGS="$RUST_DOCS_FLAGS"
RUSTFLAGS="--cfg docsrs"
eval $(./gir-rustdoc.py pre-docs)
cargo +nightly doc --workspace --exclude examples --exclude tutorials --all-features --color=always --no-deps
- mv target/doc docs
artifacts:
paths:
- 'docs'
# https://docs.gitlab.com/ee/user/project/pages/#how-it-works
# GitLab automatically deploys the `public/` folder from an
# artifact generated by the job named `pages`. This step
# re-uses the docs from the build-test `docs` step above.
pages:
extends: .img-nightly
extends: .img-stable
stage: 'deploy'
needs: [ 'docs' ]
interruptible: false
script:
- curl --proto '=https' --tlsv1.2 -sSf -o gir-rustdoc.py
https://gitlab.gnome.org/World/Rust/gir-rustdoc/-/raw/main/gir-rustdoc.py
- chmod +x gir-rustdoc.py
- ./gir-rustdoc.py html-index
# development docs
- mkdir public/git
- mv docs public/git/docs
# stable docs
- ./gir-rustdoc.py docs-from-artifacts
- ls public/
- |
for crate in gstreamer*; do
cd $crate
cargo doc --features --features=dox,embed-lgpl-docs
cd ..
done
- mv target/doc public/
when: 'manual'
artifacts:
paths:
- 'public'
rules:
- if: ($CI_DEFAULT_BRANCH == $CI_COMMIT_BRANCH) && ($CI_PROJECT_NAMESPACE == $NAMESPACE)
when: 'manual'
.windows rust docker build:
stage: 'container-final'
timeout: '2h'
needs: []
variables:
# Unlike the buildah/linux jobs, this file
# needs to be relative to windows-docker/ subdir
# as it makes life easier in the powershell script
#
# We also don't need a CONTEXT_DIR var as its also
# hardcoded to be windows-docker/
DOCKERFILE: 'ci/windows-docker/Dockerfile'
tags:
- 'windows'
- 'shell'
- '2022'
- "gstreamer-windows"
script:
# We need to pass an array and to resolve the env vars, so we can't use a variable:
- $DOCKER_BUILD_ARGS = @("--build-arg", "DEFAULT_BRANCH=$GST_UPSTREAM_BRANCH", "--build-arg", "RUST_VERSION=$RUST_VERSION")
- "& ci/windows-docker/container.ps1 $CI_REGISTRY $CI_REGISTRY_USER $CI_REGISTRY_PASSWORD $RUST_IMAGE $RUST_UPSTREAM_IMAGE $DOCKERFILE"
- |
if (!($?)) {
echo "Failed to build the image"
Exit 1
}
windows rust docker stable:
extends: '.windows rust docker build'
variables:
RUST_IMAGE: !reference [variables, "WINDOWS_RUST_STABLE_IMAGE"]
RUST_UPSTREAM_IMAGE: !reference [variables, "WINDOWS_RUST_STABLE_UPSTREAM_IMAGE"]
RUST_VERSION: !reference [variables, "GST_RS_STABLE"]
windows rust docker msrv:
extends: '.windows rust docker build'
variables:
RUST_IMAGE: !reference [variables, "WINDOWS_RUST_MINIMUM_IMAGE"]
RUST_UPSTREAM_IMAGE: !reference [variables, "WINDOWS_RUST_MINIMUM_UPSTREAM_IMAGE"]
RUST_VERSION: !reference [variables, "GST_RS_MSRV"]
.msvc2019 build:
stage: 'test'
tags:
- 'docker'
- 'windows'
- '2022'
- "gstreamer-windows"
script:
# Skip -sys tests as they don't work
# https://github.com/gtk-rs/gtk3-rs/issues/54
#
# We need to build each crate separately to avoid crates like -egl,-wayland etc on windows
- cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 &&
powershell ./ci/run_windows_tests.ps1"
- |
if (!$?) {
Write-Host "Tests Failed!"
Exit 1
}
test windows msrv:
image: $WINDOWS_RUST_MINIMUM_IMAGE
needs:
- job: 'trigger'
artifacts: false
- job: 'windows rust docker msrv'
artifacts: false
extends: '.msvc2019 build'
test windows stable:
needs:
- job: 'trigger'
artifacts: false
- job: 'windows rust docker stable'
artifacts: false
image: "$WINDOWS_RUST_STABLE_IMAGE"
extends: '.msvc2019 build'

View file

@ -1,33 +0,0 @@
### Describe your issue
<!-- a clear and concise summary of the bug. -->
<!-- For any GStreamer usage question, please contact the community using the #gstreamer channel on IRC https://www.oftc.net/ or the mailing list on https://gstreamer.freedesktop.org/lists/ -->
#### Expected Behavior
<!-- What did you expect to happen -->
#### Observed Behavior
<!-- What actually happened -->
#### Setup
- **Operating System:**
- **Device:** Computer / Tablet / Mobile / Virtual Machine <!-- Delete as appropriate !-->
- **gstreamer-rs Version:**
- **GStreamer Version:**
- **Command line:**
### Steps to reproduce the bug
<!-- please fill in exact steps which reproduce the bug on your system, for example: -->
1. open terminal
2. type `command`
### How reproducible is the bug?
<!-- The reproducibility of the bug is Always/Intermittent/Only once after doing a very specific set of steps-->
### Screenshots if relevant
### Solutions you have tried
### Related non-duplicate issues
### Additional Information
<!-- Any other information such as logs. Make use of <details> for long output -->

9
.gitmodules vendored
View file

@ -1,12 +1,3 @@
[submodule "gir"]
path = gir
url = https://github.com/gtk-rs/gir
update = none
[submodule "gir-files"]
path = gir-files
url = https://github.com/gtk-rs/gir-files
update = none
[submodule "gst-gir-files"]
path = gst-gir-files
url = https://gitlab.freedesktop.org/gstreamer/gir-files-rs.git
update = none

View file

@ -1,23 +0,0 @@
The gstreamer-rs project is dual-licensed under Apache 2.0 and MIT terms, with
the exception of the sys crates which are licensed only under the terms of the
MIT license.
Copyrights in the gstreamer-rs project are retained by their contributors. No
copyright assignment is required to contribute to the gstreamer-rs project.
Some files include explicit copyright notices and/or license notices. For full
authorship information, see the version control history.
Except as otherwise noted (below and/or in individual files), gstreamer-rs is
licensed under the Apache License, Version 2.0 <LICENSE-APACHE> or
<http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT>
or <http://opensource.org/licenses/MIT>, at your option.
All the sys crates (e.g. gstreamer/sys and gstreamer-base/sys) are licensed
only under the terms of the MIT license.
This project provides interoperability with various GStreamer libraries but
doesn't distribute any parts of them. Distributing compiled libraries and
executables that link to those libraries may be subject to terms of the GNU
LGPL or other licenses. For more information check the license of each
GStreamer library.

3124
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,169 +1,25 @@
[workspace]
resolver = "2"
default-members = [
"gstreamer/sys",
"gstreamer-analytics/sys",
"gstreamer-app/sys",
"gstreamer-audio/sys",
"gstreamer-base/sys",
"gstreamer-check/sys",
"gstreamer-controller/sys",
"gstreamer-editing-services/sys",
"gstreamer-mpegts/sys",
"gstreamer-net/sys",
"gstreamer-pbutils/sys",
"gstreamer-play/sys",
"gstreamer-player/sys",
"gstreamer-rtp/sys",
"gstreamer-rtsp/sys",
"gstreamer-rtsp-server/sys",
"gstreamer-sdp/sys",
"gstreamer-tag/sys",
"gstreamer-video/sys",
"gstreamer-webrtc/sys",
"gstreamer",
"gstreamer-analytics",
"gstreamer-app",
"gstreamer-audio",
"gstreamer-base",
"gstreamer-check",
"gstreamer-controller",
"gstreamer-editing-services",
"gstreamer-mpegts",
"gstreamer-net",
"gstreamer-pbutils",
"gstreamer-play",
"gstreamer-player",
"gstreamer-rtp",
"gstreamer-rtsp",
"gstreamer-rtsp-server",
"gstreamer-sdp",
"gstreamer-tag",
"gstreamer-validate",
"gstreamer-video",
"gstreamer-webrtc",
"examples",
"tutorials",
]
members = [
"gstreamer/sys",
"gstreamer-analytics/sys",
"gstreamer-app/sys",
"gstreamer-audio/sys",
"gstreamer-base/sys",
"gstreamer-check/sys",
"gstreamer-controller/sys",
"gstreamer-editing-services/sys",
"gstreamer-gl/sys",
"gstreamer-gl/egl/sys",
"gstreamer-gl/wayland/sys",
"gstreamer-gl/x11/sys",
"gstreamer-mpegts/sys",
"gstreamer-net/sys",
"gstreamer-pbutils/sys",
"gstreamer-play/sys",
"gstreamer-player/sys",
"gstreamer-rtp/sys",
"gstreamer-rtsp/sys",
"gstreamer-rtsp-server/sys",
"gstreamer-sdp/sys",
"gstreamer-tag/sys",
"gstreamer-video/sys",
"gstreamer-webrtc/sys",
"gstreamer-allocators/sys",
"gstreamer",
"gstreamer-analytics",
"gstreamer-app",
"gstreamer-audio",
"gstreamer-base",
"gstreamer-check",
"gstreamer-controller",
"gstreamer-editing-services",
"gstreamer-gl",
"gstreamer-gl/egl",
"gstreamer-gl/wayland",
"gstreamer-gl/x11",
"gstreamer-mpegts",
"gstreamer-net",
"gstreamer-pbutils",
"gstreamer-play",
"gstreamer-player",
"gstreamer-rtp",
"gstreamer-rtsp",
"gstreamer-rtsp-server",
"gstreamer-sdp",
"gstreamer-tag",
"gstreamer-validate",
"gstreamer-video",
"gstreamer-pbutils",
"gstreamer-webrtc",
"gstreamer-allocators",
"gstreamer-utils",
"gstreamer-check",
"gstreamer-editing-services",
"gstreamer-gl",
"gstreamer-rtp",
"examples",
"tutorials",
"docs",
]
exclude = ["gir"]
[workspace.package]
version = "0.24.0"
categories = ["api-bindings", "multimedia"]
repository = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs"
homepage = "https://gstreamer.freedesktop.org"
edition = "2021"
rust-version = "1.71.1"
[workspace.dependencies]
gio = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
gio-sys = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
glib = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
glib-sys = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
gobject-sys = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
cairo-rs = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
pango = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
pangocairo = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
gstreamer-allocators-sys = { path = "gstreamer-allocators/sys" }
gstreamer-analytics-sys = { path = "gstreamer-analytics/sys" }
gstreamer-app-sys = { path = "gstreamer-app/sys" }
gstreamer-audio-sys = { path = "./gstreamer-audio/sys"}
gstreamer-base-sys = { path = "./gstreamer-base/sys"}
gstreamer-check-sys = { path = "./gstreamer-check/sys" }
gstreamer-controller-sys = { path = "./gstreamer-controller/sys" }
gstreamer-editing-services-sys = { path = "./gstreamer-editing-services/sys"}
gstreamer-gl-egl-sys = { path = "./gstreamer-gl/egl/sys"}
gstreamer-gl-wayland-sys = { path = "./gstreamer-gl/wayland/sys"}
gstreamer-gl-x11-sys = { path = "./gstreamer-gl/x11/sys"}
gstreamer-gl-sys = { path = "./gstreamer-gl/sys"}
gstreamer-mpegts-sys = { path = "./gstreamer-mpegts/sys"}
gstreamer-net-sys = { path = "./gstreamer-net/sys"}
gstreamer-pbutils-sys = { path = "./gstreamer-pbutils/sys"}
gstreamer-play-sys = { path = "./gstreamer-play/sys" }
gstreamer-player-sys = { path = "./gstreamer-player/sys" }
gstreamer-rtp-sys = { path = "./gstreamer-rtp/sys" }
gstreamer-rtsp-sys = { path = "./gstreamer-rtsp/sys"}
gstreamer-rtsp-server-sys = { path = "./gstreamer-rtsp-server/sys" }
gstreamer-sdp-sys = { path = "./gstreamer-sdp/sys"}
gstreamer-tag-sys = { path = "./gstreamer-tag/sys" }
gstreamer-sys = { path = "./gstreamer/sys"}
gstreamer-validate-sys = { path = "./gstreamer-validate/sys" }
gstreamer-video-sys = { path = "./gstreamer-video/sys"}
gstreamer-webrtc-sys = { path = "./gstreamer-webrtc/sys" }
ges = { package = "gstreamer-editing-services", path = "./gstreamer-editing-services" }
gst = { package = "gstreamer", path = "./gstreamer" }
gst-allocators = { package = "gstreamer-allocators", path = "./gstreamer-allocators" }
gst-app = { package = "gstreamer-app", path = "./gstreamer-app" }
gst-audio = { package = "gstreamer-audio", path = "./gstreamer-audio" }
gst-base = { package = "gstreamer-base", path = "./gstreamer-base" }
gst-check = { package = "gstreamer-check", path = "./gstreamer-check" }
gst-gl = { package = "gstreamer-gl", path = "./gstreamer-gl" }
gst-gl-egl = { package = "gstreamer-gl-egl", path = "./gstreamer-gl/egl" }
gst-gl-x11 = { package = "gstreamer-gl-x11", path = "./gstreamer-gl/x11" }
gst-net = { package = "gstreamer-net", path = "./gstreamer-net" }
gst-pbutils = { package = "gstreamer-pbutils", path = "./gstreamer-pbutils" }
gst-play = { package = "gstreamer-play", path = "./gstreamer-play" }
gst-player = { package = "gstreamer-player", path = "./gstreamer-player" }
gst-rtsp = { package = "gstreamer-rtsp", path = "./gstreamer-rtsp" }
gst-rtsp-server = { package = "gstreamer-rtsp-server", path = "./gstreamer-rtsp-server" }
gst-sdp = { package = "gstreamer-sdp", path = "./gstreamer-sdp" }
gst-video = { package = "gstreamer-video", path = "./gstreamer-video" }

View file

@ -1,8 +1,7 @@
# How to update the bindings
* Take the updated .gir files (e.g. from your gst-build checkout) and put
them in the gir-files directory
* In the gir-files directory, run ./fix.sh
* Make sure gstreamer-rs-sys is up to date
* Take the updated .gir files from gstreamer-rs-sys and copy them over
* If there is a new GStreamer version: Manually update `gst*/Cargo.toml`
* Run generator.py
* Investigate the diff, fix any mess-ups, look at commented functions and

File diff suppressed because it is too large Load diff

View file

@ -1,74 +1,37 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstApp"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.8"
target_path = "gstreamer-app"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"GLib",
"GObject",
"Gst",
"GstBase",
]
generate = [
"GstApp.AppLeakyType",
"GstApp.AppStreamType",
]
manual = [
"GObject.Object",
"Gst.Element",
"Gst.Format",
"Gst.MiniObject",
"Gst.Object",
"Gst.Element",
"Gst.URIHandler",
"Gst.FlowReturn",
"Gst.Format",
"GstBase.BaseSrc",
"GstBase.BaseSink",
]
[[object]]
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.BufferList"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "Option"
[[object]]
name = "Gst.FlowReturn"
status = "manual"
must_use = true
[object.conversion_type]
variant = "Result"
ok_type = "gst::FlowSuccess"
err_type = "gst::FlowError"
[[object]]
name = "Gst.Sample"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstApp.AppSink"
status = "generate"
@ -84,11 +47,6 @@ final_type = true
# Action signal
ignore = true
[[object.signal]]
name = "pull-object"
# Action signal
ignore = true
[[object.signal]]
name = "try-pull-sample"
# Action signal
@ -99,50 +57,25 @@ final_type = true
# Action signal
ignore = true
[[object.signal]]
name = "try-pull-object"
# Action signal
ignore = true
[[object.signal]]
name = "propose-allocation"
# Action signal
ignore = true
[[object.property]]
name = "emit-signals"
# Use callbacks instead
ignore = true
[[object.function]]
name = "get_emit_signals"
# Use callbacks instead
ignore = true
[[object.function]]
name = "set_emit_signals"
# Use callbacks instead
ignore = true
[[object.signal]]
name = "new-sample"
# Use callbacks instead
# Use Result<FlowSuccess, FlowError>
ignore = true
[[object.signal]]
name = "new-preroll"
# Use callbacks instead
# Use Result<FlowSuccess, FlowError>
ignore = true
[[object.signal]]
name = "eos"
# Use callbacks instead
ignore = true
concurrency = "send"
[[object.signal]]
name = "new-serialized-event"
# Use callbacks instead
ignore = true
[[object.function]]
name = "set_caps"
[[object.function.parameter]]
name = "caps"
nullable = true
[[object.function]]
name = "pull_preroll"
@ -154,16 +87,21 @@ final_type = true
[object.function.return]
nullable_return_is_error = "Failed to pull sample"
[[object.function]]
name = "pull_object"
[object.function.return]
nullable_return_is_error = "Failed to pull object"
[[object]]
name = "GstApp.AppSrc"
status = "generate"
final_type = true
[[object.function]]
name = "push_buffer"
# Pass by value
ignore = true
[[object.function]]
name = "push_buffer_list"
# Pass by value
ignore = true
[[object.signal]]
name = "end-of-stream"
# Action signal
@ -184,42 +122,58 @@ final_type = true
# Action signal
ignore = true
[[object.property]]
name = "emit-signals"
# Use callbacks instead
[[object.function]]
name = "end_of_stream"
# Use Result<FlowSuccess, FlowError>
ignore = true
[[object.function]]
name = "get_emit_signals"
# Use callbacks instead
ignore = true
[[object.function]]
name = "set_emit_signals"
# Use callbacks instead
ignore = true
[[object.signal]]
name = "enough-data"
# Use callbacks instead
ignore = true
[[object.signal]]
name = "need-data"
# Use callbacks instead
ignore = true
[[object.signal]]
name = "seek-data"
# Use callbacks instead
name = "push_sample"
# Use Result<FlowSuccess, FlowError>
ignore = true
[[object.function]]
name = "set_latency"
# ClockTime
manual = true
ignore = true
[[object.function]]
name = "get_latency"
# ClockTime
manual = true
ignore = true
[[object.function]]
name = "set_caps"
[[object.function.parameter]]
name = "caps"
nullable = true
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref-mut"
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.BufferList"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Sample"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"

255
Gir_GstAudio.toml Normal file
View file

@ -0,0 +1,255 @@
[options]
girs_dir = "gir-files"
library = "GstAudio"
version = "1.0"
min_cfg_version = "1.8"
target_path = "gstreamer-audio"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
external_libraries = [
"GLib",
"GObject",
"Gst",
"GstBase",
]
generate = [
"GstAudio.AudioFormatFlags",
"GstAudio.AudioLayout",
"GstAudio.AudioChannelPosition",
"GstAudio.StreamVolume",
"GstAudio.StreamVolumeFormat",
"GstAudio.AudioSink",
"GstAudio.AudioSrc",
"GstAudio.AudioBaseSink",
"GstAudio.AudioBaseSrc",
]
manual = [
"GObject.Object",
"Gst.Object",
"Gst.Element",
"Gst.Allocator",
"Gst.AllocationParams",
"Gst.TagList",
"Gst.TagMergeMode",
"GstBase.BaseSink",
"GstBase.BaseSrc",
"GstAudio.AudioInfo",
"GstAudio.AudioFormatInfo",
]
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "GstAudio.AudioFormat"
status = "generate"
[[object.derive]]
name = "Debug, Eq, PartialEq, Hash"
[[object.member]]
name = "s16"
# Platform dependant
ignore = true
[[object.member]]
name = "u16"
# Platform dependant
ignore = true
[[object.member]]
name = "s24_32"
# Platform dependant
ignore = true
[[object.member]]
name = "u24_32"
# Platform dependant
ignore = true
[[object.member]]
name = "s32"
# Platform dependant
ignore = true
[[object.member]]
name = "u32"
# Platform dependant
ignore = true
[[object.member]]
name = "s24"
# Platform dependant
ignore = true
[[object.member]]
name = "u24"
# Platform dependant
ignore = true
[[object.member]]
name = "s20"
# Platform dependant
ignore = true
[[object.member]]
name = "u20"
# Platform dependant
ignore = true
[[object.member]]
name = "s18"
# Platform dependant
ignore = true
[[object.member]]
name = "u18"
# Platform dependant
ignore = true
[[object.member]]
name = "f32"
# Platform dependant
ignore = true
[[object.member]]
name = "f64"
# Platform dependant
ignore = true
[[object]]
name = "GstAudio.AudioStreamAlign"
status = "generate"
[[object.function]]
name = "process"
# bool does not signal error
ignore = true
[[object.function]]
pattern = "get_.*"
[[object.function.parameter]]
name = "align"
const = true
[[object]]
name = "GstAudio.AudioDecoder"
status = "generate"
manual_traits = ["AudioDecoderExtManual"]
[[object.function]]
name = "finish_frame"
ignore = true
[[object.function]]
name = "finish_subframe"
ignore = true
[[object.function]]
name = "negotiate"
ignore = true
[[object.function]]
name = "set_output_caps"
ignore = true
[[object.function]]
name = "set_output_format"
ignore = true
[[object.function]]
name = "get_allocator"
ignore = true
[[object.function]]
name = "proxy_getcaps"
[object.function.return]
nullable = false
[[object.function]]
name = "allocate_output_buffer"
[object.function.return]
nullable_return_is_error = "Failed to allocate output buffer"
[[object]]
name = "GstAudio.AudioEncoder"
status = "generate"
manual_traits = ["AudioEncoderExtManual"]
[[object.function]]
name = "finish_frame"
ignore = true
[[object.function]]
name = "negotiate"
ignore = true
[[object.function]]
name = "set_output_format"
ignore = true
[[object.function]]
name = "get_allocator"
ignore = true
[[object.function]]
name = "get_latency"
ignore = true
[[object.function]]
name = "proxy_getcaps"
[object.function.return]
nullable = false
[[object.function]]
name = "allocate_output_buffer"
[object.function.return]
nullable_return_is_error = "Failed to allocate output buffer"
[[object]]
name = "GstAudio.AudioRingBufferFormatType"
status = "generate"
[[object.member]]
name = "mpeg2_aac_raw"
version = "1.12"
[[object.member]]
name = "mpeg4_aac_raw"
version = "1.12"
[[object.member]]
name = "flac"
version = "1.12"
[[object]]
name = "GstAudio.AudioFlags"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object]]
name = "GstAudio.AudioPackFlags"
status = "generate"
[[object.member]]
name = "none"
ignore = true

View file

@ -1,14 +1,14 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstBase"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.8"
target_path = "gstreamer-base"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"GLib",
@ -17,84 +17,197 @@ external_libraries = [
]
generate = [
"GstBase.AggregatorStartTimeSelection",
"GstBase.PushSrc",
"GstBase.AggregatorStartTimeSelection",
]
manual = [
"GLib.Bytes",
"GObject.Object",
"Gst.AllocationParams",
"Gst.Allocator",
"Gst.BufferFlags",
"GLib.Bytes",
"Gst.Object",
"Gst.Element",
"Gst.BufferPool",
"Gst.ClockTimeDiff",
"Gst.Element",
"Gst.EventType",
"Gst.ClockReturn",
"Gst.FlowReturn",
"Gst.Format",
"Gst.Memory",
"Gst.Object",
"Gst.Pad",
"Gst.Segment",
"Gst.State",
"Gst.StateChangeReturn",
"Gst.TagMergeMode",
"Gst.TypeFindProbability",
"Gst.TagMergeMode",
"GstBase.BaseParseFrame",
]
[[object]]
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
name = "GstBase.Adapter"
status = "generate"
final_type = true
concurrency = "send-unique"
[[object.function]]
name = "map"
# Unsafe
ignore = true
[[object.function]]
name = "unmap"
# Unsafe
ignore = true
[[object.function]]
name = "copy"
# Unsafe
ignore = true
[[object.function]]
name = "push"
# Move Buffer
ignore = true
[[object.function]]
name = "take"
# Useless copying of data
ignore = true
[[object.function]]
name = "copy_bytes"
[object.function.return]
nullable_return_is_error = "Failed to copy bytes"
[[object.function]]
name = "get_buffer"
[object.function.return]
nullable_return_is_error = "Failed to get buffer"
[[object.function]]
name = "get_buffer_fast"
[object.function.return]
nullable_return_is_error = "Failed to get buffer"
[[object.function]]
name = "get_buffer_list"
[object.function.return]
nullable_return_is_error = "Failed to get buffer list"
[[object.function]]
name = "take_buffer"
[object.function.return]
nullable_return_is_error = "Failed to take buffer"
[[object.function]]
name = "take_buffer_fast"
[object.function.return]
nullable_return_is_error = "Failed to take buffer"
[[object.function]]
name = "take_buffer_list"
[object.function.return]
nullable_return_is_error = "Failed to take buffer list"
[[object]]
name = "Gst.BufferList"
name = "GstBase.FlowCombiner"
# Manual because ref/unref functions were added much later
status = "manual"
ref_mode = "ref"
final_type = true
concurrency = "none"
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
name = "GstBase.BaseSink"
status = "generate"
manual_traits = ["BaseSinkExtManual"]
[[object.function]]
name = "wait"
# Use Result<FlowSuccess, FlowError>
ignore = true
[[object.function]]
name = "wait_preroll"
# Use Result<FlowSuccess, FlowError>
ignore = true
[[object.function]]
name = "wait_clock"
# Use Result<ClockSuccess, ClockError>
ignore = true
[[object.function]]
name = "query_latency"
# Use Result
ignore = true
[[object]]
name = "Gst.ClockReturn"
status = "manual"
must_use = true
[object.conversion_type]
variant = "Result"
ok_type = "gst::ClockSuccess"
err_type = "gst::ClockError"
name = "GstBase.BaseSrc"
status = "generate"
manual_traits = ["BaseSrcExtManual"]
[[object.function]]
name = "set_caps"
[object.function.return]
bool_return_is_error = "Failed to set caps"
[[object.function]]
name = "start_complete"
# Use Result<FlowSuccess, FlowError>
ignore = true
[[object.function]]
name = "start_wait"
# Use Result<FlowSuccess, FlowError>
ignore = true
[[object.function]]
name = "submit_buffer_list"
# Pass by value, to be added manually
ignore = true
[[object.function]]
name = "wait_playing"
# Use Result<FlowSuccess, FlowError>
ignore = true
[[object.function]]
name = "query_latency"
# Use Result
ignore = true
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "Option"
name = "GstBase.BaseTransform"
status = "generate"
manual_traits = ["BaseTransformExtManual"]
[[object.function]]
name = "update_src_caps"
[object.function.return]
bool_return_is_error = "Failed to update src caps"
[[object]]
name = "Gst.FlowReturn"
status = "manual"
must_use = true
[object.conversion_type]
variant = "Result"
ok_type = "gst::FlowSuccess"
err_type = "gst::FlowError"
name = "GstBase.Aggregator"
status = "generate"
manual_traits = ["AggregatorExtManual"]
[[object.function]]
name = "finish_buffer"
# Takes ownership
ignore = true
[[object.property]]
name = "latency"
version = "1.14"
[[object.property]]
name = "start-time-selection"
version = "1.18"
[[object.property]]
name = "min-upstream-latency"
# clock time instead of u64
ignore = true
[[object]]
name = "Gst.Sample"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.TagList"
status = "manual"
ref_mode = "ref"
name = "GstBase.AggregatorPad"
status = "generate"
manual_traits = ["AggregatorPadExtManual"]
[[object]]
name = "GstBase.*"
@ -107,37 +220,22 @@ status = "generate"
[[object.function]]
name = "type_find_helper_for_data"
# broken return value
manual = true
ignore = true
[[object.function]]
name = "type_find_helper_for_data_with_extension"
# broken return value
manual = true
ignore = true
[[object.function]]
name = "type_find_helper_for_buffer"
# broken return value
manual = true
ignore = true
[[object.function]]
name = "type_find_helper_for_buffer_with_extension"
# broken return value
manual = true
[[object.function]]
name = "type_find_helper_for_buffer_with_caps"
# broken return value
manual = true
[[object.function]]
name = "type_find_helper_for_data_with_caps"
# broken return value
manual = true
[[object.function]]
name = "type_find_list_factories_for_caps"
# broken return value
manual = true
ignore = true
[[object.function]]
name = "type_find_helper"
@ -150,125 +248,59 @@ status = "generate"
nullable_return_is_error = "Could not find type"
[[object]]
name = "GstBase.Adapter"
status = "generate"
final_type = true
concurrency = "none"
[[object.function]]
name = "map"
# Unsafe, implemented on `UniqueAdapter`
ignore = true
[[object.function]]
name = "unmap"
# Unsafe
ignore = true
[[object.function]]
pattern = "copy.*"
# Unsafe
manual = true
[[object.function]]
name = "push"
# Move Buffer
manual = true
[[object.function]]
pattern = "take.*"
# Unsafe
ignore = true
[[object.function]]
pattern = "get.*"
# Unsafe
ignore = true
[[object.function]]
pattern = "masked.*"
# Unsafe
ignore = true
[[object.function]]
name = "flush"
# Unsafe Buffer
manual = true
name = "Gst.Structure"
status = "manual"
ref_mode = "ref-mut"
[[object]]
name = "GstBase.Aggregator"
status = "generate"
manual_traits = ["AggregatorExtManual"]
[[object.function]]
name = "update_segment"
# Takes FormattedValue as argument
manual = true
[[object.function]]
name = "get_allocator"
manual = true
[[object.function]]
name = "selected_samples"
# Info parameter
manual = true
[[object.function]]
name = "set_latency"
[[object.function.parameter]]
name = "min_latency"
mandatory = true
[[object.property]]
name = "start-time-selection"
version = "1.18"
[[object.property]]
name = "min-upstream-latency"
# clock time instead of u64
manual = true
[[object.signal]]
name = "samples-selected"
# StructureRef instead of Structure
manual = true
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstBase.AggregatorPad"
status = "generate"
manual_traits = ["AggregatorPadExtManual"]
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.BufferList"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Sample"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "Gst.TagList"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstBase.BaseParse"
status = "generate"
manual_traits = ["BaseParseExtManual"]
[[object.function]]
name = "add_index_entry"
[[object.function.parameter]]
name = "ts"
mandatory = true
[[object.function]]
name = "finish_frame"
manual = true
ignore = true
[[object.function]]
name = "set_duration"
manual = true
[[object.function]]
name = "set_latency"
[[object.function.parameter]]
name = "min_latency"
mandatory = true
ignore = true
[[object.function]]
name = "convert_default"
manual = true
ignore = true
[[object.function]]
name = "set_frame_rate"
manual = true
ignore = true
# Didn't bind gst_base_parse_frame_{new,init} so not needed
[[object.function]]
@ -281,146 +313,3 @@ status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object]]
name = "GstBase.BaseSink"
status = "generate"
manual_traits = ["BaseSinkExtManual"]
[[object.function]]
name = "get_latency"
[object.function.return]
mandatory = true
[[object.function]]
name = "get_processing_deadline"
[object.function.return]
mandatory = true
[[object.function]]
name = "get_render_delay"
[object.function.return]
mandatory = true
[[object.function]]
name = "query_latency"
# Use Result
manual = true
[[object.function]]
name = "is_qos_enabled"
# Same as the qos property
ignore = true
[[object.function]]
name = "set_qos_enabled"
# Same as the qos property
ignore = true
[[object.function]]
name = "is_async_enabled"
# Same as the async property
ignore = true
[[object.function]]
name = "set_async_enabled"
# Same as the async property
ignore = true
[[object.function]]
name = "is_last_sample_enabled"
# Same as the enable-last-sample property
ignore = true
[[object.function]]
name = "set_last_sample_enabled"
# Same as the enable-last-sample property
ignore = true
[[object.function]]
name = "get_drop_out_of_segment"
rename = "drops_out_of_segment"
[[object.function]]
name = "set_processing_deadline"
[[object.function.parameter]]
name = "processing_deadline"
mandatory = true
[[object.function]]
name = "set_render_delay"
[[object.function.parameter]]
name = "delay"
mandatory = true
[[object.function]]
name = "wait_clock"
[[object.function.parameter]]
name = "time"
mandatory = true
[[object]]
name = "GstBase.BaseSrc"
status = "generate"
manual_traits = ["BaseSrcExtManual"]
[[object.function]]
name = "get_allocator"
manual = true
[[object.function]]
name = "set_caps"
[object.function.return]
bool_return_is_error = "Failed to set caps"
[[object.function]]
name = "submit_buffer_list"
# Pass by value, to be added manually
ignore = true
[[object.function]]
name = "query_latency"
# Use Result
manual = true
[[object.function]]
name = "new_segment"
[object.function.return]
bool_return_is_error = "Failed to update segment"
[[object]]
name = "GstBase.BaseTransform"
status = "generate"
manual_traits = ["BaseTransformExtManual"]
[[object.function]]
name = "get_allocator"
manual = true
[[object.function]]
name = "update_qos"
[[object.function.parameter]]
name = "timestamp"
mandatory = true
[[object.function]]
name = "update_src_caps"
[object.function.return]
bool_return_is_error = "Failed to update src caps"
[[object.function]]
name = "is_qos_enabled"
# Same as the qos property
ignore = true
[[object.function]]
name = "set_qos_enabled"
# Same as the qos property
ignore = true
[[object]]
name = "GstBase.FlowCombiner"
# Manual because ref/unref functions were added much later
status = "manual"
final_type = true
concurrency = "none"

39
Gir_GstCheck.toml Normal file
View file

@ -0,0 +1,39 @@
[options]
girs_dir = "gir-files"
library = "GstCheck"
version = "1.0"
min_cfg_version = "1.8"
target_path = "gstreamer-check"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
external_libraries = [
"GLib",
"GObject",
"Gst",
]
generate = [
]
manual = [
"GObject.Object",
"Gst.Object",
"Gst.Clock",
"Gst.ClockTimeDiff",
"Gst.ClockType",
"GstCheck.Harness",
]
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "GstCheck.TestClock"
status = "generate"
final_type = true

View file

@ -1,188 +1,87 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GES"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.8"
target_path = "gstreamer-editing-services"
work_mode = "normal"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"Gio",
"GLib",
"GObject",
"Gio",
"Gst",
"GstPbutils",
]
manual = [
"GES.FrameCompositionMeta",
"Gio.AsyncReadyCallback",
"Gio.Cancellable",
"GLib.Date",
"GLib.DateTime",
"GLib.Error",
"GLib.Source",
"GLib.Type",
"GLib.DateTime",
"Gio.Cancellable",
"Gio.AsyncReadyCallback",
"GObject.Object",
"GObject.ParamSpec",
"GObject.Value",
"Gst.Bin",
"Gst.ChildProxy",
"Gst.ControlBinding",
"Gst.ControlSource",
"Gst.CoreError",
"Gst.DateTime",
"Gst.Element",
"Gst.ElementFactory",
"Gst.EventType",
"Gst.Pad",
"Gst.Pipeline",
"Gst.Segment",
"Gst.State",
"Gst.StaticCaps",
"Gst.StaticPadTemplate",
"GstPbutils.EncodingProfile",
"GstPbutils.DiscovererInfo",
"GstPbutils.DiscovererStreamInfo",
"GstPbutils.EncodingProfile",
"Gst.Object",
"Gst.Element",
"Gst.Pad",
"Gst.Pipeline",
]
generate = [
"GES.AssetLoadingReturn",
"GES.AudioSource",
"GES.AudioTestSource",
"GES.AudioTrack",
"GES.AudioTransition",
"GES.AudioUriSource",
"GES.BaseEffectClip",
"GES.BaseTransitionClip",
"GES.BaseXmlFormatter",
"GES.ChildrenControlMode",
"GES.CommandLineFormatter",
"GES.Edge",
"GES.EditMode",
"GES.EffectClip",
"GES.Error",
"GES.ExtractableCheckId",
"GES.Formatter",
"GES.FrameNumber",
"GES.Group",
"GES.ImageSource",
"GES.Marker",
"GES.MarkerFlags",
"GES.MarkerList",
"GES.MetaContainer",
"GES.MetaFlag",
"GES.MultiFileSource",
"GES.Operation",
"GES.OperationClip",
"GES.OverlayClip",
"GES.PipelineFlags",
"GES.Source",
"GES.SourceClip",
"GES.TestClip",
"GES.TextHAlign",
"GES.TextOverlay",
"GES.TextOverlayClip",
"GES.TextVAlign",
"GES.TitleSource",
"GES.Edge",
"GES.FrameNumber",
"GES.TrackType",
"GES.Transition",
"GES.TransitionClip",
"GES.VideoSource",
"GES.VideoStandardTransitionType",
"GES.VideoTestPattern",
"GES.VideoTestSource",
"GES.VideoTrack",
"GES.VideoTransition",
"GES.VideoUriSource",
"GES.XmlFormatter",
"GES.Group",
"GES.UriClipAsset",
"GES.UriSourceAsset",
]
[[object]]
name = "GES.Asset"
status = "generate"
concurrency = "send+sync"
[[object.function]]
name = "set_proxy"
[object.function.return]
bool_return_is_error = "Failed to set proxy"
name = "Gst.Structure"
status = "manual"
ref_mode = "ref-mut"
[[object.function]]
name = "unproxy"
[object.function.return]
bool_return_is_error = "Failed to unproxy asset"
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.BufferList"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Sample"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "GES.BaseEffect"
status = "generate"
[[object.function]]
name = "set_time_translation_funcs"
# Pointers and HashTables
ignore = true
[[object]]
name = "GES.Clip"
status = "generate"
[[object.function]]
name = "get_duration_limit"
[object.function.return]
mandatory = true
[[object.function]]
name = "move_to_layer"
[object.function.return]
bool_return_is_error = "Failed to move clip to specified layer"
[[object.function]]
name = "set_top_effect_index"
[object.function.return]
bool_return_is_error = "Failed to move effect"
[[object.function]]
name = "set_top_effect_priority"
[object.function.return]
bool_return_is_error = "Failed to the set top effect priority"
[[object.function]]
name = "add_asset"
[object.function.return]
nullable_return_is_error = "Failed to add asset"
[[object.function]]
name = "split"
[object.function.return]
nullable_return_is_error = "Failed to split clip"
[[object]]
name = "GES.ClipAsset"
status = "generate"
concurrency = "send+sync"
[[object]]
name = "GES.Container"
status = "generate"
trait_name = "GESContainerExt"
[[object.function]]
name = "add"
[object.function.return]
bool_return_is_error = "Failed to add element"
[[object.function]]
name = "remove"
[object.function.return]
bool_return_is_error = "Failed to remove element"
[[object.function]]
name = "edit"
[object.function.return]
bool_return_is_error = "Failed to edit container"
[[object]]
name = "GES.DiscovererManager"
status = "generate"
# Actually available since 1.2 even if all functions are new in 1.18
version = "1.2"
[[object]]
name = "GES.Effect"
@ -193,96 +92,20 @@ status = "generate"
nullable_return_is_error = "Failed to create effect from description"
[[object]]
name = "GES.EffectAsset"
status = "generate"
concurrency = "send+sync"
[[object]]
name = "GES.Extractable"
name = "GES.UriClip"
status = "generate"
[[object.function]]
name = "set_asset"
name = "new"
[object.function.return]
bool_return_is_error = "Failed to set asset"
[[object]]
name = "GES.Layer"
status = "generate"
[[object.function]]
name = "add_clip"
[object.function.return]
bool_return_is_error = "Failed to add clip"
[[object.function]]
name = "get_duration"
[object.function.return]
mandatory = true
[[object.function]]
name = "remove_clip"
[object.function.return]
bool_return_is_error = "Failed to remove clip"
[[object.function]]
name = "add_asset"
[object.function.return]
nullable_return_is_error = "Failed to add asset"
[[object]]
name = "GES.Pipeline"
status = "generate"
trait_name = "GESPipelineExt"
[[object.function]]
name = "set_render_settings"
[object.function.return]
bool_return_is_error = "Failed to set render settings"
[[object.function]]
name = "set_mode"
[object.function.return]
bool_return_is_error = "Failed to set mode"
[[object.function]]
name = "save_thumbnail"
[object.function.return]
bool_return_is_error = "Failed to save thumbnail"
[[object.function]]
name = "set_timeline"
[object.function.return]
bool_return_is_error = "Failed to set timeline"
[[object]]
name = "GES.Project"
status = "generate"
[[object.function]]
name = "add_encoding_profile"
[object.function.return]
bool_return_is_error = "Failed to add profile"
[[object.function]]
name = "remove_asset"
[object.function.return]
bool_return_is_error = "Failed to remove asset"
[[object]]
name = "GES.SourceClipAsset"
status = "generate"
concurrency = "send+sync"
nullable_return_is_error = "Failed to create Uri clip from Uri"
[[object]]
name = "GES.Timeline"
status = "generate"
[[object.function]]
name = "get_duration"
name = "append_layer"
[object.function.return]
mandatory = true
[[object.function]]
name = "get_frame_at"
[[object.function.parameter]]
name = "timestamp"
mandatory = true
nullable = false
[[object.function]]
name = "load_from_uri"
@ -309,143 +132,53 @@ status = "generate"
[object.function.return]
bool_return_is_error = "Failed to move layer"
[[object.function]]
name = "paste_element"
[[object.function.parameter]]
name = "position"
mandatory = true
[[object.function]]
name = "remove_track"
[object.function.return]
bool_return_is_error = "Failed to remove track"
[[object]]
name = "GES.Container"
status = "generate"
trait_name = "GESContainerExt"
[[object.function]]
name = "set_snapping_distance"
[[object.function.parameter]]
name = "snapping_distance"
mandatory = true
name = "add"
[object.function.return]
bool_return_is_error = "Failed to add element"
[[object.function]]
name = "remove"
[object.function.return]
bool_return_is_error = "Failed to remove element"
[[object.function]]
name = "edit"
[object.function.return]
bool_return_is_error = "Failed to edit container"
[[object]]
name = "GES.TimelineElement"
name = "GES.Pipeline"
status = "generate"
manual_traits = ["TimelineElementExtManual"]
trait_name = "GESPipelineExt"
[[object.function]]
name = "get_duration"
name = "set_render_settings"
[object.function.return]
mandatory = true
bool_return_is_error = "Failed to set render settings"
[[object.function]]
name = "get_inpoint"
name = "set_mode"
[object.function.return]
mandatory = true
bool_return_is_error = "Failed to set mode"
[[object.function]]
name = "get_start"
name = "save_thumbnail"
[object.function.return]
mandatory = true
[[object.function]]
name = "paste"
[[object.function.parameter]]
name = "paste_position"
mandatory = true
[object.function.return]
nullable_return_is_error = "Failed to paste timeline element"
[[object.function]]
name = "ripple"
[[object.function.parameter]]
name = "start"
mandatory = true
[object.function.return]
bool_return_is_error = "Failed to ripple"
[[object.function]]
name = "ripple_end"
[[object.function.parameter]]
name = "end"
mandatory = true
[object.function.return]
bool_return_is_error = "Failed to ripple"
[[object.function]]
name = "roll_end"
[[object.function.parameter]]
name = "end"
mandatory = true
[object.function.return]
bool_return_is_error = "Failed to roll"
[[object.function]]
name = "roll_start"
[[object.function.parameter]]
name = "start"
mandatory = true
[object.function.return]
bool_return_is_error = "Failed to roll"
[[object.function]]
name = "set_inpoint"
[[object.function.parameter]]
name = "inpoint"
mandatory = true
[[object.function]]
name = "set_name"
[object.function.return]
bool_return_is_error = "Failed to set name"
[[object.function]]
name = "add_child_property"
[object.function.return]
bool_return_is_error = "Failed to add child property"
[[object.function]]
name = "remove_child_property"
[object.function.return]
bool_return_is_error = "Failed to remove child property"
[[object.function]]
name = "set_child_property"
[object.function.return]
bool_return_is_error = "Failed to set child property"
[[object.function]]
name = "set_parent"
[object.function.return]
bool_return_is_error = "`TimelineElement` already had a parent or its parent was the same as specified"
[[object.function]]
name = "set_start"
[[object.function.parameter]]
name = "start"
mandatory = true
bool_return_is_error = "Failed to save thumbnail"
[[object.function]]
name = "set_timeline"
[object.function.return]
bool_return_is_error = "`Failed to set timeline"
[[object.function]]
name = "trim"
[[object.function.parameter]]
name = "start"
mandatory = true
[object.function.return]
bool_return_is_error = "Failed to trim"
[[object]]
name = "GES.TitleClip"
status = "generate"
[[object.function]]
pattern = "[^n].*"
# all functions except constructor are deprecated since 1.6
ignore = true
[[object.property]]
pattern = ".*"
# all properties are deprecated since 1.6
ignore = true
bool_return_is_error = "Failed to set timeline"
[[object]]
name = "GES.Track"
@ -461,6 +194,141 @@ trait_name = "GESTrackExt"
[object.function.return]
bool_return_is_error = "Failed to remove element"
[[object]]
name = "GES.Asset"
status = "generate"
[[object.function]]
name = "set_proxy"
[object.function.return]
bool_return_is_error = "Failed to set proxy"
[[object.function]]
name = "unproxy"
[object.function.return]
bool_return_is_error = "Failed to unproxy asset"
[[object]]
name = "GES.Clip"
status = "generate"
[[object.function]]
name = "move_to_layer"
[object.function.return]
bool_return_is_error = "Failed to move clip to specified layer"
[[object.function]]
name = "set_top_effect_index"
[object.function.return]
bool_return_is_error = "Failed to move effect"
[[object.function]]
name = "set_top_effect_priority"
[object.function.return]
bool_return_is_error = "Failed to set the top effect priority"
[[object.function]]
name = "add_asset"
[object.function.return]
nullable_return_is_error = "Failed to add asset"
[[object.function]]
name = "split"
[object.function.return]
nullable_return_is_error = "Failed to split clip"
[[object]]
name = "GES.Extractable"
status = "generate"
[[object.function]]
name = "set_asset"
[object.function.return]
bool_return_is_error = "Failed to set asset"
[[object]]
name = "GES.Layer"
status = "generate"
[[object.function]]
name = "add_clip"
[object.function.return]
bool_return_is_error = "Failed to add clip"
[[object.function]]
name = "remove_clip"
[object.function.return]
bool_return_is_error = "Failed to remove clip"
[[object.function]]
name = "add_asset"
[object.function.return]
nullable_return_is_error = "Failed to add asset"
[[object]]
name = "GES.Project"
status = "generate"
[[object.function]]
name = "add_encoding_profile"
[object.function.return]
bool_return_is_error = "Failed to add profile"
[[object.function]]
name = "remove_asset"
[object.function.return]
bool_return_is_error = "Failed to remove asset"
[[object]]
name = "GES.TimelineElement"
status = "generate"
manual_traits = ["TimelineElementExtManual"]
[[object.function]]
name = "ripple"
[object.function.return]
bool_return_is_error = "Failed to ripple"
[[object.function]]
name = "ripple_end"
[object.function.return]
bool_return_is_error = "Failed to ripple"
[[object.function]]
name = "roll_end"
[object.function.return]
bool_return_is_error = "Failed to roll"
[[object.function]]
name = "roll_start"
[object.function.return]
bool_return_is_error = "Failed to roll"
[[object.function]]
name = "set_name"
[object.function.return]
bool_return_is_error = "Failed to set name"
[[object.function]]
name = "set_parent"
[object.function.return]
bool_return_is_error = "`TimelineElement` already had a parent or its parent was the same as specified"
[[object.function]]
name = "set_timeline"
[object.function.return]
bool_return_is_error = "Failed to set timeline"
[[object.function]]
name = "trim"
[object.function.return]
bool_return_is_error = "Failed to trim"
[[object.function]]
name = "copy"
[object.function.return]
nullable_return_is_error = "Failed to copy timeline element"
[[object.function]]
name = "paste"
[object.function.return]
nullable_return_is_error = "Failed to paste timeline element"
[[object]]
name = "GES.TrackElement"
status = "generate"
@ -473,85 +341,3 @@ status = "generate"
name = "remove_control_binding"
[object.function.return]
bool_return_is_error = "Failed to remove control binding"
[[object.function]]
name = "set_child_property"
[[object.function.parameter]]
name = "value"
const = true
[object.function.return]
bool_return_is_error = "Failed to set child property"
[[object.function]]
name = "set_child_property_by_pspec"
[[object.function.parameter]]
name = "value"
const = true
[[object]]
name = "GES.TrackElementAsset"
status = "generate"
concurrency = "send+sync"
[[object]]
name = "GES.UriClip"
status = "generate"
[[object.function]]
name = "new"
[object.function.return]
nullable_return_is_error = "Failed to create Uri clip from Uri"
[[object]]
name = "GES.UriClipAsset"
status = "generate"
concurrency = "send+sync"
[[object.function]]
name = "new"
# broken finish function in < 1.16
manual = true
[[object]]
name = "GES.UriSourceAsset"
status = "generate"
concurrency = "send+sync"
[[object]]
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.BufferList"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "Option"
[[object]]
name = "Gst.Object"
# For renaming the trait...
status = "manual"
trait_name = "GstObjectExt"
[[object]]
name = "Gst.Sample"
status = "manual"
ref_mode = "ref"
[[object.function]]
name = "get_caps"
# Function moved to SampleRef
ignore = true
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref"

View file

@ -1,14 +1,14 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstGL"
version = "1.0"
min_cfg_version = "1.14"
target_path = "gstreamer-gl"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"GLib",
@ -19,50 +19,48 @@ external_libraries = [
]
generate = [
"GstGL.GLBaseFilter",
"GstGL.GLBaseMemoryAllocator",
"GstGL.GLBaseSrc",
"GstGL.GLBufferPool",
"GstGL.GLConfigCaveat",
"GstGL.GLConfigSurfaceType",
"GstGL.GLContextError",
# Enums
"GstGL.GLFormat",
"GstGL.GLMemoryAllocator",
"GstGL.GLQueryType",
"GstGL.GLSLError",
"GstGL.GLSLVersion",
"GstGL.GLStereoDownmix",
"GstGL.GLTextureTarget",
"GstGL.GLStereoDownmix",
"GstGL.GLUploadReturn",
# Enums Errors
#"GstGL.GLBaseMemoryError",
"GstGL.GLContextError",
"GstGL.GLSLError",
"GstGL.GLWindowError",
# Records
#"GstGL.GLAllocationParams",
#"GstGL.GLRenderbufferAllocationParams",
#"GstGL.GLVideoAllocationParams",
# Objects
"GstGL.GLBaseFilter",
#"GstGL.GLBufferPool",
"GstGL.GLFramebuffer",
#"GstGL.GLBaseMemory",
#"GstGL.GLBaseMemoryAllocator",
#"GstGL.GLMemoryPBOAllocator",
#"GstGL.GLRenderbufferAllocator",
]
manual = [
"GLib.Error",
"GLib.Thread",
"GObject.Object",
"GObject.Value",
"Gst.AllocationParams",
"Gst.Allocator",
"Gst.BufferPool",
"Gst.Context",
"Gst.Element",
"Gst.FlowReturn",
"Gst.Memory",
"Gst.Object",
"Gst.FlowReturn",
"Gst.PadDirection",
"GstBase.BaseSrc",
"GstBase.BaseTransform",
"GstBase.PushSrc",
"GstGL.GLBaseMemory",
"GstGL.GLMemory",
"GstVideo.VideoAlignment",
"GstVideo.VideoInfo",
"GstVideo.VideoMultiviewFlags",
"GstVideo.VideoMultiviewMode",
"GstVideo.VideoOverlay",
]
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref-mut"
[[object]]
name = "Gst.Buffer"
status = "manual"
@ -78,176 +76,189 @@ name = "Gst.Query"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstGL.*"
status = "generate"
[[object.function]]
name = "buffer_add_gl_sync_meta"
manual = true
[[object.function]]
name = "buffer_add_gl_sync_meta_full"
manual = true
[[object.function]]
name = "context_get_gl_display"
manual = true
[[object.function]]
name = "context_set_gl_display"
manual = true
[[object.function]]
name = "gl_sync_meta_api_get_type"
manual = true
[[object.function]]
name = "gl_handle_context_query"
manual = true
[[object.function]]
name = "gl_handle_set_context"
manual = true
# gir has issues with the inout annotation
[[object.function]]
name = "gl_ensure_element_data"
ignore = true
# These need manual bindings
[[object.function]]
name = "buffer_pool_config_get_gl_allocation_params"
ignore = true
[[object.function]]
name = "buffer_pool_config_set_gl_allocation_params"
ignore = true
[[object.function]]
name = "buffer_pool_config_get_gl_min_free_queue_size"
ignore = true
[[object.function]]
name = "buffer_pool_config_set_gl_min_free_queue_size"
ignore = true
# Needs manual binding to be an extension on gst_video::VideoAffineTransformationMeta
[[object.function]]
name = "gl_get_affine_transformation_meta_as_ndc"
ignore = true
[[object.function]]
name = "gl_set_affine_transformation_meta_from_ndc"
ignore = true
# Needs manual binding to be an extension on gst_video::VideoInfo
[[object.function]]
name = "gl_get_plane_data_size"
ignore = true
[[object.function]]
name = "gl_get_plane_start"
ignore = true
# Needs manual binding
[[object.function]]
name = "gl_insert_debug_marker"
ignore = true
# Needs manual binding around `[[f32;4];4]`
[[object.function]]
name = "gl_multiply_matrix4"
ignore = true
# Should be a method on GLContextExtManual
[[object.function]]
name = "gl_sized_gl_format_from_gl_format_type"
ignore = true
# Should be method on GLTextureTarget and/or Value
[[object.function]]
name = "gl_value_get_texture_target_mask"
ignore = true
# This needs to return a value
# target_mask is a bitwise mask of the GLTextureTarget enum
[[object.function]]
name = "gl_value_set_texture_target_from_mask"
ignore = true
# This needs more checks like target != GLTextureTarget::None
# and a value that's already initialized as a string.
# Could become GLTextureTarget::to_value() maybe.
[[object.function]]
name = "gl_value_set_texture_target"
ignore = true
# Should be a method/constructor on GLSLVersion
[[object.function]]
name = "gl_version_to_glsl_version"
ignore = true
# ffi constant is ignored
[[object.constant]]
name = "GL_COLOR_CONVERT_FORMATS"
ignore = true
[[object.constant]]
name = "GL_COLOR_CONVERT_EXT_FORMATS"
ignore = true
[[object.constant]]
name = "GL_MEMORY_VIDEO_FORMATS_STR"
ignore = true
[[object.constant]]
name = "GL_MEMORY_VIDEO_EXT_FORMATS"
ignore = true
# the below need manual bindings
[[object.function]]
name = "is_gl_base_memory"
ignore = true
[[object.function]]
name = "is_gl_buffer"
ignore = true
[[object.function]]
name = "is_gl_memory"
ignore = true
[[object.function]]
name = "is_gl_memory_pbo"
ignore = true
[[object.function]]
name = "is_gl_renderbuffer"
ignore = true
# duplicate of gst_gl_stereo_downmix_get_type
[[object.function]]
name = "gl_stereo_downmix_mode_get_type"
ignore = true
[[object]]
name = "GstGL.GLAllocationParams"
status = "generate"
[[object.function]]
name = "free_data"
# Function should only be called by subclasses from
# an overridden `free` vfunc.
ignore = true
[[object.function]]
name = "copy_data"
[[object.function.parameter]]
name = "src"
const = true
[[object]]
name = "GstGL.GLAPI"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object.member]]
name = "any"
ignore = true
[[object]]
name = "GstGL.GLBuffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstGL.GLQuery"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstGL.GLContext"
status = "generate"
manual_traits = ["GLContextExtManual"]
[[object.function]]
name = "new_wrapped"
# input handle
ignore = true
[[object.function]]
name = "get_gl_context"
# return handle
ignore = true
[[object.function]]
name = "get_current_gl_context"
# return handle
ignore = true
[[object.function]]
name = "get_proc_address"
ignore = true
[[object.function]]
name = "default_get_proc_address"
ignore = true
[[object.function]]
name = "get_proc_address_with_platform"
ignore = true
[[object.function]]
name = "thread_add"
# unimplemented GLContextThreadFunc
ignore = true
[[object.function]]
name = "get_thread"
# glib::Thread not found in `glib`
ignore = true
[[object.function]]
name = "get_gl_context_for_thread"
# glib::Thread not found in `glib`
ignore = true
[[object.function]]
name = "error_quark"
# bad error domain definition in gir
ignore = true
[[object.function]]
name = "activate"
[object.function.return]
bool_return_is_error = "Failed to activate OpenGL context"
[[object.function]]
name = "get_display"
[object.function.return]
nullable = false
[[object.function]]
name = "set_window"
[object.function.return]
bool_return_is_error = "Failed to set window"
[[object]]
name = "GstGL.GLDisplay"
status = "generate"
[[object.function]]
name = "find_window"
# unimplemented pointer
ignore = true
[[object.function]]
name = "get_handle"
# return handle
ignore = true
[[object.function]]
name = "get_gl_context_for_thread"
# glib::Thread not found in `glib`
ignore = true
[[object.function]]
name = "add_context"
[object.function.return]
bool_return_is_error = "Failed to add OpenGL context"
[[object.function]]
name = "remove_window"
[object.function.return]
bool_return_is_error = "Failed to remove window"
[[object.function]]
name = "create_window"
[object.function.return]
nullable_return_is_error = "Failed to create window"
[[object]]
name = "GstGL.GLDisplayEGL"
status = "generate"
final_type = true
cfg_condition = "feature = \"egl\""
[[object]]
name = "GstGL.GLDisplayX11"
status = "generate"
final_type = true
cfg_condition = "feature = \"x11\""
[[object]]
name = "GstGL.GLDisplayWayland"
status = "generate"
final_type = true
cfg_condition = "feature = \"wayland\""
[[object]]
name = "GstGL.GLWindow"
status = "generate"
[[object.function]]
name = "get_window_handle"
# return native handle
ignore = true
[[object.function]]
name = "send_message"
# callback
ignore = true
[[object.function]]
name = "send_message_async"
# callback
ignore = true
[[object.function]]
name = "set_close_callback"
# callback
ignore = true
[[object.function]]
name = "set_draw_callback"
# callback
ignore = true
[[object.function]]
name = "set_render_rectangle"
[object.function.return]
bool_return_is_error = "Failed to set the specified region"
[[object.function]]
name = "set_resize_callback"
# callback
ignore = true
[[object.function]]
name = "set_window_handle"
# handle
ignore = true
[[object.function]]
name = "get_display"
# handle
ignore = true
[[object.function]]
name = "error_quark"
# bad error domain definition in gir
ignore = true
[[object]]
name = "GstGL.GLColorConvert"
status = "generate"
@ -272,166 +283,6 @@ final_type = true
# transfer gst::Buffer not reference
ignore = true
[[object]]
name = "GstGL.GLContext"
status = "generate"
manual_traits = ["GLContextExtManual"]
[[object.function]]
name = "new_wrapped"
# input handle
manual = true
[[object.function]]
name = "get_gl_context"
# return handle
manual = true
[[object.function]]
name = "get_current_gl_context"
# return handle
manual = true
[[object.function]]
name = "get_proc_address"
manual = true
[[object.function]]
name = "default_get_proc_address"
ignore = true
[[object.function]]
name = "get_proc_address_with_platform"
manual = true
[[object.function]]
name = "thread_add"
manual = true
[[object.function]]
name = "get_thread"
# glib::Thread not found in `glib`
ignore = true
[[object.function]]
name = "get_gl_context_for_thread"
# glib::Thread not found in `glib`
ignore = true
[[object.function]]
name = "error_quark"
# bad error domain definition in gir
ignore = true
[[object.function]]
name = "activate"
[object.function.return]
bool_return_is_error = "Failed to activate OpenGL context"
[[object.function]]
name = "set_window"
[object.function.return]
bool_return_is_error = "Failed to set window"
[[object]]
name = "GstGL.GLDisplay"
status = "generate"
[[object.function]]
name = "find_window"
# unimplemented pointer
ignore = true
[[object.function]]
name = "get_handle"
# return handle
manual = true
[[object.function]]
name = "get_gl_context_for_thread"
# require object lock
manual = true
[[object.function]]
name = "create_context"
# require object lock
manual = true
[[object.function]]
name = "add_context"
# require object lock
manual = true
[[object.function]]
name = "remove_context"
# require object lock
manual = true
[[object.function]]
name = "remove_window"
[object.function.return]
bool_return_is_error = "Failed to remove window"
[[object.function]]
name = "create_window"
[object.function.return]
nullable_return_is_error = "Failed to create window"
[[object.function]]
name = "ensure_context"
# inout parameter
manual = true
[[object]]
name = "GstGL.GLDisplayType"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object.member]]
name = "any"
ignore = true
[[object]]
name = "GstGL.GLFilter"
status = "generate"
[[object.function]]
name = "add_rgba_pad_templates"
# Automatically called if
# GLFilterImpl::ADD_RGBA_PAD_TEMPLATES is true
ignore = true
[[object.function]]
pattern = "render_to_target\\w*"
[[object.function.parameter]]
pattern = "input|output"
const = true
[[object.function]]
name = "render_to_target"
[object.function.return]
bool_return_is_error = "`func` returned `false`"
[[object.function]]
name = "filter_texture"
[object.function.return]
bool_return_is_error = "Failed to transform texture"
[[object]]
name = "GstGL.GLFramebuffer"
status = "generate"
manual_traits = ["GLFramebufferExtManual"]
[[object.function]]
name = "attach"
# attachment_point parameter unchecked
unsafe = true
[[object.function]]
name = "draw_to_texture"
manual = true
[[object]]
name = "GstGL.GLOverlayCompositor"
status = "generate"
@ -447,19 +298,14 @@ final_type = true
version = "1.16"
[[object]]
name = "GstGL.GLPlatform"
name = "GstGL.GLSLStage"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object.member]]
name = "any"
ignore = true
final_type = true
[[object]]
name = "GstGL.GLQuery"
status = "manual"
ref_mode = "ref"
[[object.function]]
name = "set_strings"
[object.function.return]
bool_return_is_error = "Failed to attach stage to set strings"
[[object]]
name = "GstGL.GLShader"
@ -521,26 +367,6 @@ final_type = true
# array with size
ignore = true
[[object]]
name = "GstGL.GLSLProfile"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object.member]]
name = "any"
ignore = true
[[object]]
name = "GstGL.GLSLStage"
status = "generate"
final_type = true
[[object.function]]
name = "set_strings"
[object.function.return]
bool_return_is_error = "Failed to attach stage to set strings"
[[object]]
name = "GstGL.GLUpload"
status = "generate"
@ -561,21 +387,10 @@ final_type = true
# fix mutability
ignore = true
[[object]]
name = "GstGL.GLVideoAllocationParams"
status = "generate"
[[object.function]]
name = "free_data"
# Function should only be called by subclasses from
# an overridden `free` vfunc.
ignore = true
[[object.function]]
name = "copy_data"
[[object.function.parameter]]
name = "src_vid"
const = true
name = "get_input_template_caps"
[object.function.return]
nullable = false
[[object]]
name = "GstGL.GLViewConvert"
@ -597,61 +412,55 @@ final_type = true
# Result<Buffer, FlowReturn>
ignore = true
[[object.function]]
name = "tranform_caps"
[object.function.return]
nullable = false
[[object.function]]
name = "submit_input_buffer"
# Buffer ownership transfer
ignore = true
[[object]]
name = "GstGL.GLWindow"
name = "GstGL.GLDisplayType"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object.member]]
name = "any"
ignore = true
[[object.member]]
name = "egl_device"
version = "1.18"
[[object.function]]
name = "get_window_handle"
# return native handle
[[object]]
name = "GstGL.GLAPI"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object.member]]
name = "any"
ignore = true
[[object.function]]
name = "send_message"
# callback
[[object]]
name = "GstGL.GLPlatform"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object.member]]
name = "any"
ignore = true
[[object.function]]
name = "send_message_async"
# callback
[[object]]
name = "GstGL.GLSLProfile"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object.function]]
name = "set_close_callback"
# callback
ignore = true
[[object.function]]
name = "set_draw_callback"
# callback
ignore = true
[[object.function]]
name = "set_render_rectangle"
[object.function.return]
bool_return_is_error = "Failed to set the specified region"
[[object.function]]
name = "set_resize_callback"
# callback
ignore = true
[[object.function]]
name = "set_window_handle"
# handle
ignore = true
[[object.function]]
name = "get_display"
# handle
ignore = true
[[object.function]]
name = "error_quark"
# bad error domain definition in gir
[[object.member]]
name = "any"
ignore = true

View file

@ -1,20 +1,20 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstNet"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.8"
target_path = "gstreamer-net"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"Gio",
"GLib",
"GObject",
"Gst",
"Gio",
]
generate = [
@ -22,44 +22,52 @@ generate = [
manual = [
"GObject.Object",
"Gst.Bus",
"Gst.Clock",
"Gst.Object",
"Gst.Pipeline",
"Gst.Structure",
"Gst.Clock",
"Gst.Bus",
]
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "Option"
[[object]]
name = "GstNet.NetClientClock"
status = "generate"
final_type = true
[[object]]
name = "GstNet.NetTimeProvider"
status = "generate"
final_type = true
[[object.function]]
name = "new"
[object.function.return]
nullable_return_is_error = "Failed to create NetTimeProvider"
conversion_type = "scalar"
[[object]]
name = "GstNet.NtpClock"
status = "generate"
final_type = true
[[object.function]]
name = "new"
# Floating reference handling
ignore = true
[[object]]
name = "GstNet.PtpClock"
status = "generate"
final_type = true
[[object.function]]
name = "new"
[[object.function.parameter]]
name = "name"
[object.function.return]
nullable_return_is_error = "Can't create gst::PtpClock"
# Floating reference handling
ignore = true
[[object]]
name = "GstNet.NetClientClock"
status = "generate"
final_type = true
[[object.function]]
name = "new"
# Floating reference handling
ignore = true
[[object]]
name = "GstNet.NetTimeProvider"
status = "generate"
final_type = true
[[object.function]]
name = "new"
# Floating reference handling
ignore = true

View file

@ -1,14 +1,14 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstPbutils"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.8"
target_path = "gstreamer-pbutils"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"GLib",
@ -17,31 +17,21 @@ external_libraries = [
]
generate = [
"GstPbutils.AudioVisualizer",
"GstPbutils.AudioVisualizerShader",
"GstPbutils.DiscovererInfo",
"GstPbutils.DiscovererResult",
"GstPbutils.InstallPluginsContext",
"GstPbutils.InstallPluginsReturn",
"GstPbutils.PbUtilsCapsDescriptionFlags",
]
manual = [
"GLib.DateTime",
"GLib.Error",
"GLib.MainContext",
"GLib.MainLoop",
"GLib.Source",
"GLib.Type",
"GLib.DateTime",
"GLib.Variant",
"GObject.Object",
"Gst.Element",
"Gst.Object",
"Gst.Preset",
"Gst.Element",
]
[[object]]
name = "Gst.Buffer"
name = "Gst.Toc"
status = "manual"
ref_mode = "ref"
@ -50,23 +40,18 @@ name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "Option"
[[object]]
name = "Gst.Event"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Message"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Structure"
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Event"
status = "manual"
ref_mode = "ref"
@ -75,11 +60,203 @@ name = "Gst.TagList"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref-mut"
[[object]]
name = "Gst.Toc"
name = "Gst.ClockTime"
status = "manual"
ref_mode = "ref"
conversion_type = "scalar"
[[object]]
name = "GstPbutils.Discoverer"
status = "generate"
final_type = true
[[object.function]]
name = "discover_uri_async"
[object.function.return]
bool_return_is_error = "Failed to add URI to list of discovers"
# Manually implemented to use ClockTime
[[object.property]]
name = "timeout"
ignore = true
[[object.property]]
name = "use-cache"
version = "1.16"
[[object]]
name = "GstPbutils.DiscovererInfo"
status = "generate"
[[object.function]]
name = "copy"
[object.function.return]
nullable = false
[[object.function]]
name = "to_variant"
[object.function.return]
nullable_return_is_error = "Failed to serialize DiscovererInfo to Variant"
[[object.function]]
name = "from_variant"
[object.function.return]
nullable_return_is_error = "Failed to deserialize DiscovererInfo from Variant"
[[object]]
name = "GstPbutils.DiscovererStreamInfo"
status = "generate"
final_type = false
# Not useful
[[object.function]]
name = "list_free"
ignore = true
[[object.function]]
name = "get_stream_type_nick"
[object.function.return]
nullable = false
[[object]]
name = "GstPbutils.DiscovererAudioInfo"
status = "generate"
final_type = true
[[object]]
name = "GstPbutils.DiscovererVideoInfo"
status = "generate"
final_type = true
# Implement manually to expose as gst::Fraction
[[object.function]]
name = "get_framerate_num"
ignore = true
[[object.function]]
name = "get_framerate_denom"
ignore = true
[[object.function]]
name = "get_par_num"
ignore = true
[[object.function]]
name = "get_par_denom"
ignore = true
[[object]]
name = "GstPbutils.DiscovererSubtitleInfo"
status = "generate"
final_type = true
[[object]]
name = "GstPbutils.DiscovererContainerInfo"
status = "generate"
final_type = true
[[object]]
name = "GstPbutils.EncodingProfile"
status = "generate"
# Ignore all setters, making it immutable. A builder will be provided.
[[object.function]]
name = "new"
ignore = true
[[object.function]]
pattern = "set_.*"
ignore = true
[[object.function]]
pattern = "get_restriction"
ignore = true
[[object.function]]
name = "copy"
[object.function.return]
nullable = false
[[object.function]]
name = "get_input_caps"
[object.function.return]
nullable = false
[[object.function]]
name = "get_format"
[object.function.return]
nullable = false
[[object.property]]
name = "restriction-caps"
# encodingprofile is immutable after constructed
ignore = true
[[object.function]]
name = "from_discoverer"
[object.function.return]
nullable_return_is_error = "Failed to create EncodingProfile from DiscovererInfo"
[[object]]
name = "GstPbutils.EncodingContainerProfile"
status = "generate"
# Make it immutable, only able to be constructed for a builder
[[object.function]]
name = "new"
ignore = true
[[object.function]]
name = "add_profile"
ignore = true
[[object]]
name = "GstPbutils.EncodingTarget"
status = "generate"
[[object.function]]
name = "add_profile"
# can be provided on constructor and we better
# consider this immutable
ignore = true
[[object.function]]
name = "get_category"
[object.function.return]
nullable = false
[[object.function]]
name = "get_description"
[object.function.return]
nullable = false
[[object.function]]
name = "get_name"
[object.function.return]
nullable = false
[[object]]
name = "GstPbutils.EncodingAudioProfile"
status = "generate"
final_type = true
# Ignore all setters, making it immutable. A builder will be provided.
[[object.function]]
name = "new"
ignore = true
[[object.function]]
pattern = "set_.*"
ignore = true
[[object]]
name = "GstPbutils.EncodingVideoProfile"
status = "generate"
final_type = true
# Ignore all setters, making it immutable. A builder will be provided.
[[object.function]]
name = "new"
ignore = true
[[object.function]]
pattern = "set_.*"
ignore = true
[[object]]
name = "GstPbutils.*"
@ -89,99 +266,10 @@ status = "generate"
pattern = ".+"
ignore = true
# Codec utils need some special care
[[object.function]]
pattern = "codec_utils_(aac_caps_set_level_and_profile|h264_caps_set_level_and_profile|h265_caps_set_level_tier_and_profile|mpeg4video_caps_set_level_and_profile)"
# Needs mutable caps references and checks for the caps
manual = true
[[object.function]]
name = "codec_utils_aac_get_level"
[object.function.return]
nullable_return_is_error = "Failed to get AAC level"
[[object.function]]
name = "codec_utils_aac_get_profile"
[object.function.return]
nullable_return_is_error = "Failed to get AAC profile"
[[object.function]]
name = "codec_utils_h264_get_level"
[object.function.return]
nullable_return_is_error = "Failed to get H264 level"
[[object.function]]
name = "codec_utils_h264_get_profile"
[object.function.return]
nullable_return_is_error = "Failed to get H264 profile"
[[object.function]]
name = "codec_utils_h264_get_profile_flags_level"
manual = true
[[object.function]]
name = "codec_utils_h265_get_level"
[object.function.return]
nullable_return_is_error = "Failed to get H265 level"
[[object.function]]
name = "codec_utils_h265_get_profile"
[object.function.return]
nullable_return_is_error = "Failed to get H265 profile"
[[object.function]]
name = "codec_utils_h265_get_tier"
[object.function.return]
nullable_return_is_error = "Failed to get H265 tier"
[[object.function]]
name = "codec_utils_mpeg4video_get_level"
[object.function.return]
nullable_return_is_error = "Failed to get MPEG4 video level"
[[object.function]]
name = "codec_utils_mpeg4video_get_profile"
[object.function.return]
nullable_return_is_error = "Failed to get MPEG4 video profile"
[[object.function]]
name = "codec_utils_caps_get_mime_codec"
[object.function.return]
nullable_return_is_error = "Unsupported caps"
[[object.function]]
name = "codec_utils_av1_get_level"
[object.function.return]
nullable_return_is_error = "Failed to get AV1 level"
[[object.function]]
name = "codec_utils_opus_create_caps"
# Manual checks
manual = true
[[object.function]]
name = "codec_utils_opus_create_header"
# Manual checks
manual = true
[[object.function]]
name = "codec_utils_opus_parse_caps"
# Manual checks
manual = true
[[object.function]]
name = "codec_utils_opus_parse_header"
# Manual checks
manual = true
[[object.function]]
name = "codec_utils_opus_create_caps_from_header"
# Use &CapsRef
manual = true
[[object.function]]
pattern = "(codec_utils_caps_get_mime_codec|pb_utils_get_caps_description_flags|pb_utils_get_file_extension_from_caps)"
# Use &CapsRef
manual = true
pattern = "codec_utils.*"
ignore = true
# Plugin installer API needs some manual impls
[[object.function]]
@ -198,7 +286,7 @@ status = "generate"
name = "is_missing_plugin_message"
ignore = true
# Initialization is handled implicitly
# Initialization is handled implicitly
[[object.function]]
name = "pb_utils_init"
ignore = true
@ -223,49 +311,31 @@ status = "generate"
name = "pb_utils_get_encoder_description"
ignore = true
[[object.function]]
name = "pb_utils_get_element_description"
[object.function.return]
nullable_return_is_error = "Failed to get element description"
[[object.function]]
name = "pb_utils_get_sink_description"
[object.function.return]
nullable_return_is_error = "Failed to get sink description"
[[object.function]]
name = "pb_utils_get_source_description"
[object.function.return]
nullable_return_is_error = "Failed to get source description"
[[object.function]]
name = "plugins_base_version_string"
assertion = "skip"
[object.function.return]
nullable = false
[[object.function]]
name = "plugins_base_version"
assertion = "skip"
[[object]]
name = "GstPbutils.Discoverer"
status = "generate"
final_type = true
[[object.function]]
name = "discover_uri_async"
[object.function.return]
bool_return_is_error = "Failed to add URI to list of discovers"
[[object.function]]
name = "new"
[[object.function.parameter]]
name = "timeout"
mandatory = true
# Manually implemented to use ClockTime
[[object.property]]
name = "timeout"
manual = true
[[object.property]]
name = "use-cache"
version = "1.16"
[[object]]
name = "GstPbutils.DiscovererAudioInfo"
status = "generate"
final_type = true
[[object]]
name = "GstPbutils.DiscovererContainerInfo"
status = "generate"
final_type = true
[[object]]
name = "GstPbutils.DiscovererSerializeFlags"
status = "generate"
@ -275,125 +345,3 @@ status = "generate"
[[object.member]]
name = "all"
ignore = true
[[object]]
name = "GstPbutils.DiscovererStreamInfo"
status = "generate"
final_type = false
# Not useful
[[object.function]]
name = "list_free"
ignore = true
[[object]]
name = "GstPbutils.DiscovererSubtitleInfo"
status = "generate"
final_type = true
[[object]]
name = "GstPbutils.DiscovererVideoInfo"
status = "generate"
final_type = true
# Implement manually to expose as gst::Fraction
[[object.function]]
name = "get_framerate_num"
manual = true
[[object.function]]
name = "get_framerate_denom"
manual = true
[[object.function]]
name = "get_par_num"
manual = true
[[object.function]]
name = "get_par_denom"
manual = true
[[object]]
name = "GstPbutils.EncodingAudioProfile"
status = "generate"
final_type = true
# Ignore all setters, making it immutable. A builder will be provided.
[[object.function]]
name = "new"
manual = true
[[object.function]]
pattern = "set_.*"
manual = true
[[object]]
name = "GstPbutils.EncodingContainerProfile"
status = "generate"
# Make it immutable, only able to be constructed for a builder
[[object.function]]
name = "new"
manual = true
[[object.function]]
name = "add_profile"
manual = true
[[object]]
name = "GstPbutils.EncodingProfile"
status = "generate"
# Ignore all setters, making it immutable. A builder will be provided.
[[object.function]]
name = "new"
manual = true
[[object.function]]
pattern = "set_.*"
manual = true
[[object.function]]
pattern = "get_restriction"
manual = true
[[object.function]]
name = "get_element_properties"
# Use custom wrapper types
manual = true
[[object.property]]
name = "restriction-caps"
# encodingprofile is immutable after constructed
ignore = true
[[object.function]]
name = "from_discoverer"
[object.function.return]
nullable_return_is_error = "Failed to create EncodingProfile from DiscovererInfo"
[[object.function]]
name = "from_string"
[object.function.return]
nullable_return_is_error = "Failed to create EncodingProfile from string"
[[object]]
name = "GstPbutils.EncodingTarget"
status = "generate"
[[object.function]]
name = "add_profile"
# can be provided on constructor and we better
# consider this immutable
ignore = true
[[object.function]]
name = "new"
[object.function.return]
nullable_return_is_error = "Failed to create EncodingTarget"
[[object]]
name = "GstPbutils.EncodingVideoProfile"
status = "generate"
final_type = true
# Ignore all setters, making it immutable. A builder will be provided.
[[object.function]]
name = "new"
manual = true
[[object.function]]
pattern = "set_.*"
manual = true

View file

@ -1,14 +1,14 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstPlayer"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.12"
target_path = "gstreamer-player"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"GLib",
@ -19,11 +19,9 @@ external_libraries = [
generate = [
"GstPlayer.PlayerColorBalanceType",
"GstPlayer.PlayerError",
"GstPlayer.PlayerMediaInfo",
"GstPlayer.PlayerSignalDispatcher",
"GstPlayer.PlayerSnapshotFormat",
"GstPlayer.PlayerState",
"GstPlayer.PlayerStreamInfo",
"GstPlayer.PlayerVideoRenderer",
"GstPlayer.PlayerVisualization",
]
@ -32,8 +30,8 @@ manual = [
"GLib.Error",
"GLib.MainContext",
"GObject.Object",
"Gst.Element",
"Gst.Object",
"Gst.Element",
"GstVideo.VideoMultiviewFlags",
"GstVideo.VideoMultiviewFramePacking",
]
@ -44,24 +42,20 @@ status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
name = "Gst.TagList"
status = "manual"
conversion_type = "Option"
ref_mode = "ref"
[[object]]
name = "Gst.Sample"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.TagList"
status = "manual"
ref_mode = "ref"
ref_mode = "ref-mut"
[[object]]
name = "GstPlayer.Player"
@ -76,12 +70,16 @@ final_type = true
[[object.function]]
name = "set_config"
# Custom type
manual = true
ignore = true
[[object.function]]
name = "new"
ignore = true
[[object.function]]
name = "get_config"
# Custom type
manual = true
ignore = true
[[object.function]]
pattern = "config_.*"
@ -89,10 +87,9 @@ final_type = true
ignore = true
[[object.function]]
name = "seek"
[[object.function.parameter]]
name = "position"
mandatory = true
name = "get_pipeline"
[object.function.return]
nullable = false
[[object.function]]
name = "set_audio_track"
@ -111,6 +108,10 @@ final_type = true
[[object.function]]
name = "set_visualization"
[[object.function.parameter]]
name = "name"
nullable = true
[object.function.return]
bool_return_is_error = "Failed to set visualization"
@ -122,7 +123,7 @@ final_type = true
name = "duration-changed"
concurrency = "send"
# Pass ClockTime instead of u64
manual = true
ignore = true
[[object.signal]]
name = "end-of-stream"
@ -144,13 +145,13 @@ final_type = true
name = "position-updated"
concurrency = "send"
# Pass ClockTime instead of u64
manual = true
ignore = true
[[object.signal]]
name = "seek-done"
concurrency = "send"
# Pass ClockTime instead of u64
manual = true
ignore = true
[[object.signal]]
name = "state-changed"
@ -176,16 +177,55 @@ final_type = true
name = "subtitle-video-offset"
version = "1.16"
[[object]]
name = "GstPlayer.PlayerStreamInfo"
status = "generate"
[[object.function]]
name = "get_video_snapshot"
# Use &StructureRef
manual = true
name = "get_stream_type"
[object.function.return]
nullable = false
[[object]]
name = "GstPlayer.PlayerAudioInfo"
status = "generate"
final_type = true
[[object]]
name = "GstPlayer.PlayerVideoInfo"
status = "generate"
final_type = true
[[object.function]]
name = "get_framerate"
# Fraction
ignore = true
[[object.function]]
name = "get_pixel_aspect_ratio"
# Fraction
ignore = true
[[object]]
name = "GstPlayer.PlayerSubtitleInfo"
status = "generate"
final_type = true
[[object]]
name = "GstPlayer.PlayerMediaInfo"
status = "generate"
final_type = true
[[object.function]]
name = "get_uri"
[object.function.return]
nullable = false
[[object]]
name = "GstPlayer.PlayerVideoOverlayVideoRenderer"
status = "generate"
final_type = true
[[object]]
name = "GstPlayer.PlayerGMainContextSignalDispatcher"
status = "generate"
@ -197,44 +237,6 @@ final_type = true
ignore = true
[[object]]
name = "GstPlayer.PlayerSubtitleInfo"
status = "generate"
final_type = true
[[object]]
name = "GstPlayer.PlayerVideoInfo"
status = "generate"
final_type = true
[[object.function]]
name = "get_framerate"
# Fraction
manual = true
[[object.function]]
name = "get_pixel_aspect_ratio"
# Fraction
manual = true
[[object]]
name = "GstPlayer.PlayerVideoOverlayVideoRenderer"
status = "generate"
final_type = true
[[object.function]]
name = "new"
manual = true
[[object.function]]
name = "new_with_sink"
# with_handle_and_sink() also calls this.
rename = "with_sink"
manual = true
[[object.function]]
name = "get_window_handle"
manual = true
[[object.function]]
name = "set_window_handle"
manual = true
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"

64
Gir_GstRtp.toml Normal file
View file

@ -0,0 +1,64 @@
[options]
girs_dir = "gir-files"
library = "GstRtp"
version = "1.0"
min_cfg_version = "1.8"
target_path = "gstreamer-rtp"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
doc_target_path = "docs/gstreamer-rtp/docs.md"
generate_display_trait = false
external_libraries = [
"GLib",
"GObject",
"Gst",
]
generate = [
"GstRtp.RTCPFBType",
"GstRtp.RTCPSDESType",
"GstRtp.RTCPType",
"GstRtp.RTCPXRType",
"GstRtp.RTPPayload",
"GstRtp.RTPProfile",
]
[[object]]
name = "Gst.Buffer"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstRtp.*"
status = "generate"
# expected enum `std::ffi::c_void`, found u8
[[object.function]]
name = "rtp_hdrext_get_ntp_56"
ignore = true
# expected enum `std::ffi::c_void`, found u8
[[object.function]]
name = "rtp_hdrext_get_ntp_64"
ignore = true
# manual bindings are needed for GstMeta
[[object.function]]
name = "rtp_source_meta_api_get_type"
ignore = true
[[object]]
name = "GstRtp.RTPBufferFlags"
status = "generate"
[[object.member]]
name = "last"
ignore = true
[[object]]
name = "GstRtp.RTPBufferMapFlags"
status = "generate"
[[object.member]]
name = "last"
ignore = true

View file

@ -1,19 +1,20 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstRtsp"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.8"
target_path = "gstreamer-rtsp"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
doc_target_path = "docs/gstreamer-rtsp/docs.md"
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"Gio",
"GLib",
"GObject",
"Gio",
"GstSdp"
]
@ -34,30 +35,35 @@ manual = [
"GLib.MainContext",
]
[[object]]
name = "GstRtsp.RTSPUrl"
status = "generate"
concurrency = "send"
[[object.function]]
name = "get_port"
ignore = true
[[object]]
name = "GstRtsp.RTSPAuthParam"
status = "generate"
concurrency="send"
version = "1.12"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "Option"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstRtsp.RTSPAuthCredential"
status = "generate"
concurrency = "send"
[[object]]
name = "GstRtsp.RTSPAuthParam"
status = "generate"
concurrency = "send"
ref_mode = "ref-mut"
[[object]]
name = "GstRtsp.RTSPLowerTrans"
@ -86,12 +92,3 @@ status = "generate"
[[object.member]]
name = "unknown"
ignore = true
[[object]]
name = "GstRtsp.RTSPUrl"
status = "generate"
concurrency = "send"
[[object.function]]
name = "get_port"
ignore = true

View file

@ -1,19 +1,20 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstRtspServer"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.8"
target_path = "gstreamer-rtsp-server"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
doc_target_path = "docs/gstreamer-rtsp-server/docs.md"
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"Gio",
"GLib",
"GObject",
"Gio",
"Gst",
"GstNet",
"GstRtsp",
@ -22,51 +23,58 @@ external_libraries = [
generate = [
"GstRtspServer.RTSPAddressPoolResult",
"GstRtspServer.RTSPFilterResult",
"GstRtspServer.RTSPMediaFactoryURI",
"GstRtspServer.RTSPMediaStatus",
"GstRtspServer.RTSPOnvifClient",
"GstRtspServer.RTSPOnvifMedia",
"GstRtspServer.RTSPOnvifServer",
"GstRtspServer.RTSPPublishClockMode",
"GstRtspServer.RTSPSuspendMode",
"GstRtspServer.RTSPThreadPool",
"GstRtspServer.RTSPThreadType",
"GstRtspServer.RTSPTransportMode",
"GstRtspServer.RTSPFilterResult"
]
manual = [
"Gio.Cancellable",
"Gio.Socket",
"Gio.SocketFamily",
"Gio.TlsAuthenticationMode",
"Gio.TlsCertificate",
"Gio.TlsCertificateFlags",
"Gio.TlsConnection",
"Gio.TlsDatabase",
"GLib.Error",
"GLib.IOCondition",
"GLib.MainContext",
"GLib.MainLoop",
"GLib.IOCondition",
"GLib.Source",
"GLib.MainLoop",
"GLib.ThreadPool",
"GLib.Error",
"GObject.Object",
"Gst.Bin",
"Gst.Clock",
"Gio.TlsCertificateFlags",
"Gio.TlsCertificate",
"Gio.TlsDatabase",
"Gio.TlsConnection",
"Gio.TlsAuthenticationMode",
"Gio.Socket",
"Gio.Cancellable",
"Gio.SocketFamily",
"Gst.Element",
"Gst.Pad",
"Gst.Pipeline",
"Gst.State",
"GstNet.NetTimeProvider",
"Gst.Clock",
"Gst.Pipeline",
"Gst.Pad",
"Gst.Bin",
"Gst.FlowReturn",
"GstRtsp.RTSPAuthMethod",
"GstRtsp.RTSPLowerTrans",
"GstRtsp.RTSPProfile",
"GstRtsp.RTSPRangeUnit",
"GstRtsp.RTSPUrl",
"GstRtsp.RTSPResult",
"GstRtsp.RTSPStatusCode",
"GstRtsp.RTSPUrl",
"GstRtsp.RTSPRangeUnit",
"GstRtsp.RTSPProfile",
"GstRtsp.RTSPLowerTrans"
]
[[object]]
name = "Gst.Message"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref-mut"
[[object]]
name = "Gst.Buffer"
status = "manual"
@ -77,172 +85,25 @@ name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "Option"
[[object]]
name = "Gst.FlowReturn"
status = "manual"
must_use = true
[object.conversion_type]
variant = "Result"
ok_type = "gst::FlowSuccess"
err_type = "gst::FlowError"
[[object]]
name = "Gst.Message"
status = "manual"
ref_mode = "ref"
[[object]]
name = "Gst.Object"
# For renaming the trait...
status = "manual"
trait_name = "GstObjectExt"
[[object]]
name = "Gst.Structure"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstRtspServer.RTSPAddress"
status = "generate"
concurrency = "send"
[[object]]
name = "GstRtspServer.RTSPAddressFlags"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object]]
name = "GstRtspServer.RTSPAddressPool"
status = "generate"
manual_traits = ["RTSPAddressPoolExtManual"]
[[object.function]]
name = "add_range"
[object.function.return]
bool_return_is_error = "Failed to add address range"
[[object.function]]
name = "acquire_address"
[object.function.return]
nullable_return_is_error = "Failed to acquire address"
[[object.function]]
name = "reserve_address"
manual = true
[[object]]
name="GstRtspServer.RTSPAuth"
status="generate"
manual_traits = ["RTSPAuthExtManual"]
[[object.function]]
name = "check"
[object.function.return]
bool_return_is_error = "Check failed"
[[object.function]]
name = "connect_accept_certificate"
# Use Result<(), LoggableError>
manual = true
[[object.function]]
name = "set_default_token"
# gir forgets mut
manual = true
[[object]]
name = "GstRtspServer.RTSPClient"
status = "generate"
manual_traits = ["RTSPClientExtManual"]
[[object.function]]
name = "attach"
manual = true
[[object.function]]
name = "set_send_func"
# user_data takes raw pointer
ignore = true
[[object]]
name = "GstRtspServer.RTSPContext"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstRtspServer.RTSPMedia"
status = "generate"
manual_traits = ["RTSPMediaExtManual"]
[[object.function]]
name = "suspend"
[object.function.return]
bool_return_is_error = "Failed to suspend media"
[[object.function]]
name = "unprepare"
[object.function.return]
bool_return_is_error = "Failed to unprepare media"
[[object.function]]
name = "prepare"
[object.function.return]
bool_return_is_error = "Failed to prepare media"
[[object.function]]
name = "unsuspend"
[object.function.return]
bool_return_is_error = "Failed to unsuspend media"
[[object.function]]
name = "take_pipeline"
manual = true
name = "GstRtspServer.RTSPToken"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstRtspServer.RTSPMediaFactory"
status = "generate"
manual_traits = ["RTSPMediaFactoryExtManual"]
[[object.function]]
name = "add_role_from_structure"
# Different structure mutability needed
manual = true
[[object.function]]
name = "construct"
[object.function.return]
nullable_return_is_error = "Failed to construct media"
[[object.function]]
name = "create_element"
[object.function.return]
nullable_return_is_error = "Failed to create media element"
name = "GstRtspServer.RTSPThread"
status = "manual"
ref_mode = "ref"
[[object]]
name="GstRtspServer.RTSPMountPoints"
status="generate"
[[object.function]]
name = "make_path"
[object.function.return]
nullable_return_is_error = "Failed to make path"
[[object]]
name = "GstRtspServer.RTSPOnvifMediaFactory"
status = "generate"
manual_traits = ["RTSPMediaExtManual"]
[[object.function]]
name = "requires_backchannel"
# on extension trait
manual = true
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "GstRtspServer.RTSPServer"
@ -251,7 +112,7 @@ manual_traits = ["RTSPServerExtManual"]
[[object.function]]
name = "attach"
manual = true
ignore = true
[[object.function]]
name = "io_func"
@ -264,53 +125,33 @@ manual_traits = ["RTSPServerExtManual"]
bool_return_is_error = "Failed to transfer to the connection"
[[object]]
name="GstRtspServer.RTSPSession"
status="generate"
name = "GstRtspServer.RTSPClient"
status = "generate"
manual_traits = ["RTSPClientExtManual"]
[[object.function]]
name = "get_media"
# Map to dup_media if new enough as get_media is not thread-safe
manual = true
[[object.function]]
name = "dup_media"
name = "attach"
ignore = true
[[object]]
name = "GstRtspServer.RTSPSessionMedia"
status = "generate"
[[object.function]]
name = "set_state"
[object.function.return]
bool_return_is_error = "Failed to set state of session media"
[[object]]
name = "GstRtspServer.RTSPSessionPool"
status = "generate"
manual_traits = ["RTSPSessionPoolExtManual"]
[[object.function]]
name = "remove"
[object.function.return]
bool_return_is_error = "Failed to remove session from pool"
[[object.function]]
name = "create_watch"
# GSource return
manual = true
[[object.function]]
name = "create"
[object.function.return]
nullable_return_is_error = "Failed to create session pool"
[[object]]
name = "GstRtspServer.RTSPStream"
status = "generate"
manual_traits = ["RTSPStreamExtManual"]
[[object.function]]
name = "recv_rtcp"
# Use Result<FlowSuccess, FlowError>
ignore = true
[[object.function]]
name = "recv_rtp"
# Use Result<FlowSuccess, FlowError>
ignore = true
[[object.function]]
name = "query_position"
ignore = true
[[object.function]]
name = "query_stop"
ignore = true
@ -350,10 +191,21 @@ status = "generate"
[object.function.return]
bool_return_is_error = "Failed to update crypto"
[[object]]
name = "GstRtspServer.RTSPAddress"
status = "generate"
concurrency = "send"
[[object]]
name = "GstRtspServer.RTSPStreamTransport"
status = "generate"
concurrency = "none"
manual_traits = ["RTSPStreamTransportExtManual"]
[[object.function]]
name = "recv_data"
# Use Result<FlowSuccess, FlowError>
ignore = true
[[object.function]]
name = "send_rtcp"
@ -371,16 +223,146 @@ concurrency = "none"
bool_return_is_error = "Failed to set active"
[[object]]
name = "GstRtspServer.RTSPThread"
status = "manual"
ref_mode = "ref"
name = "GstRtspServer.RTSPAddressPool"
status = "generate"
manual_traits = ["RTSPAddressPoolExtManual"]
[[object.function]]
name = "stop"
# Moved to RTSPThreadRef
name = "add_range"
[object.function.return]
bool_return_is_error = "Failed to add address range"
[[object.function]]
name = "acquire_address"
[object.function.return]
nullable_return_is_error = "Failed to acquire address"
[[object.function]]
name = "reserve_address"
ignore = true
[[object]]
name = "GstRtspServer.RTSPToken"
status = "manual"
ref_mode = "ref"
name = "GstRtspServer.RTSPMedia"
status = "generate"
manual_traits = ["RTSPMediaExtManual"]
[[object.function]]
name = "suspend"
[object.function.return]
bool_return_is_error = "Failed to suspend media"
[[object.function]]
name = "unprepare"
[object.function.return]
bool_return_is_error = "Failed to unprepare media"
[[object.function]]
name = "prepare"
[object.function.return]
bool_return_is_error = "Failed to prepare media"
[[object.function]]
name = "unsuspend"
[object.function.return]
bool_return_is_error = "Failed to unsuspend media"
[[object.function]]
name = "take_pipeline"
ignore = true
[[object]]
name = "GstRtspServer.RTSPMediaFactory"
status = "generate"
manual_traits = ["RTSPMediaFactoryExtManual"]
[[object.function]]
name = "add_role_from_structure"
# Different structure mutability needed
ignore = true
[[object.function]]
name = "construct"
[object.function.return]
nullable_return_is_error = "Failed to construct media"
[[object.function]]
name = "create_element"
[object.function.return]
nullable_return_is_error = "Failed to create media element"
[[object]]
name = "GstRtspServer.RTSPSessionMedia"
status = "generate"
[[object.function]]
name = "set_state"
[object.function.return]
bool_return_is_error = "Failed to set state of session media"
[[object]]
name = "GstRtspServer.RTSPSessionPool"
status = "generate"
manual_traits = ["RTSPSessionPoolExtManual"]
[[object.function]]
name = "remove"
[object.function.return]
bool_return_is_error = "Failed to remove session from pool"
[[object.function]]
name = "create_watch"
# GSource return
ignore = true
[[object.function]]
name = "create"
[object.function.return]
nullable_return_is_error = "Failed to create session pool"
[[object]]
name="GstRtspServer.RTSPAuth"
status="generate"
manual_traits = ["RTSPAuthExtManual"]
[[object.function]]
name = "check"
[object.function.return]
bool_return_is_error = "Check failed"
[[object.function]]
name = "connect_accept_certificate"
# Use Result<(), LoggableError>
ignore = true
[[object.function]]
name = "make_basic"
[object.function.return]
nullable = false
[[object.function]]
name = "set_default_token"
# gir forgets mut
ignore = true
[[object]]
name="GstRtspServer.RTSPMountPoints"
status="generate"
[[object.function]]
name = "make_path"
[object.function.return]
nullable_return_is_error = "Failed to make path"
[[object]]
name="GstRtspServer.RTSPSession"
status="generate"
[[object.function]]
name = "manage_media"
[object.function.return]
nullable_return_is_error = "Failed to manage media"
[[object]]
name = "GstRtspServer.RTSPAddressFlags"
status = "generate"
[[object.member]]
name = "none"
ignore = true

View file

@ -1,14 +1,15 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstSdp"
version = "1.0"
min_cfg_version = "1.14"
min_cfg_version = "1.8"
target_path = "gstreamer-sdp"
work_mode = "normal"
concurrency = "send"
generate_safety_asserts = true
single_version_file = true
doc_target_path = "docs/gstreamer-sdp/docs.md"
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"GLib",

510
Gir_GstVideo.toml Normal file
View file

@ -0,0 +1,510 @@
[options]
girs_dir = "gir-files"
library = "GstVideo"
version = "1.0"
min_cfg_version = "1.8"
target_path = "gstreamer-video"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
external_libraries = [
"GLib",
"GObject",
"Gst",
"GstBase",
]
generate = [
"GstVideo.VideoCodecFrameFlags",
"GstVideo.VideoFormatFlags",
"GstVideo.VideoTileMode",
"GstVideo.VideoColorMatrix",
"GstVideo.VideoMultiviewMode",
"GstVideo.VideoFieldOrder",
"GstVideo.VideoMultiviewFramePacking",
"GstVideo.VideoFilter",
"GstVideo.VideoCaptionType",
"GstVideo.VideoBufferPool",
"GstVideo.VideoAlphaMode",
"GstVideo.VideoChromaMode",
"GstVideo.VideoMatrixMode",
"GstVideo.VideoGammaMode",
"GstVideo.VideoPrimariesMode",
"GstVideo.VideoResamplerMethod",
"GstVideo.VideoDitherMethod",
"GstVideo.VideoAFDValue",
"GstVideo.VideoAFDSpec",
]
manual = [
"GLib.DateTime",
"GObject.Object",
"Gst.Object",
"Gst.Element",
"Gst.Buffer",
"Gst.BufferPool",
"Gst.BufferPoolAcquireParams",
"Gst.Allocator",
"Gst.AllocationParams",
"Gst.ClockTimeDiff",
"Gst.FlowReturn",
"Gst.TagList",
"Gst.TagMergeMode",
"GstBase.BaseSink",
"GstBase.BaseTransform",
"GstVideo.VideoCodecState",
"GstVideo.VideoCodecFrame",
"GstVideo.VideoInfo",
"GstVideo.VideoFormatInfo",
"GstVideo.VideoColorimetry",
"GstVideo.VideoColorRange",
"GstVideo.VideoFrame",
"GstVideo.VideoTimeCode",
"GstVideo.VideoTimeCodeInterval",
]
[[object]]
name = "Gst.ClockTime"
status = "manual"
conversion_type = "scalar"
[[object]]
name = "Gst.Caps"
status = "manual"
ref_mode = "ref"
[[object]]
name = "GstVideo.VideoOverlay"
status = "generate"
manual_traits = ["VideoOverlayExtManual"]
[[object.function]]
name = "set_render_rectangle"
[object.function.return]
bool_return_is_error = "Failed to set render rectangle"
[[object]]
name = "GstVideo.VideoDecoder"
status = "generate"
manual_traits = ["VideoDecoderExtManual"]
[[object.function]]
name = "allocate_output_frame"
ignore = true
[[object.function]]
name = "allocate_output_frame_with_params"
ignore = true
[[object.function]]
name = "finish_frame"
ignore = true
[[object.function]]
name = "release_frame"
ignore = true
[[object.function]]
name = "drop_frame"
ignore = true
[[object.function]]
name = "have_frame"
ignore = true
[[object.function]]
name = "set_latency"
ignore = true
[[object.function]]
name = "get_latency"
ignore = true
[[object.function]]
name = "get_frame"
ignore = true
[[object.function]]
name = "get_frames"
ignore = true
[[object.function]]
name = "get_oldest_frame"
ignore = true
[[object.function]]
name = "get_output_state"
ignore = true
[[object.function]]
name = "set_output_state"
ignore = true
[[object.function]]
name = "set_interlaced_output_state"
ignore = true
[[object.function]]
name = "negotiate"
ignore = true
[[object.function]]
name = "get_allocator"
ignore = true
[[object.function]]
name = "proxy_getcaps"
[object.function.return]
nullable = false
[[object.function]]
name = "allocate_output_buffer"
[object.function.return]
nullable_return_is_error = "Failed to allocate output buffer"
[[object]]
name = "GstVideo.VideoEncoder"
status = "generate"
manual_traits = ["VideoEncoderExtManual"]
[[object.function]]
name = "allocate_output_frame"
ignore = true
[[object.function]]
name = "allocate_output_frame_with_params"
ignore = true
[[object.function]]
name = "finish_frame"
ignore = true
[[object.function]]
name = "finish_subframe"
ignore = true
[[object.function]]
name = "set_latency"
ignore = true
[[object.function]]
name = "get_latency"
ignore = true
[[object.function]]
name = "get_frame"
ignore = true
[[object.function]]
name = "get_frames"
ignore = true
[[object.function]]
name = "get_oldest_frame"
ignore = true
[[object.function]]
name = "get_output_state"
ignore = true
[[object.function]]
name = "set_output_state"
ignore = true
[[object.function]]
name = "negotiate"
ignore = true
[[object.function]]
name = "get_allocator"
ignore = true
[[object.function]]
name = "proxy_getcaps"
[object.function.return]
nullable = false
[[object.function]]
name = "allocate_output_buffer"
[object.function.return]
nullable_return_is_error = "Failed to allocate output buffer"
[[object]]
name = "GstVideo.VideoFormat"
status = "generate"
[[object.derive]]
name = "Debug, Eq, PartialEq, Hash"
[[object.member]]
name = "p010_10be"
version = "1.10"
[[object.member]]
name = "p010_10le"
version = "1.10"
[[object.member]]
name = "iyu2"
version = "1.10"
[[object.member]]
name = "vyuy"
version = "1.12"
[[object.member]]
name = "gbra"
version = "1.12"
[[object.member]]
name = "gbra_10be"
version = "1.12"
[[object.member]]
name = "gbra_10le"
version = "1.12"
[[object.member]]
name = "gbr_12be"
version = "1.12"
[[object.member]]
name = "gbr_12le"
version = "1.12"
[[object.member]]
name = "gbra_12be"
version = "1.12"
[[object.member]]
name = "gbra_12le"
version = "1.12"
[[object.member]]
name = "i420_12be"
version = "1.12"
[[object.member]]
name = "i420_12le"
version = "1.12"
[[object.member]]
name = "i422_12be"
version = "1.12"
[[object.member]]
name = "i422_12le"
version = "1.12"
[[object.member]]
name = "y444_12be"
version = "1.12"
[[object.member]]
name = "y444_12le"
version = "1.12"
[[object.member]]
name = "gray10_le32"
version = "1.14"
[[object.member]]
name = "nv12_10le32"
version = "1.14"
[[object.member]]
name = "nv16_10le32"
version = "1.14"
[[object.member]]
name = "nv12_10le40"
version = "1.16"
[[object.member]]
name = "y210"
version = "1.16"
[[object.member]]
name = "y410"
version = "1.16"
[[object.member]]
name = "vuya"
version = "1.16"
[[object.member]]
name = "bgr10a2_le"
version = "1.16"
[[object.member]]
name = "rgb10a2_le"
version = "1.18"
[[object.member]]
name = "y444_16be"
version = "1.18"
[[object.member]]
name = "y444_16le"
version = "1.18"
[[object.member]]
name = "p016_be"
version = "1.18"
[[object.member]]
name = "p016_le"
version = "1.18"
[[object.member]]
name = "p012_be"
version = "1.18"
[[object.member]]
name = "p012_le"
version = "1.18"
[[object.member]]
name = "y212_be"
version = "1.18"
[[object.member]]
name = "y212_le"
version = "1.18"
[[object.member]]
name = "y412_be"
version = "1.18"
[[object.member]]
name = "y412_le"
version = "1.18"
[[object]]
name = "GstVideo.VideoSink"
status = "generate"
[[object.function]]
name = "center_rect"
# Implemented in video_rectangle
ignore = true
[[object]]
name = "GstVideo.VideoOverlayFormatFlags"
status = "generate"
[[object.function]]
name = "get_type"
version = "1.16"
[[object.member]]
name = "none"
ignore = true
[[object]]
name = "GstVideo.VideoTimeCodeFlags"
status = "generate"
[[object.function]]
name = "get_type"
version = "1.18"
[[object.member]]
name = "none"
ignore = true
[[object]]
name = "GstVideo.VideoFrameFlags"
status = "generate"
[[object.member]]
name = "top_field"
version = "1.16"
[[object.member]]
name = "bottom_field"
version = "1.16"
[[object]]
name = "GstVideo.VideoBufferFlags"
status = "generate"
[[object.member]]
name = "top_field"
version = "1.16"
[[object.member]]
name = "bottom_field"
version = "1.16"
[[object.member]]
name = "marker"
version = "1.18"
[[object.member]]
name = "last"
ignore = true
[[object]]
name = "GstVideo.VideoInterlaceMode"
status = "generate"
[[object.member]]
name = "alternate"
version = "1.16"
[[object]]
name = "GstVideo.VideoChromaSite"
status = "generate"
[[object.member]]
name = "unknown"
ignore = true
[[object]]
name = "GstVideo.VideoFlags"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object]]
name = "GstVideo.VideoFrameFlags"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object]]
name = "GstVideo.VideoMultiviewFlags"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object]]
name = "GstVideo.VideoPackFlags"
status = "generate"
[[object.member]]
name = "none"
ignore = true
[[object]]
name = "GstVideo.VideoTransferFunction"
status = "generate"
[[object.member]]
name = "bt2020_10"
version = "1.18"
[[object.member]]
name = "smpte2084"
version = "1.18"
[[object.member]]
name = "arib_std_b67"
version = "1.18"
[[object]]
name = "GstVideo.VideoColorPrimaries"
status = "generate"
[[object.member]]
name = "smptest428"
version = "1.16"
[[object.member]]
name = "smpterp431"
version = "1.16"
[[object.member]]
name = "smpteeg432"
version = "1.16"
[[object.member]]
name = "ebu3213"
version = "1.16"

View file

@ -1,14 +1,14 @@
[options]
girs_directories = ["../gir-files", "../gst-gir-files"]
girs_dir = "gir-files"
library = "GstWebRTC"
version = "1.0"
min_cfg_version = "1.14"
target_path = "gstreamer-webrtc"
work_mode = "normal"
concurrency = "send+sync"
generate_safety_asserts = true
single_version_file = true
generate_display_trait = false
trust_return_value_nullability = true
external_libraries = [
"GLib",
@ -18,61 +18,38 @@ external_libraries = [
]
generate = [
"GstWebRTC.WebRTCBundlePolicy",
"GstWebRTC.WebRTCDataChannelState",
"GstWebRTC.WebRTCDTLSSetup",
"GstWebRTC.WebRTCDTLSTransportState",
"GstWebRTC.WebRTCError",
"GstWebRTC.WebRTCFECType",
"GstWebRTC.WebRTCICECandidateStats",
"GstWebRTC.WebRTCICEComponent",
"GstWebRTC.WebRTCICEConnectionState",
"GstWebRTC.WebRTCICEGatheringState",
"GstWebRTC.WebRTCICEConnectionState",
"GstWebRTC.WebRTCICERole",
"GstWebRTC.WebRTCICEStream",
"GstWebRTC.WebRTCICETransportPolicy",
"GstWebRTC.WebRTCKind",
"GstWebRTC.WebRTCPeerConnectionState",
"GstWebRTC.WebRTCPriorityType",
"GstWebRTC.WebRTCRTPTransceiverDirection",
"GstWebRTC.WebRTCSCTPTransportState",
"GstWebRTC.WebRTCICEComponent",
"GstWebRTC.WebRTCSDPType",
"GstWebRTC.WebRTCDTLSSetup",
"GstWebRTC.WebRTCPeerConnectionState",
"GstWebRTC.WebRTCRTPTransceiverDirection",
"GstWebRTC.WebRTCSignalingState",
"GstWebRTC.WebRTCStatsType",
"GstWebRTC.WebRTCBundlePolicy",
"GstWebRTC.WebRTCDataChannelState",
"GstWebRTC.WebRTCICETransportPolicy",
"GstWebRTC.WebRTCPriorityType",
"GstWebRTC.WebRTCSCTPTransportState",
"GstWebRTC.WebRTCFECType",
]
manual = [
"GLib.Bytes",
"GLib.Error",
"GObject.Object",
"Gst.Caps",
"Gst.Structure",
"GstSdp.SDPMessage",
]
[[object]]
name = "GstWebRTC.WebRTCDataChannel"
status = "generate"
final_type = true
[[object.function]]
name = "on_error"
# takes ownership of SDP message
manual = true
[[object]]
name = "GstWebRTC.WebRTCDTLSTransport"
status = "generate"
final_type = true
[[object]]
name = "GstWebRTC.WebRTCICE"
status = "generate"
[[object.function]]
name = "add_candidate"
# ABI breakage in 1.24 needs working around
manual = true
[[object]]
name = "GstWebRTC.WebRTCICETransport"
status = "generate"
@ -93,11 +70,6 @@ name = "GstWebRTC.WebRTCRTPTransceiver"
status = "generate"
final_type = true
[[object]]
name = "GstWebRTC.WebRTCSCTPTransport"
status = "generate"
version = "1.20"
[[object]]
name = "GstWebRTC.WebRTCSessionDescription"
status = "generate"
@ -107,3 +79,13 @@ final_type = true
name = "new"
# takes ownership of SDP message
ignore = true
[[object]]
name = "GstWebRTC.WebRTCDataChannel"
status = "generate"
final_type = true
[[object.function]]
name = "on_error"
# takes ownership of the passed error
ignore = true

118
README.md
View file

@ -1,7 +1,7 @@
# gstreamer-rs [![crates.io](https://img.shields.io/crates/v/gstreamer.svg)](https://crates.io/crates/gstreamer) [![pipeline status](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/badges/main/pipeline.svg)](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/commits/main)
# gstreamer-rs [![crates.io](https://img.shields.io/crates/v/gstreamer.svg)](https://crates.io/crates/gstreamer) [![pipeline status](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/badges/master/pipeline.svg)](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/commits/master)
[GStreamer](https://gstreamer.freedesktop.org/) bindings for Rust.
Documentation can be found [here](https://gstreamer.pages.freedesktop.org/gstreamer-rs/stable/latest/docs/gstreamer/).
Documentation can be found [here](https://slomo.pages.freedesktop.org/rustdocs/gstreamer/gstreamer/).
These bindings are providing a safe API that can be used to interface with
GStreamer, e.g. for writing GStreamer-based applications and GStreamer plugins.
@ -24,7 +24,7 @@ API metadata provided by the GStreamer project.
## Installation
To build the GStreamer bindings or anything depending on them, you need to
have at least GStreamer 1.14 and gst-plugins-base 1.14 installed. In addition,
have at least GStreamer 1.8 and gst-plugins-base 1.8 installed. In addition,
some of the examples/tutorials require various GStreamer plugins to be
available, which can be found in gst-plugins-base, gst-plugins-good,
gst-plugins-bad, gst-plugins-ugly and/or gst-libav.
@ -38,20 +38,23 @@ package manager, or build them from source.
On Debian/Ubuntu they can be installed with
```console
```
$ apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \
gstreamer1.0-plugins-base gstreamer1.0-plugins-good \
gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly \
gstreamer1.0-libav libgstrtspserver-1.0-dev libges-1.0-dev
```
The minimum required version of the above libraries is >= 1.14. If you
The minimum required version of the above libraries is >= 1.8. If you
build the gstreamer-player sub-crate, or any of the examples that
depend on gstreamer-player, you must ensure that in addition to the above
packages, `libgstreamer-plugins-bad1.0-dev` is installed. See the `Cargo.toml`
files for the full details.
depend on gstreamer-player, you must ensure that in addition to the
above packages, `libgstreamer-plugins-bad1.0-dev` is installed and
that the version is >= 1.12. See the `Cargo.toml` files for the full
details.
```console
```
# Only if you wish to install gstreamer-player, make sure the version
# of this package is >= 1.12.
$ apt-get install libgstreamer-plugins-bad1.0-dev
```
@ -66,41 +69,31 @@ You can install GStreamer and the plugins via [Homebrew](https://brew.sh/) or
by installing the [binaries](https://gstreamer.freedesktop.org/data/pkg/osx/)
provided by the GStreamer project.
We recommend using the official GStreamer binaries over Homebrew, especially
as GStreamer in Homebrew is [currently broken](https://github.com/orgs/Homebrew/discussions/3740#discussioncomment-3804964).
#### Homebrew
```
$ brew install gstreamer gst-plugins-base gst-plugins-good \
gst-plugins-bad gst-plugins-ugly gst-libav gst-rtsp-server \
gst-editing-services
```
If you wish to install the gstreamer-player sub-crate, make sure the
version of these libraries is >= 1.12. Otherwise, a version >= 1.8 is
sufficient.
#### GStreamer Binaries
You need to download the *two* `.pkg` files from the GStreamer website and
install them, e.g. `gstreamer-1.0-1.20.4-universal.pkg` and
`gstreamer-1.0-devel-1.20.4-universal.pkg`.
install them, e.g. `gstreamer-1.0-1.12.3-x86_64.pkg` and
`gstreamer-1.0-devel-1.12.3-x86_64.pkg`.
After installation, you also need to set the `PATH` environment variable as
follows
After installation, you also need to install `pkg-config` (e.g. via Homebrew)
and set the `PKG_CONFIG_PATH` environment variable
```console
$ export PATH="/Library/Frameworks/GStreamer.framework/Versions/1.0/bin${PATH:+:$PATH}"
```
Also note that the `pkg-config` from GStreamer should be the first one in
the `PATH` as other versions have all kinds of quirks that will cause
problems.
#### Homebrew
Homebrew only installs various plugins if explicitly enabled, so some extra
`--with-*` flags may be required.
```console
$ brew install gstreamer gst-plugins-base gst-plugins-good \
gst-plugins-bad gst-plugins-ugly gst-libav gst-rtsp-server \
gst-editing-services --with-orc --with-libogg --with-opus \
--with-pango --with-theora --with-libvorbis --with-libvpx \
--enable-gtk3
$ export PKG_CONFIG_PATH="/Library/Frameworks/GStreamer.framework/Versions/Current/lib/pkgconfig${PKG_CONFIG_PATH:+:$PKG_CONFIG_PATH}"
```
Make sure the version of these libraries is >= 1.14.
<a name="installation-windows"/>
### Windows
@ -110,55 +103,44 @@ with `pacman` or by installing the
[binaries](https://gstreamer.freedesktop.org/data/pkg/windows/) provided by
the GStreamer project.
We recommend using the official GStreamer binaries over MSYS2.
#### GStreamer Binaries
You need to download the *two* `.msi` files for your platform from the
GStreamer website and install them, e.g. `gstreamer-1.0-x86_64-1.20.4.msi` and
`gstreamer-1.0-devel-x86_64-1.20.4.msi`. Make sure to select the version that
matches your Rust toolchain, i.e. MinGW or MSVC.
After installation set the `PATH` environment variable as follows:
```console
# For a UNIX-style shell:
$ export PATH="c:/gstreamer/1.0/msvc_x86_64/bin${PATH:+:$PATH}"
# For cmd.exe:
$ set PATH=C:\gstreamer\1.0\msvc_x86_64\bin;%PATH%
```
Make sure to update the path to where you have actually installed GStreamer
and for the corresponding toolchain.
Also note that the `pkg-config.exe` from GStreamer should be the first one in
the `PATH` as other versions have all kinds of quirks that will cause
problems.
#### MSYS2 / pacman
```console
$ pacman -S glib2-devel pkg-config \
mingw-w64-x86_64-gstreamer mingw-w64-x86_64-gst-plugins-base \
```
$ pacman -S pkg-config mingw-w64-x86_64-gstreamer mingw-w64-x86_64-gst-plugins-base \
mingw-w64-x86_64-gst-plugins-good mingw-w64-x86_64-gst-plugins-bad \
mingw-w64-x86_64-gst-plugins-ugly mingw-w64-x86_64-gst-libav \
mingw-w64-x86_64-gst-rtsp-server
```
Make sure the version of these libraries is >= 1.14.
If you wish to install the gstreamer-player sub-crate, make sure the
version of these libraries is >= 1.12. Otherwise, a version >= 1.8 is
sufficient.
Note that the version of `pkg-config` included in `MSYS2` is
[known to have problems](https://github.com/rust-lang/pkg-config-rs/issues/51#issuecomment-346300858)
compiling GStreamer, so you may need to install another version. One option
would be [`pkg-config-lite`](https://sourceforge.net/projects/pkgconfiglite/).
#### GStreamer Binaries
You need to download the *two* `.msi` files for your platform from the
GStreamer website and install them, e.g. `gstreamer-1.0-x86_64-1.12.3.msi` and
`gstreamer-1.0-devel-x86_64-1.12.3.msi`.
After installation, you also need to install `pkg-config` (e.g. via MSYS2 or
from [here](https://sourceforge.net/projects/pkgconfiglite/))
and set the `PKG_CONFIG_PATH` environment variable
```
$ export PKG_CONFIG_PATH="c:\\gstreamer\\1.0\\x86_64\\lib\\pkgconfig${PKG_CONFIG_PATH:+:$PKG_CONFIG_PATH}"
```
<a name="getting-started"/>
## Getting Started
The API reference can be found
[here](https://gstreamer.pages.freedesktop.org/gstreamer-rs/stable/latest/docs/gstreamer/), however it is
[here](https://slomo.pages.freedesktop.org/rustdocs/gstreamer/gstreamer/), however it is
only the Rust API reference and does not explain any of the concepts.
For getting started with GStreamer development, the best would be to follow
@ -174,12 +156,12 @@ In addition there are
[tutorials](https://gstreamer.freedesktop.org/documentation/tutorials/) on the
GStreamer website. Many of them were ported to Rust already and the code can
be found in the
[tutorials](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/tree/main/tutorials)
[tutorials](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/tree/master/tutorials)
directory.
Some further examples for various aspects of GStreamer and how to use it from
Rust can be found in the
[examples](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/tree/main/examples)
[examples](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/tree/master/examples)
directory.
Various GStreamer plugins written in Rust can be found in the

View file

@ -1,42 +0,0 @@
from itertools import chain
import os
import sys
from pathlib import Path as P
from subprocess import check_call as exec
NATIVE_CRATES = ["gstreamer-utils"]
def git(*args):
exec(["git"] + list(args))
def check_no_git_diff():
git("diff", "--exit-code")
check_no_git_diff()
git("clone", "--depth", "1", "https://github.com/gtk-rs/checker")
check_no_git_diff()
rootdir = P(".")
checker_dir = P("checker")
with (checker_dir / "Cargo.toml").open("a") as f:
f.write("[workspace]\n")
check_no_git_diff()
exec(['cargo', 'build', '--locked', '--color=always', '--release'], cwd=checker_dir)
check_no_git_diff()
exec('cargo run --color=always --release -- ../gstreamer* ../gstreamer-gl/{egl,wayland,x11}', cwd=checker_dir, shell=True)
gl_dir = rootdir / 'gstreamer-gl'
for crate in chain(rootdir.glob('gstreamer*'), [gl_dir / 'egl', gl_dir / 'wayland', gl_dir / 'x11']):
# Ignore "native" crates
if crate.name in NATIVE_CRATES:
continue
print(f'--> Checking doc aliases in {crate.absolute()}')
exec(['python3', 'doc_aliases.py', crate.absolute()], cwd=checker_dir)
print(f'--> {crate.absolute()}')
exec(['./checker/check_init_asserts', crate.absolute()])
check_no_git_diff()

View file

@ -1,8 +1,2 @@
variables:
GST_RS_IMG_TAG: "2024-09-12.1"
GST_RS_IMG_WINDOWS_TAG: "2024-09-12.1"
GST_RS_STABLE: "1.81.0"
GST_RS_MSRV: "1.71.1"
# The branch we use to build GStreamer from in the docker images
# Ex. main, 1.24, my-test-branch
GST_UPSTREAM_BRANCH: 'main'
GST_RS_IMG_TAG: '2020-07-05.0'

View file

@ -1,11 +0,0 @@
set -e
RELEASE=1.4.3
git clone https://code.videolan.org/videolan/dav1d.git --branch $RELEASE
cd dav1d
meson build -D prefix=/usr/local
ninja -C build
ninja -C build install
cd ..
rm -rf dav1d

View file

@ -1,50 +1,8 @@
#! /bin/bash
pip3 install meson==0.54.3
set -e
git clone --depth 1 https://gitlab.freedesktop.org/gstreamer/gst-build.git --branch master
cd gst-build
DEFAULT_BRANCH="$GST_UPSTREAM_BRANCH"
pip3 install meson==1.5.1 --break-system-packages
# gstreamer-rs already has a 'gstreamer' directory so don't clone there
pushd .
cd ..
git clone https://gitlab.freedesktop.org/gstreamer/gstreamer.git \
--depth 1 \
--branch "$DEFAULT_BRANCH"
cd gstreamer
# plugins required by tests
PLUGINS=(
-Dgst-plugins-base:ogg=enabled
-Dgst-plugins-base:vorbis=enabled
-Dgst-plugins-base:theora=enabled
-Dgst-plugins-good:matroska=enabled
-Dgst-plugins-good:vpx=enabled
-Dgst-plugins-bad:opus=enabled
-Dgst-plugins-ugly:x264=enabled
)
meson setup build \
-Dprefix=/usr/local \
-Dgpl=enabled \
-Dugly=enabled \
-Dexamples=disabled \
-Dgtk_doc=disabled \
-Dintrospection=disabled \
-Dlibav=disabled \
-Dpython=disabled \
-Dvaapi=disabled \
"${PLUGINS[@]}" "$@"
meson compile -C build
meson install -C build
ldconfig
cd ..
rm -rf gstreamer/
# Check what plugins we installed
gst-inspect-1.0
popd
meson build -D prefix=/usr/local -D devtools=disabled -D examples=disabled -D gtk_doc=disabled -D introspection=disabled -D libav=disabled -D libnice=disabled -D python=disabled -D ugly=disabled -D vaapi=disabled
ninja -C build
ninja -C build install

View file

@ -1,13 +1,8 @@
#! /bin/bash
source ./ci/env.sh
set -e
export CARGO_HOME='/usr/local/cargo'
RUSTUP_VERSION=1.27.1
RUSTUP_VERSION=1.21.1
RUST_VERSION=$1
RUST_IMAGE_FULL=$2
RUST_ARCH="x86_64-unknown-linux-gnu"
RUSTUP_URL=https://static.rust-lang.org/rustup/archive/$RUSTUP_VERSION/$RUST_ARCH/rustup-init
@ -22,38 +17,9 @@ rustup --version
cargo --version
rustc --version
if [ "$RUST_IMAGE_FULL" = "1" ]; then
if [ "$RUST_VERSION" = "stable" ]; then
rustup component add clippy-preview
rustup component add rustfmt
cargo install --locked cargo-deny
if [ "$RUST_VERSION" = "1.71.1" ]; then
cargo install --locked cargo-outdated
else
# Don't use --locked because time-0.3.30 does not build with 1.80 or newer
cargo install cargo-outdated
fi
cargo install --locked typos-cli --version "1.19.0"
# Coverage tools
rustup component add llvm-tools-preview
if [ "$RUST_VERSION" = "1.71.1" ]; then
cargo install --locked grcov
else
# Don't use --locked because time-0.3.30 does not build with 1.80 or newer
cargo install grcov
fi
fi
if [ "$RUST_VERSION" = "1.71.1" ]; then
cargo install --locked cargo-c --version 0.9.26+cargo-0.74
else
cargo install --locked cargo-c --version 0.10.3+cargo-0.81
fi
if [ "$RUST_VERSION" = "nightly" ]; then
rustup component add rustfmt --toolchain nightly
# Documentation tools
cargo install --locked rustdoc-stripper
cargo install --force cargo-deny
cargo install --force --git https://github.com/kbknapp/cargo-outdated
fi

View file

@ -1,33 +0,0 @@
#! /bin/bash
set -ex
rustc --version
cargo --version
cpus=$(nproc || sysctl -n hw.ncpu)
CARGO_FLAGS="-j${FDO_CI_CONCURRENT:-$cpus}"
for crate in gstreamer* gstreamer-gl/{egl,wayland,x11}; do
if [ -e "$crate/Cargo.toml" ]; then
if [ -n "$ALL_FEATURES" ]; then
FEATURES="--all-features"
else
FEATURES=""
fi
echo "Building and testing $crate with $FEATURES"
cargo build $CARGO_FLAGS --locked --color=always --manifest-path "$crate/Cargo.toml" $FEATURES
RUST_BACKTRACE=1 G_DEBUG=fatal_warnings cargo test $CARGO_FLAGS --color=always --manifest-path "$crate/Cargo.toml" $FEATURES
fi
done
if [ -n "$EXAMPLES_TUTORIALS" ]; then
# Keep in sync with examples/Cargo.toml
# List all features except windows/win32
EXAMPLES_FEATURES="--features=rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gl,gst-gl-x11,gst-gl-egl,allocators,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18"
cargo build $CARGO_FLAGS --locked --color=always --manifest-path examples/Cargo.toml --bins --examples "$EXAMPLES_FEATURES"
cargo build $CARGO_FLAGS --locked --color=always --manifest-path tutorials/Cargo.toml --bins --examples --all-features
fi

View file

@ -1,38 +0,0 @@
#! /bin/bash
set -ex
rustc --version
cargo --version
cargo clippy --version
# Keep features in sync with run-cargo-test.sh
get_features() {
crate=$1
case "$crate" in
gstreamer-audio|gstreamer-editing-services|gstreamer-gl|gstreamer-pbutils|gstreamer-rtp|gstreamer-rtsp|gstreamer-video|gstreamer)
echo "--features=serde,v1_26"
;;
*)
echo "--features=v1_26"
;;
esac
}
for crate in gstreamer* gstreamer-gl/{egl,wayland,x11}; do
if [ -e "$crate/Cargo.toml" ]; then
FEATURES=$(get_features "$crate")
echo "Running clippy on $crate with $FEATURES"
cargo clippy --locked --color=always --manifest-path "$crate/Cargo.toml" $FEATURES --all-targets -- $CLIPPY_LINTS
fi
done
# Keep in sync with examples/Cargo.toml
# List all features except windows/win32
EXAMPLES_FEATURES="--features=rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gl,gst-gl-x11,gst-gl-egl,allocators,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18"
# And also run over all the examples/tutorials
cargo clippy --locked --color=always --manifest-path examples/Cargo.toml --all-targets "$EXAMPLES_FEATURES" -- $CLIPPY_LINTS
cargo clippy --locked --color=always --manifest-path tutorials/Cargo.toml --all-targets --all-features -- $CLIPPY_LINTS

View file

@ -1,46 +0,0 @@
#! /bin/bash
set -ex
rustc --version
cargo --version
cpus=$(nproc || sysctl -n hw.ncpu)
CARGO_FLAGS="-j${FDO_CI_CONCURRENT:-$cpus}"
for crate in gstreamer*/sys gstreamer-gl/*/sys; do
if [ -e "$crate/Cargo.toml" ]; then
echo "Building $crate with --all-features"
cargo build $CARGO_FLAGS --locked --color=always --manifest-path "$crate/Cargo.toml" --all-features
fi
done
for crate in gstreamer/sys \
gstreamer-allocators/sys \
gstreamer-analytics/sys \
gstreamer-app/sys \
gstreamer-audio/sys \
gstreamer-base/sys \
gstreamer-check/sys \
gstreamer-controller/sys \
gstreamer-editing-services/sys \
gstreamer-gl/sys \
gstreamer-gl/egl/sys \
gstreamer-gl/wayland/sys \
gstreamer-gl/x11/sys \
gstreamer-mpegts/sys \
gstreamer-net/sys \
gstreamer-pbutils/sys \
gstreamer-play/sys \
gstreamer-player/sys \
gstreamer-rtp/sys \
gstreamer-rtsp-server/sys \
gstreamer-rtsp/sys \
gstreamer-sdp/sys \
gstreamer-tag/sys \
gstreamer-validate/sys \
gstreamer-video/sys \
gstreamer-webrtc/sys; do
echo "Testing $crate with --all-features)"
RUST_BACKTRACE=1 cargo test $CARGO_FLAGS --locked --color=always --manifest-path $crate/Cargo.toml --all-features
done

View file

@ -1,87 +0,0 @@
# List of all the crates we want to build
# We need to do this manually to avoid trying
# to build egl,wayland,x11 etc, which can't
# work on windows
[string[]] $crates = @(
'gstreamer',
# Unix specific atm
# 'gstreamer-allocators'
'gstreamer-app',
'gstreamer-audio',
'gstreamer-base',
'gstreamer-check',
'gstreamer-controller',
'gstreamer-editing-services',
'gstreamer-gl',
# 'gstreamer-gl/egl',
# 'gstreamer-gl/wayland',
# 'gstreamer-gl/x11',
'gstreamer-mpegts',
'gstreamer-mpegts/sys',
'gstreamer-net',
'gstreamer-pbutils',
'gstreamer-player',
'gstreamer-rtp',
'gstreamer-rtsp',
'gstreamer-rtsp-server',
'gstreamer-sdp',
'gstreamer-tag',
'gstreamer-tag/sys',
'gstreamer-video',
'gstreamer-webrtc',
'tutorials',
'examples'
)
# "" is the default build, no flags appended
[string[]] $features_matrix = @(
# "--no-default-features",
# "--features=v1_18",
# "--features=v1_20",
"",
"--all-features"
)
foreach($features in $features_matrix) {
foreach($crate in $crates)
{
Write-Host "Building crate: $crate"
Write-Host "Features: $features"
$env:LocalFeatures = $features
# Don't append feature flags if the string is null/empty
# Or when we want to build without default features
if ($env:LocalFeatures -and ($env:LocalFeatures -ne '--no-default-features')) {
if ($crate -eq 'examples') {
# FIXME: We can do --all-features for examples once we have gtk3 installed in the image
$env:LocalFeatures = "--features=rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18,windows,gl"
}
if ($crate -eq 'tutorials') {
$env:LocalFeatures = ''
}
}
Write-Host "with features: $env:LocalFeatures"
cargo build --color=always --manifest-path $crate/Cargo.toml --all-targets $env:LocalFeatures
if (!$?) {
Write-Host "Failed to build crate: $crate"
Exit 1
}
if (($crate -eq "gstreamer-tag/sys") -or ($crate -eq "gstreamer-mpegts/sys")) {
Write-Host "Skipping tests for $crate"
continue
}
$env:G_DEBUG="fatal_warnings"
$env:RUST_BACKTRACE="1"
cargo test --no-fail-fast --color=always --manifest-path $crate/Cargo.toml $env:LocalFeatures
if (!$?) {
Write-Host "Tests failed to for crate: $crate"
Exit 1
}
}
}

View file

@ -1,23 +0,0 @@
# escape=`
FROM "registry.freedesktop.org/gstreamer/gstreamer/amd64/windows:2023-07-17.0-main"
SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; $ProgressPreference = 'SilentlyContinue';"]
# These arguments are always required to be specified with --build-arg
# when building the image.
# See DOCKER_BUILD_ARGS in .gitlab-ci.yml for an example
ARG DEFAULT_BRANCH="invalid"
ARG RUST_VERSION="invalid"
RUN choco install -y pkgconfiglite nasm llvm openssl
# https://stackoverflow.com/a/50716450
RUN setx PATH '%PATH%;C:\Program Files\NASM;C:\gst-install\bin'
ENV PKG_CONFIG_PATH="C:\gst-install\lib\pkgconfig"
COPY install_gst.ps1 install_dav1d.ps1 install_rust.ps1 install_cargo_utils.ps1 C:\
RUN C:\install_gst.ps1
RUN C:\install_dav1d.ps1
RUN C:\install_rust.ps1
RUN C:\install_cargo_utils.ps1

View file

@ -1,60 +0,0 @@
# Copied from mesa, big kudos
#
# https://gitlab.freedesktop.org/mesa/mesa/-/blob/master/.gitlab-ci/windows/mesa_container.ps1
# https://gitlab.freedesktop.org/mesa/mesa/-/blob/34e3e164936d1d3cef267da7780e87f062fedf39/.gitlab-ci/windows/mesa_container.ps1
# Implements the equivalent of ci-templates container-ifnot-exists, using
# Docker directly as we don't have buildah/podman/skopeo available under
# Windows, nor can we execute Docker-in-Docker
$registry_uri = $args[0]
$registry_username = $args[1]
$registry_password = $args[2]
$registry_user_image = $args[3]
$registry_central_image = $args[4]
$dockerfile = $args[5]
docker --config "windows-docker.conf" login -u "$registry_username" -p "$registry_password" "$registry_uri"
if (!$?) {
Write-Host "docker login failed to $registry_uri"
Exit 1
}
# if the image already exists, don't rebuild it
docker --config "windows-docker.conf" pull "$registry_user_image"
if ($?) {
Write-Host "User image $registry_user_image already exists; not rebuilding"
docker --config "windows-docker.conf" logout "$registry_uri"
Exit 0
}
# if the image already exists upstream, copy it
docker --config "windows-docker.conf" pull "$registry_central_image"
if ($?) {
Write-Host "Copying central image $registry_central_image to user image $registry_user_image"
docker --config "windows-docker.conf" tag "$registry_central_image" "$registry_user_image"
docker --config "windows-docker.conf" push "$registry_user_image"
$pushstatus = $?
docker --config "windows-docker.conf" logout "$registry_uri"
if (!$pushstatus) {
Write-Host "Pushing image to $registry_user_image failed"
Exit 1
}
Exit 0
}
Write-Host "No image found at $registry_user_image or $registry_central_image; rebuilding"
docker --config "windows-docker.conf" build $DOCKER_BUILD_ARGS --no-cache -t "$registry_user_image" -f "$dockerfile" "./ci/windows-docker"
if (!$?) {
Write-Host "Container build failed"
docker --config "windows-docker.conf" logout "$registry_uri"
Exit 1
}
Get-Date
docker --config "windows-docker.conf" push "$registry_user_image"
$pushstatus = $?
docker --config "windows-docker.conf" logout "$registry_uri"
if (!$pushstatus) {
Write-Host "Pushing image to $registry_user_image failed"
Exit 1
}

View file

@ -1,18 +0,0 @@
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;
rustup --version
rustc --version
cargo --version
if ("$env:RUST_VERSION" -eq "1.71.1") {
cargo install --locked cargo-c --version 0.9.26+cargo-0.74
} else {
cargo install --locked cargo-c --version 0.10.3+cargo-0.81
}
if (!$?) {
Write-Host "Failed to install cargo-c"
Exit 1
}
cargo-cbuild --version

View file

@ -1,28 +0,0 @@
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;
# Download gstreamer and all its subprojects
git clone -b 1.4.3 --depth 1 https://code.videolan.org/videolan/dav1d.git C:\dav1d
if (!$?) {
Write-Host "Failed to clone dav1d"
Exit 1
}
Set-Location C:\dav1d
# This is fine, we are not going to use the GtkMedia* apis
$env:MESON_ARGS = "--prefix=C:\gst-install\"
Write-Output "Building dav1d"
cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && meson _build $env:MESON_ARGS && meson compile -C _build && ninja -C _build install"
if (!$?) {
Write-Host "Failed to build and install dav1d"
Exit 1
}
cd C:\
cmd /c rmdir /s /q C:\dav1d
if (!$?) {
Write-Host "Failed to remove dav1d checkout"
Exit 1
}

View file

@ -1,69 +0,0 @@
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;
# Download gstreamer and all its subprojects
git clone -b $env:DEFAULT_BRANCH --depth 1 https://gitlab.freedesktop.org/gstreamer/gstreamer.git C:\gstreamer
if (!$?) {
Write-Host "Failed to clone gstreamer"
Exit 1
}
Set-Location C:\gstreamer
# Copy the cache we already have in the image to avoid massive redownloads
Move-Item C:/subprojects/* C:\gstreamer\subprojects
# Update the subprojects cache
Write-Output "Running meson subproject reset"
meson subprojects update --reset
if (!$?) {
Write-Host "Failed to update gstreamer subprojects"
Exit 1
}
$MESON_ARGS = @(`
"--prefix=C:\gst-install", `
"-Dglib:installed_tests=false", `
"-Dlibnice:tests=disabled", `
"-Dlibnice:examples=disabled", `
"-Dffmpeg:tests=disabled", `
"-Dopenh264:tests=disabled", `
"-Dpygobject:tests=false", `
"-Dgpl=enabled", `
"-Dugly=enabled", `
"-Dbad=enabled", `
"-Dges=enabled", `
"-Drtsp_server=enabled", `
"-Ddevtools=enabled", `
"-Dsharp=disabled", `
"-Dpython=disabled", `
"-Dlibav=disabled", `
"-Dvaapi=disabled", `
"-Dgtk=enabled", `
"-Dgst-plugins-base:pango=enabled", `
"-Dgst-plugins-good:cairo=enabled", `
"-Dgst-plugins-good:lame=disabled"
)
Write-Output "Building gstreamer"
meson setup --vsenv $MESON_ARGS _build
if (!$?) {
type "_build\meson-logs\meson-log.txt"
Write-Host "Failed to run meson setup, see log above"
Exit 1
}
Write-Output "Compiling gstreamer"
meson compile -C _build
if (!$?) {
Write-Host "Failed to run meson compile"
Exit 1
}
# meson install does a spurious rebuild sometimes that then fails
meson install --no-rebuild -C _build
if (!$?) {
Write-Host "Failed to run meson install"
Exit 1
}
cd c:\
Remove-Item -LiteralPath "C:\gstreamer" -Force -Recurse

View file

@ -1,17 +0,0 @@
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;
$rustup_url = 'https://win.rustup.rs/x86_64'
Invoke-WebRequest -Uri $rustup_url -OutFile C:\rustup-init.exe
if (!$?) {
Write-Host "Failed to download rustup"
Exit 1
}
C:\rustup-init.exe -y --profile minimal --default-toolchain $env:RUST_VERSION
if (!$?) {
Write-Host "Failed to install rust"
Exit 1
}

View file

@ -1,33 +1,69 @@
[graph]
exclude = [
"examples",
"tutorials",
]
[advisories]
version = 2
db-path = "~/.cargo/advisory-db"
db-urls = ["https://github.com/rustsec/advisory-db"]
db-url = "https://github.com/rustsec/advisory-db"
vulnerability = "deny"
unmaintained = "warn"
notice = "warn"
ignore = []
[licenses]
version = 2
confidence-threshold = 0.8
unlicensed = "deny"
allow = [
"MIT",
"Apache-2.0",
"Apache-2.0 WITH LLVM-exception",
"Unicode-DFS-2016",
"Apache-2.0",
]
deny = [
"GPL-1.0",
"GPL-2.0",
"GPL-3.0",
"AGPL-1.0",
"AGPL-3.0",
]
copyleft = "deny"
allow-osi-fsf-free = "either"
confidence-threshold = 0.8
[[licenses.exceptions]]
allow = ["LGPL-2.0"]
name = "gstreamer-rs-lgpl-docs"
[bans]
multiple-versions = "deny"
wildcards = "allow"
highlight = "all"
# ignore duplicated deps because of outdated glutin
# https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/-/merge_requests/409
[[bans.skip]]
name = "unicode-xid"
version = "0.1.0"
[[bans.skip]]
name = "rusttype"
version = "0.7.9"
[[bans.skip]]
name = "quote"
version = "0.6.13"
[[bans.skip]]
name = "proc-macro2"
version = "0.4.30"
[[bans.skip]]
name = "gl_generator"
version = "0.13.1"
[[bans.skip]]
name = "libloading"
version = "0.5.2"
[sources]
unknown-registry = "deny"
unknown-git = "deny"
allow-git = [
"https://github.com/gtk-rs/gtk-rs-core",
"https://gitlab.freedesktop.org/gstreamer/gstreamer-rs-sys",
"https://github.com/gtk-rs/sys",
"https://github.com/gtk-rs/glib",
"https://github.com/gtk-rs/gio",
"https://github.com/gtk-rs/cairo",
"https://github.com/gtk-rs/pango",
"https://github.com/gtk-rs/pangocairo",
"https://github.com/gtk-rs/atk",
"https://github.com/gtk-rs/gdk-pixbuf",
"https://github.com/gtk-rs/gdk",
"https://github.com/gtk-rs/gtk",
]

14
docs/Cargo.toml Normal file
View file

@ -0,0 +1,14 @@
[package]
name = "gstreamer-rs-lgpl-docs"
version = "0.16.0"
authors = ["Sebastian Dröge <sebastian@centricular.com>"]
license = "LGPL-2.0"
description = "LGPL-licensed docs for gstreamer-rs crates"
repository = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs"
homepage = "https://gstreamer.freedesktop.org"
[lib]
name = "lgpl_docs"
[dependencies]
rustdoc-stripper = "0.1.6"

688
docs/gstreamer-app/docs.md Normal file
View file

@ -0,0 +1,688 @@
<!-- file * -->
<!-- struct AppSink -->
Appsink is a sink plugin that supports many different methods for making
the application get a handle on the GStreamer data in a pipeline. Unlike
most GStreamer elements, Appsink provides external API functions.
appsink can be used by linking to the gstappsink.h header file to access the
methods or by using the appsink action signals and properties.
The normal way of retrieving samples from appsink is by using the
`AppSink::pull_sample` and `AppSink::pull_preroll` methods.
These methods block until a sample becomes available in the sink or when the
sink is shut down or reaches EOS. There are also timed variants of these
methods, `AppSink::try_pull_sample` and `AppSink::try_pull_preroll`,
which accept a timeout parameter to limit the amount of time to wait.
Appsink will internally use a queue to collect buffers from the streaming
thread. If the application is not pulling samples fast enough, this queue
will consume a lot of memory over time. The "max-buffers" property can be
used to limit the queue size. The "drop" property controls whether the
streaming thread blocks or if older buffers are dropped when the maximum
queue size is reached. Note that blocking the streaming thread can negatively
affect real-time performance and should be avoided.
If a blocking behaviour is not desirable, setting the "emit-signals" property
to `true` will make appsink emit the "new-sample" and "new-preroll" signals
when a sample can be pulled without blocking.
The "caps" property on appsink can be used to control the formats that
appsink can receive. This property can contain non-fixed caps, the format of
the pulled samples can be obtained by getting the sample caps.
If one of the pull-preroll or pull-sample methods return `None`, the appsink
is stopped or in the EOS state. You can check for the EOS state with the
"eos" property or with the `AppSink::is_eos` method.
The eos signal can also be used to be informed when the EOS state is reached
to avoid polling.
# Implements
[`gst_base::BaseSinkExt`](../gst_base/trait.BaseSinkExt.html), [`gst::ElementExt`](../gst/trait.ElementExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html), [`gst::URIHandlerExt`](../gst/trait.URIHandlerExt.html)
<!-- impl AppSink::fn get_buffer_list_support -->
Check if `self` supports buffer lists.
Feature: `v1_12`
# Returns
`true` if `self` supports buffer lists.
<!-- impl AppSink::fn get_caps -->
Get the configured caps on `self`.
# Returns
the `gst::Caps` accepted by the sink. `gst::Caps::unref` after usage.
<!-- impl AppSink::fn get_drop -->
Check if `self` will drop old buffers when the maximum amount of queued
buffers is reached.
# Returns
`true` if `self` is dropping old buffers when the queue is
filled.
<!-- impl AppSink::fn get_emit_signals -->
Check if appsink will emit the "new-preroll" and "new-sample" signals.
# Returns
`true` if `self` is emitting the "new-preroll" and "new-sample"
signals.
<!-- impl AppSink::fn get_max_buffers -->
Get the maximum amount of buffers that can be queued in `self`.
# Returns
The maximum amount of buffers that can be queued.
<!-- impl AppSink::fn get_wait_on_eos -->
Check if `self` will wait for all buffers to be consumed when an EOS is
received.
# Returns
`true` if `self` will wait for all buffers to be consumed when an
EOS is received.
<!-- impl AppSink::fn is_eos -->
Check if `self` is EOS, which is when no more samples can be pulled because
an EOS event was received.
This function also returns `true` when the appsink is not in the PAUSED or
PLAYING state.
# Returns
`true` if no more samples can be pulled and the appsink is EOS.
<!-- impl AppSink::fn pull_preroll -->
Get the last preroll sample in `self`. This was the sample that caused the
appsink to preroll in the PAUSED state.
This function is typically used when dealing with a pipeline in the PAUSED
state. Calling this function after doing a seek will give the sample right
after the seek position.
Calling this function will clear the internal reference to the preroll
buffer.
Note that the preroll sample will also be returned as the first sample
when calling `AppSink::pull_sample`.
If an EOS event was received before any buffers, this function returns
`None`. Use gst_app_sink_is_eos () to check for the EOS condition.
This function blocks until a preroll sample or EOS is received or the appsink
element is set to the READY/NULL state.
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS.
Call `gst::Sample::unref` after usage.
<!-- impl AppSink::fn pull_sample -->
This function blocks until a sample or EOS becomes available or the appsink
element is set to the READY/NULL state.
This function will only return samples when the appsink is in the PLAYING
state. All rendered buffers will be put in a queue so that the application
can pull samples at its own rate. Note that when the application does not
pull samples fast enough, the queued buffers could consume a lot of memory,
especially when dealing with raw video frames.
If an EOS event was received before any buffers, this function returns
`None`. Use gst_app_sink_is_eos () to check for the EOS condition.
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS.
Call `gst::Sample::unref` after usage.
<!-- impl AppSink::fn set_buffer_list_support -->
Instruct `self` to enable or disable buffer list support.
For backwards-compatibility reasons applications need to opt in
to indicate that they will be able to handle buffer lists.
Feature: `v1_12`
## `enable_lists`
enable or disable buffer list support
<!-- impl AppSink::fn set_callbacks -->
Set callbacks which will be executed for each new preroll, new sample and eos.
This is an alternative to using the signals, it has lower overhead and is thus
less expensive, but also less flexible.
If callbacks are installed, no signals will be emitted for performance
reasons.
Before 1.16.3 it was not possible to change the callbacks in a thread-safe
way.
## `callbacks`
the callbacks
## `user_data`
a user_data argument for the callbacks
## `notify`
a destroy notify function
<!-- impl AppSink::fn set_caps -->
Set the capabilities on the appsink element. This function takes
a copy of the caps structure. After calling this method, the sink will only
accept caps that match `caps`. If `caps` is non-fixed, or incomplete,
you must check the caps on the samples to get the actual used caps.
## `caps`
caps to set
<!-- impl AppSink::fn set_drop -->
Instruct `self` to drop old buffers when the maximum amount of queued
buffers is reached.
## `drop`
the new state
<!-- impl AppSink::fn set_emit_signals -->
Make appsink emit the "new-preroll" and "new-sample" signals. This option is
by default disabled because signal emission is expensive and unneeded when
the application prefers to operate in pull mode.
## `emit`
the new state
<!-- impl AppSink::fn set_max_buffers -->
Set the maximum amount of buffers that can be queued in `self`. After this
amount of buffers are queued in appsink, any more buffers will block upstream
elements until a sample is pulled from `self`.
## `max`
the maximum number of buffers to queue
<!-- impl AppSink::fn set_wait_on_eos -->
Instruct `self` to wait for all buffers to be consumed when an EOS is received.
## `wait`
the new state
<!-- impl AppSink::fn try_pull_preroll -->
Get the last preroll sample in `self`. This was the sample that caused the
appsink to preroll in the PAUSED state.
This function is typically used when dealing with a pipeline in the PAUSED
state. Calling this function after doing a seek will give the sample right
after the seek position.
Calling this function will clear the internal reference to the preroll
buffer.
Note that the preroll sample will also be returned as the first sample
when calling `AppSink::pull_sample`.
If an EOS event was received before any buffers or the timeout expires,
this function returns `None`. Use gst_app_sink_is_eos () to check for the EOS
condition.
This function blocks until a preroll sample or EOS is received, the appsink
element is set to the READY/NULL state, or the timeout expires.
Feature: `v1_10`
## `timeout`
the maximum amount of time to wait for the preroll sample
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS or the timeout expires.
Call `gst::Sample::unref` after usage.
<!-- impl AppSink::fn try_pull_sample -->
This function blocks until a sample or EOS becomes available or the appsink
element is set to the READY/NULL state or the timeout expires.
This function will only return samples when the appsink is in the PLAYING
state. All rendered buffers will be put in a queue so that the application
can pull samples at its own rate. Note that when the application does not
pull samples fast enough, the queued buffers could consume a lot of memory,
especially when dealing with raw video frames.
If an EOS event was received before any buffers or the timeout expires,
this function returns `None`. Use gst_app_sink_is_eos () to check for the EOS
condition.
Feature: `v1_10`
## `timeout`
the maximum amount of time to wait for a sample
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS or the timeout expires.
Call `gst::Sample::unref` after usage.
<!-- impl AppSink::fn connect_eos -->
Signal that the end-of-stream has been reached. This signal is emitted from
the streaming thread.
<!-- impl AppSink::fn connect_new_preroll -->
Signal that a new preroll sample is available.
This signal is emitted from the streaming thread and only when the
"emit-signals" property is `true`.
The new preroll sample can be retrieved with the "pull-preroll" action
signal or `AppSink::pull_preroll` either from this signal callback
or from any other thread.
Note that this signal is only emitted when the "emit-signals" property is
set to `true`, which it is not by default for performance reasons.
<!-- impl AppSink::fn connect_new_sample -->
Signal that a new sample is available.
This signal is emitted from the streaming thread and only when the
"emit-signals" property is `true`.
The new sample can be retrieved with the "pull-sample" action
signal or `AppSink::pull_sample` either from this signal callback
or from any other thread.
Note that this signal is only emitted when the "emit-signals" property is
set to `true`, which it is not by default for performance reasons.
<!-- impl AppSink::fn connect_pull_preroll -->
Get the last preroll sample in `appsink`. This was the sample that caused the
appsink to preroll in the PAUSED state.
This function is typically used when dealing with a pipeline in the PAUSED
state. Calling this function after doing a seek will give the sample right
after the seek position.
Calling this function will clear the internal reference to the preroll
buffer.
Note that the preroll sample will also be returned as the first sample
when calling `AppSink::pull_sample` or the "pull-sample" action signal.
If an EOS event was received before any buffers, this function returns
`None`. Use gst_app_sink_is_eos () to check for the EOS condition.
This function blocks until a preroll sample or EOS is received or the appsink
element is set to the READY/NULL state.
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS.
<!-- impl AppSink::fn connect_pull_sample -->
This function blocks until a sample or EOS becomes available or the appsink
element is set to the READY/NULL state.
This function will only return samples when the appsink is in the PLAYING
state. All rendered samples will be put in a queue so that the application
can pull samples at its own rate.
Note that when the application does not pull samples fast enough, the
queued samples could consume a lot of memory, especially when dealing with
raw video frames. It's possible to control the behaviour of the queue with
the "drop" and "max-buffers" properties.
If an EOS event was received before any buffers, this function returns
`None`. Use gst_app_sink_is_eos () to check for the EOS condition.
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS.
<!-- impl AppSink::fn connect_try_pull_preroll -->
Get the last preroll sample in `appsink`. This was the sample that caused the
appsink to preroll in the PAUSED state.
This function is typically used when dealing with a pipeline in the PAUSED
state. Calling this function after doing a seek will give the sample right
after the seek position.
Calling this function will clear the internal reference to the preroll
buffer.
Note that the preroll sample will also be returned as the first sample
when calling `AppSink::pull_sample` or the "pull-sample" action signal.
If an EOS event was received before any buffers or the timeout expires,
this function returns `None`. Use gst_app_sink_is_eos () to check for the EOS
condition.
This function blocks until a preroll sample or EOS is received, the appsink
element is set to the READY/NULL state, or the timeout expires.
Feature: `v1_10`
## `timeout`
the maximum amount of time to wait for the preroll sample
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS or the timeout expires.
<!-- impl AppSink::fn connect_try_pull_sample -->
This function blocks until a sample or EOS becomes available or the appsink
element is set to the READY/NULL state or the timeout expires.
This function will only return samples when the appsink is in the PLAYING
state. All rendered samples will be put in a queue so that the application
can pull samples at its own rate.
Note that when the application does not pull samples fast enough, the
queued samples could consume a lot of memory, especially when dealing with
raw video frames. It's possible to control the behaviour of the queue with
the "drop" and "max-buffers" properties.
If an EOS event was received before any buffers or the timeout expires,
this function returns `None`. Use gst_app_sink_is_eos () to check
for the EOS condition.
Feature: `v1_10`
## `timeout`
the maximum amount of time to wait for a sample
# Returns
a `gst::Sample` or NULL when the appsink is stopped or EOS or the timeout expires.
<!-- struct AppSrc -->
The appsrc element can be used by applications to insert data into a
GStreamer pipeline. Unlike most GStreamer elements, appsrc provides
external API functions.
appsrc can be used by linking with the libgstapp library to access the
methods directly or by using the appsrc action signals.
Before operating appsrc, the caps property must be set to fixed caps
describing the format of the data that will be pushed with appsrc. An
exception to this is when pushing buffers with unknown caps, in which case no
caps should be set. This is typically true of file-like sources that push raw
byte buffers. If you don't want to explicitly set the caps, you can use
gst_app_src_push_sample. This method gets the caps associated with the
sample and sets them on the appsrc replacing any previously set caps (if
different from sample's caps).
The main way of handing data to the appsrc element is by calling the
`AppSrc::push_buffer` method or by emitting the push-buffer action signal.
This will put the buffer onto a queue from which appsrc will read from in its
streaming thread. It is important to note that data transport will not happen
from the thread that performed the push-buffer call.
The "max-bytes" property controls how much data can be queued in appsrc
before appsrc considers the queue full. A filled internal queue will always
signal the "enough-data" signal, which signals the application that it should
stop pushing data into appsrc. The "block" property will cause appsrc to
block the push-buffer method until free data becomes available again.
When the internal queue is running out of data, the "need-data" signal is
emitted, which signals the application that it should start pushing more data
into appsrc.
In addition to the "need-data" and "enough-data" signals, appsrc can emit the
"seek-data" signal when the "stream-mode" property is set to "seekable" or
"random-access". The signal argument will contain the new desired position in
the stream expressed in the unit set with the "format" property. After
receiving the seek-data signal, the application should push-buffers from the
new position.
These signals allow the application to operate the appsrc in two different
ways:
The push mode, in which the application repeatedly calls the push-buffer/push-sample
method with a new buffer/sample. Optionally, the queue size in the appsrc
can be controlled with the enough-data and need-data signals by respectively
stopping/starting the push-buffer/push-sample calls. This is a typical
mode of operation for the stream-type "stream" and "seekable". Use this
mode when implementing various network protocols or hardware devices.
The pull mode, in which the need-data signal triggers the next push-buffer call.
This mode is typically used in the "random-access" stream-type. Use this
mode for file access or other randomly accessible sources. In this mode, a
buffer of exactly the amount of bytes given by the need-data signal should be
pushed into appsrc.
In all modes, the size property on appsrc should contain the total stream
size in bytes. Setting this property is mandatory in the random-access mode.
For the stream and seekable modes, setting this property is optional but
recommended.
When the application has finished pushing data into appsrc, it should call
`AppSrc::end_of_stream` or emit the end-of-stream action signal. After
this call, no more buffers can be pushed into appsrc until a flushing seek
occurs or the state of the appsrc has gone through READY.
# Implements
[`gst_base::BaseSrcExt`](../gst_base/trait.BaseSrcExt.html), [`gst::ElementExt`](../gst/trait.ElementExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html), [`gst::URIHandlerExt`](../gst/trait.URIHandlerExt.html)
<!-- impl AppSrc::fn end_of_stream -->
Indicates to the appsrc element that the last buffer queued in the
element is the last buffer of the stream.
# Returns
`gst::FlowReturn::Ok` when the EOS was successfully queued.
`gst::FlowReturn::Flushing` when `self` is not PAUSED or PLAYING.
<!-- impl AppSrc::fn get_caps -->
Get the configured caps on `self`.
# Returns
the `gst::Caps` produced by the source. `gst::Caps::unref` after usage.
<!-- impl AppSrc::fn get_current_level_bytes -->
Get the number of currently queued bytes inside `self`.
# Returns
The number of currently queued bytes.
<!-- impl AppSrc::fn get_duration -->
Get the duration of the stream in nanoseconds. A value of GST_CLOCK_TIME_NONE means that the duration is
not known.
Feature: `v1_10`
# Returns
the duration of the stream previously set with `AppSrc::set_duration`;
<!-- impl AppSrc::fn get_emit_signals -->
Check if appsrc will emit the "new-preroll" and "new-buffer" signals.
# Returns
`true` if `self` is emitting the "new-preroll" and "new-buffer"
signals.
<!-- impl AppSrc::fn get_latency -->
Retrieve the min and max latencies in `min` and `max` respectively.
## `min`
the min latency
## `max`
the max latency
<!-- impl AppSrc::fn get_max_bytes -->
Get the maximum amount of bytes that can be queued in `self`.
# Returns
The maximum amount of bytes that can be queued.
<!-- impl AppSrc::fn get_size -->
Get the size of the stream in bytes. A value of -1 means that the size is
not known.
# Returns
the size of the stream previously set with `AppSrc::set_size`;
<!-- impl AppSrc::fn get_stream_type -->
Get the stream type. Control the stream type of `self`
with `AppSrc::set_stream_type`.
# Returns
the stream type.
<!-- impl AppSrc::fn push_buffer -->
Adds a buffer to the queue of buffers that the appsrc element will
push to its source pad. This function takes ownership of the buffer.
When the block property is TRUE, this function can block until free
space becomes available in the queue.
## `buffer`
a `gst::Buffer` to push
# Returns
`gst::FlowReturn::Ok` when the buffer was successfully queued.
`gst::FlowReturn::Flushing` when `self` is not PAUSED or PLAYING.
`gst::FlowReturn::Eos` when EOS occurred.
<!-- impl AppSrc::fn push_buffer_list -->
Adds a buffer list to the queue of buffers and buffer lists that the
appsrc element will push to its source pad. This function takes ownership
of `buffer_list`.
When the block property is TRUE, this function can block until free
space becomes available in the queue.
Feature: `v1_14`
## `buffer_list`
a `gst::BufferList` to push
# Returns
`gst::FlowReturn::Ok` when the buffer list was successfully queued.
`gst::FlowReturn::Flushing` when `self` is not PAUSED or PLAYING.
`gst::FlowReturn::Eos` when EOS occurred.
<!-- impl AppSrc::fn push_sample -->
Extract a buffer from the provided sample and adds it to the queue of
buffers that the appsrc element will push to its source pad. Any
previous caps that were set on appsrc will be replaced by the caps
associated with the sample if not equal.
This function does not take ownership of the
sample so the sample needs to be unreffed after calling this function.
When the block property is TRUE, this function can block until free
space becomes available in the queue.
## `sample`
a `gst::Sample` from which buffer and caps may be
extracted
# Returns
`gst::FlowReturn::Ok` when the buffer was successfully queued.
`gst::FlowReturn::Flushing` when `self` is not PAUSED or PLAYING.
`gst::FlowReturn::Eos` when EOS occurred.
<!-- impl AppSrc::fn set_callbacks -->
Set callbacks which will be executed when data is needed, enough data has
been collected or when a seek should be performed.
This is an alternative to using the signals, it has lower overhead and is thus
less expensive, but also less flexible.
If callbacks are installed, no signals will be emitted for performance
reasons.
Before 1.16.3 it was not possible to change the callbacks in a thread-safe
way.
## `callbacks`
the callbacks
## `user_data`
a user_data argument for the callbacks
## `notify`
a destroy notify function
<!-- impl AppSrc::fn set_caps -->
Set the capabilities on the appsrc element. This function takes
a copy of the caps structure. After calling this method, the source will
only produce caps that match `caps`. `caps` must be fixed and the caps on the
buffers must match the caps or left NULL.
## `caps`
caps to set
<!-- impl AppSrc::fn set_duration -->
Set the duration of the stream in nanoseconds. A value of GST_CLOCK_TIME_NONE means that the duration is
not known.
Feature: `v1_10`
## `duration`
the duration to set
<!-- impl AppSrc::fn set_emit_signals -->
Make appsrc emit the "new-preroll" and "new-buffer" signals. This option is
by default disabled because signal emission is expensive and unneeded when
the application prefers to operate in pull mode.
## `emit`
the new state
<!-- impl AppSrc::fn set_latency -->
Configure the `min` and `max` latency in `self`. If `min` is set to -1, the
default latency calculations for pseudo-live sources will be used.
## `min`
the min latency
## `max`
the max latency
<!-- impl AppSrc::fn set_max_bytes -->
Set the maximum amount of bytes that can be queued in `self`.
After the maximum amount of bytes are queued, `self` will emit the
"enough-data" signal.
## `max`
the maximum number of bytes to queue
<!-- impl AppSrc::fn set_size -->
Set the size of the stream in bytes. A value of -1 means that the size is
not known.
## `size`
the size to set
<!-- impl AppSrc::fn set_stream_type -->
Set the stream type on `self`. For seekable streams, the "seek" signal must
be connected to.
## `type_`
the new stream type
<!-- impl AppSrc::fn connect_end_of_stream -->
Notify `appsrc` that no more buffer are available.
<!-- impl AppSrc::fn connect_enough_data -->
Signal that the source has enough data. It is recommended that the
application stops calling push-buffer until the need-data signal is
emitted again to avoid excessive buffer queueing.
<!-- impl AppSrc::fn connect_need_data -->
Signal that the source needs more data. In the callback or from another
thread you should call push-buffer or end-of-stream.
`length` is just a hint and when it is set to -1, any number of bytes can be
pushed into `appsrc`.
You can call push-buffer multiple times until the enough-data signal is
fired.
## `length`
the amount of bytes needed.
<!-- impl AppSrc::fn connect_push_buffer -->
Adds a buffer to the queue of buffers that the appsrc element will
push to its source pad. This function does not take ownership of the
buffer so the buffer needs to be unreffed after calling this function.
When the block property is TRUE, this function can block until free space
becomes available in the queue.
## `buffer`
a buffer to push
<!-- impl AppSrc::fn connect_push_buffer_list -->
Adds a buffer list to the queue of buffers and buffer lists that the
appsrc element will push to its source pad. This function does not take
ownership of the buffer list so the buffer list needs to be unreffed
after calling this function.
When the block property is TRUE, this function can block until free space
becomes available in the queue.
Feature: `v1_14`
## `buffer_list`
a buffer list to push
<!-- impl AppSrc::fn connect_push_sample -->
Extract a buffer from the provided sample and adds the extracted buffer
to the queue of buffers that the appsrc element will
push to its source pad. This function set the appsrc caps based on the caps
in the sample and reset the caps if they change.
Only the caps and the buffer of the provided sample are used and not
for example the segment in the sample.
This function does not take ownership of the
sample so the sample needs to be unreffed after calling this function.
When the block property is TRUE, this function can block until free space
becomes available in the queue.
## `sample`
a sample from which extract buffer to push
<!-- impl AppSrc::fn connect_seek_data -->
Seek to the given offset. The next push-buffer should produce buffers from
the new `offset`.
This callback is only called for seekable stream types.
## `offset`
the offset to seek to
# Returns
`true` if the seek succeeded.
<!-- enum AppStreamType -->
The stream type.
<!-- enum AppStreamType::variant Stream -->
No seeking is supported in the stream, such as a
live stream.
<!-- enum AppStreamType::variant Seekable -->
The stream is seekable but seeking might not
be very fast, such as data from a webserver.
<!-- enum AppStreamType::variant RandomAccess -->
The stream is seekable and seeking is fast,
such as in a local file.

1494
docs/gstreamer-audio/docs.md Normal file

File diff suppressed because it is too large Load diff

2458
docs/gstreamer-base/docs.md Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,339 @@
<!-- file * -->
<!-- struct TestClock -->
GstTestClock is an implementation of `gst::Clock` which has different
behaviour compared to `gst::SystemClock`. Time for `gst::SystemClock` advances
according to the system time, while time for `TestClock` changes only
when `TestClock::set_time` or `TestClock::advance_time` are
called. `TestClock` provides unit tests with the possibility to
precisely advance the time in a deterministic manner, independent of the
system time or any other external factors.
## Advancing the time of a `TestClock`
```C
#include <gst/gst.h>
#include <gst/check/gsttestclock.h>
GstClock *clock;
GstTestClock *test_clock;
clock = gst_test_clock_new ();
test_clock = GST_TEST_CLOCK (clock);
GST_INFO ("Time: %" GST_TIME_FORMAT, GST_TIME_ARGS (gst_clock_get_time (clock)));
gst_test_clock_advance_time ( test_clock, 1 * GST_SECOND);
GST_INFO ("Time: %" GST_TIME_FORMAT, GST_TIME_ARGS (gst_clock_get_time (clock)));
g_usleep (10 * G_USEC_PER_SEC);
GST_INFO ("Time: %" GST_TIME_FORMAT, GST_TIME_ARGS (gst_clock_get_time (clock)));
gst_test_clock_set_time (test_clock, 42 * GST_SECOND);
GST_INFO ("Time: %" GST_TIME_FORMAT, GST_TIME_ARGS (gst_clock_get_time (clock)));
...
```
`gst::Clock` allows for setting up single shot or periodic clock notifications
as well as waiting for these notifications synchronously (using
`gst::Clock::id_wait`) or asynchronously (using
`gst::Clock::id_wait_async`). This is used by many GStreamer elements,
among them `GstBaseSrc` and `GstBaseSink`.
`TestClock` keeps track of these clock notifications. By calling
`TestClock::wait_for_next_pending_id` or
`TestClock::wait_for_multiple_pending_ids` a unit tests may wait for the
next one or several clock notifications to be requested. Additionally unit
tests may release blocked waits in a controlled fashion by calling
`TestClock::process_next_clock_id`. This way a unit test can control the
inaccuracy (jitter) of clock notifications, since the test can decide to
release blocked waits when the clock time has advanced exactly to, or past,
the requested clock notification time.
There are also interfaces for determining if a notification belongs to a
`TestClock` or not, as well as getting the number of requested clock
notifications so far.
N.B.: When a unit test waits for a certain amount of clock notifications to
be requested in `TestClock::wait_for_next_pending_id` or
`TestClock::wait_for_multiple_pending_ids` then these functions may block
for a long time. If they block forever then the expected clock notifications
were never requested from `TestClock`, and so the assumptions in the code
of the unit test are wrong. The unit test case runner in gstcheck is
expected to catch these cases either by the default test case timeout or the
one set for the unit test by calling tcase_set_timeout\(\).
The sample code below assumes that the element under test will delay a
buffer pushed on the source pad by some latency until it arrives on the sink
pad. Moreover it is assumed that the element will at some point call
`gst::Clock::id_wait` to synchronously wait for a specific time. The first
buffer sent will arrive exactly on time only delayed by the latency. The
second buffer will arrive a little late (7ms) due to simulated jitter in the
clock notification.
## Demonstration of how to work with clock notifications and `TestClock`
```C
#include <gst/gst.h>
#include <gst/check/gstcheck.h>
#include <gst/check/gsttestclock.h>
GstClockTime latency;
GstElement *element;
GstPad *srcpad;
GstClock *clock;
GstTestClock *test_clock;
GstBuffer *buf;
GstClockID pending_id;
GstClockID processed_id;
latency = 42 * GST_MSECOND;
element = create_element (latency, ...);
srcpad = get_source_pad (element);
clock = gst_test_clock_new ();
test_clock = GST_TEST_CLOCK (clock);
gst_element_set_clock (element, clock);
GST_INFO ("Set time, create and push the first buffer\n");
gst_test_clock_set_time (test_clock, 0);
buf = create_test_buffer (gst_clock_get_time (clock), ...);
g_assert_cmpint (gst_pad_push (srcpad, buf), ==, GST_FLOW_OK);
GST_INFO ("Block until element is waiting for a clock notification\n");
gst_test_clock_wait_for_next_pending_id (test_clock, &pending_id);
GST_INFO ("Advance to the requested time of the clock notification\n");
gst_test_clock_advance_time (test_clock, latency);
GST_INFO ("Release the next blocking wait and make sure it is the one from element\n");
processed_id = gst_test_clock_process_next_clock_id (test_clock);
g_assert (processed_id == pending_id);
g_assert_cmpint (GST_CLOCK_ENTRY_STATUS (processed_id), ==, GST_CLOCK_OK);
gst_clock_id_unref (pending_id);
gst_clock_id_unref (processed_id);
GST_INFO ("Validate that element produced an output buffer and check its timestamp\n");
g_assert_cmpint (get_number_of_output_buffer (...), ==, 1);
buf = get_buffer_pushed_by_element (element, ...);
g_assert_cmpint (GST_BUFFER_TIMESTAMP (buf), ==, latency);
gst_buffer_unref (buf);
GST_INFO ("Check that element does not wait for any clock notification\n");
g_assert (!gst_test_clock_peek_next_pending_id (test_clock, NULL));
GST_INFO ("Set time, create and push the second buffer\n");
gst_test_clock_advance_time (test_clock, 10 * GST_SECOND);
buf = create_test_buffer (gst_clock_get_time (clock), ...);
g_assert_cmpint (gst_pad_push (srcpad, buf), ==, GST_FLOW_OK);
GST_INFO ("Block until element is waiting for a new clock notification\n");
gst_test_clock_wait_for_next_pending_id (test_clock, &pending_id);
GST_INFO ("Advance 7ms past the requested time of the clock notification\n");
gst_test_clock_advance_time (test_clock, latency + 7 * GST_MSECOND);
GST_INFO ("Release the next blocking wait and make sure it is the one from element\n");
processed_id = gst_test_clock_process_next_clock_id (test_clock);
g_assert (processed_id == pending_id);
g_assert_cmpint (GST_CLOCK_ENTRY_STATUS (processed_id), ==, GST_CLOCK_OK);
gst_clock_id_unref (pending_id);
gst_clock_id_unref (processed_id);
GST_INFO ("Validate that element produced an output buffer and check its timestamp\n");
g_assert_cmpint (get_number_of_output_buffer (...), ==, 1);
buf = get_buffer_pushed_by_element (element, ...);
g_assert_cmpint (GST_BUFFER_TIMESTAMP (buf), ==,
10 * GST_SECOND + latency + 7 * GST_MSECOND);
gst_buffer_unref (buf);
GST_INFO ("Check that element does not wait for any clock notification\n");
g_assert (!gst_test_clock_peek_next_pending_id (test_clock, NULL));
...
```
Since `TestClock` is only supposed to be used in unit tests it calls
`g_assert`, `g_assert_cmpint` or `g_assert_cmpuint` to validate all function
arguments. This will highlight any issues with the unit test code itself.
# Implements
[`gst::ClockExt`](../gst/trait.ClockExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl TestClock::fn new -->
Creates a new test clock with its time set to zero.
MT safe.
# Returns
a `TestClock` cast to `gst::Clock`.
<!-- impl TestClock::fn new_with_start_time -->
Creates a new test clock with its time set to the specified time.
MT safe.
## `start_time`
a `gst::ClockTime` set to the desired start time of the clock.
# Returns
a `TestClock` cast to `gst::Clock`.
<!-- impl TestClock::fn id_list_get_latest_time -->
Finds the latest time inside the list.
MT safe.
## `pending_list`
List
of pending `GstClockIDs`
<!-- impl TestClock::fn advance_time -->
Advances the time of the `self` by the amount given by `delta`. The
time of `self` is monotonically increasing, therefore providing a
`delta` which is negative or zero is a programming error.
MT safe.
## `delta`
a positive `gst::ClockTimeDiff` to be added to the time of the clock
<!-- impl TestClock::fn crank -->
A "crank" consists of three steps:
1: Wait for a `gst::ClockID` to be registered with the `TestClock`.
2: Advance the `TestClock` to the time the `gst::ClockID` is waiting, unless
the clock time is already past the clock id (Since: 1.18).
3: Release the `gst::ClockID` wait.
A "crank" can be thought of as the notion of
manually driving the clock forward to its next logical step.
# Returns
`true` if the crank was successful, `false` otherwise.
MT safe.
<!-- impl TestClock::fn get_next_entry_time -->
Retrieve the requested time for the next pending clock notification.
MT safe.
# Returns
a `gst::ClockTime` set to the time of the next pending clock
notification. If no clock notifications have been requested
`GST_CLOCK_TIME_NONE` will be returned.
<!-- impl TestClock::fn has_id -->
Checks whether `self` was requested to provide the clock notification
given by `id`.
MT safe.
## `id`
a `gst::ClockID` clock notification
# Returns
`true` if the clock has been asked to provide the given clock
notification, `false` otherwise.
<!-- impl TestClock::fn peek_id_count -->
Determine the number of pending clock notifications that have been
requested from the `self`.
MT safe.
# Returns
the number of pending clock notifications.
<!-- impl TestClock::fn peek_next_pending_id -->
Determines if the `pending_id` is the next clock notification scheduled to
be triggered given the current time of the `self`.
MT safe.
## `pending_id`
a `gst::ClockID` clock
notification to look for
# Returns
`true` if `pending_id` is the next clock notification to be
triggered, `false` otherwise.
<!-- impl TestClock::fn process_id -->
Processes and releases the pending ID.
MT safe.
Feature: `v1_18`
## `pending_id`
`gst::ClockID`
<!-- impl TestClock::fn process_id_list -->
Processes and releases the pending IDs in the list.
MT safe.
## `pending_list`
List
of pending `GstClockIDs`
<!-- impl TestClock::fn process_next_clock_id -->
MT safe.
# Returns
a `gst::ClockID` containing the next pending clock
notification.
<!-- impl TestClock::fn set_time -->
Sets the time of `self` to the time given by `new_time`. The time of
`self` is monotonically increasing, therefore providing a `new_time`
which is earlier or equal to the time of the clock as given by
`gst::ClockExt::get_time` is a programming error.
MT safe.
## `new_time`
a `gst::ClockTime` later than that returned by `gst::ClockExt::get_time`
<!-- impl TestClock::fn timed_wait_for_multiple_pending_ids -->
Blocks until at least `count` clock notifications have been requested from
`self`, or the timeout expires.
MT safe.
Feature: `v1_16`
## `count`
the number of pending clock notifications to wait for
## `timeout_ms`
the timeout in milliseconds
## `pending_list`
Address
of a `glib::List` pointer variable to store the list of pending `GstClockIDs`
that expired, or `None`
# Returns
a `gboolean` `true` if the waits have been registered, `false` if not.
(This could mean that it timed out waiting, or that more waits than requested were found.)
<!-- impl TestClock::fn wait_for_multiple_pending_ids -->
Blocks until at least `count` clock notifications have been requested from
`self`. There is no timeout for this wait, see the main description of
`TestClock`.
MT safe.
## `count`
the number of pending clock notifications to wait for
## `pending_list`
Address
of a `glib::List` pointer variable to store the list of pending `GstClockIDs`
that expired, or `None`
<!-- impl TestClock::fn wait_for_next_pending_id -->
Waits until a clock notification is requested from `self`. There is no
timeout for this wait, see the main description of `TestClock`. A reference
to the pending clock notification is stored in `pending_id`.
MT safe.
## `pending_id`
`gst::ClockID`
with information about the pending clock notification
<!-- impl TestClock::fn wait_for_pending_id_count -->
Blocks until at least `count` clock notifications have been requested from
`self`. There is no timeout for this wait, see the main description of
`TestClock`.
# Deprecated
use `TestClock::wait_for_multiple_pending_ids` instead.
## `count`
the number of pending clock notifications to wait for
<!-- impl TestClock::fn get_property_start_time -->
When a `TestClock` is constructed it will have a certain start time set.
If the clock was created using `TestClock::new_with_start_time` then
this property contains the value of the `start_time` argument. If
`TestClock::new` was called the clock started at time zero, and thus
this property contains the value 0.
<!-- impl TestClock::fn set_property_start_time -->
When a `TestClock` is constructed it will have a certain start time set.
If the clock was created using `TestClock::new_with_start_time` then
this property contains the value of the `start_time` argument. If
`TestClock::new` was called the clock started at time zero, and thus
this property contains the value 0.

File diff suppressed because it is too large Load diff

1769
docs/gstreamer-gl/docs.md Normal file

File diff suppressed because it is too large Load diff

140
docs/gstreamer-net/docs.md Normal file
View file

@ -0,0 +1,140 @@
<!-- file * -->
<!-- struct NetClientClock -->
`NetClientClock` implements a custom `gst::Clock` that synchronizes its time
to a remote time provider such as `NetTimeProvider`. `NtpClock`
implements a `gst::Clock` that synchronizes its time to a remote NTPv4 server.
A new clock is created with `NetClientClock::new` or
`NtpClock::new`, which takes the address and port of the remote time
provider along with a name and an initial time.
This clock will poll the time provider and will update its calibration
parameters based on the local and remote observations.
The "round-trip" property limits the maximum round trip time that packets can take.
Various parameters of the clock can be configured with the parent `gst::Clock`
"timeout", "window-size" and "window-threshold" object properties.
A `NetClientClock` and `NtpClock` is typically set on a `gst::Pipeline` with
`gst::Pipeline::use_clock`.
If you set a `gst::Bus` on the clock via the "bus" object property, it will
send `gst::MessageType::Element` messages with an attached `gst::Structure` containing
statistics about clock accuracy and network traffic.
# Implements
[`gst::ClockExt`](../gst/trait.ClockExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl NetClientClock::fn new -->
Create a new `NetClientClock` that will report the time
provided by the `NetTimeProvider` on `remote_address` and
`remote_port`.
## `name`
a name for the clock
## `remote_address`
the address or hostname of the remote clock provider
## `remote_port`
the port of the remote clock provider
## `base_time`
initial time of the clock
# Returns
a new `gst::Clock` that receives a time from the remote
clock.
<!-- struct NetTimeProvider -->
This object exposes the time of a `gst::Clock` on the network.
A `NetTimeProvider` is created with `NetTimeProvider::new` which
takes a `gst::Clock`, an address and a port number as arguments.
After creating the object, a client clock such as `NetClientClock` can
query the exposed clock over the network for its values.
The `NetTimeProvider` typically wraps the clock used by a `gst::Pipeline`.
# Implements
[`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl NetTimeProvider::fn new -->
Allows network clients to get the current time of `clock`.
## `clock`
a `gst::Clock` to export over the network
## `address`
an address to bind on as a dotted quad
(xxx.xxx.xxx.xxx), IPv6 address, or NULL to bind to all addresses
## `port`
a port to bind on, or 0 to let the kernel choose
# Returns
the new `NetTimeProvider`, or NULL on error
<!-- struct NtpClock -->
# Implements
[`NetClientClockExt`](trait.NetClientClockExt.html), [`gst::ClockExt`](../gst/trait.ClockExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl NtpClock::fn new -->
Create a new `NtpClock` that will report the time provided by
the NTPv4 server on `remote_address` and `remote_port`.
## `name`
a name for the clock
## `remote_address`
the address or hostname of the remote clock provider
## `remote_port`
the port of the remote clock provider
## `base_time`
initial time of the clock
# Returns
a new `gst::Clock` that receives a time from the remote
clock.
<!-- struct PtpClock -->
GstPtpClock implements a PTP (IEEE1588:2008) ordinary clock in slave-only
mode, that allows a GStreamer pipeline to synchronize to a PTP network
clock in some specific domain.
The PTP subsystem can be initialized with `gst_ptp_init`, which then starts
a helper process to do the actual communication via the PTP ports. This is
required as PTP listens on ports < 1024 and thus requires special
privileges. Once this helper process is started, the main process will
synchronize to all PTP domains that are detected on the selected
interfaces.
`PtpClock::new` then allows to create a GstClock that provides the PTP
time from a master clock inside a specific PTP domain. This clock will only
return valid timestamps once the timestamps in the PTP domain are known. To
check this, you can use `gst::ClockExt::wait_for_sync`, the GstClock::synced
signal and `gst::ClockExt::is_synced`.
To gather statistics about the PTP clock synchronization,
`gst_ptp_statistics_callback_add` can be used. This gives the application
the possibility to collect all kinds of statistics from the clock
synchronization.
# Implements
[`gst::ClockExt`](../gst/trait.ClockExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl PtpClock::fn new -->
Creates a new PTP clock instance that exports the PTP time of the master
clock in `domain`. This clock can be slaved to other clocks as needed.
If `gst_ptp_init` was not called before, this will call `gst_ptp_init` with
default parameters.
This clock only returns valid timestamps after it received the first
times from the PTP master clock on the network. Once this happens the
GstPtpClock::internal-clock property will become non-NULL. You can
check this with `gst::ClockExt::wait_for_sync`, the GstClock::synced signal and
`gst::ClockExt::is_synced`.
## `name`
Name of the clock
## `domain`
PTP domain
# Returns
A new `gst::Clock`

View file

@ -0,0 +1,944 @@
<!-- file * -->
<!-- struct Discoverer -->
The `Discoverer` is a utility object which allows one to get as much
information as possible from one or many URIs.
It provides two APIs, allowing usage in blocking or non-blocking mode.
The blocking mode just requires calling `Discoverer::discover_uri`
with the URI one wishes to discover.
The non-blocking mode requires a running `glib::MainLoop` iterating a
`glib::MainContext`, where one connects to the various signals, appends the
URIs to be processed (through `Discoverer::discover_uri_async`) and then
asks for the discovery to begin (through `Discoverer::start`).
By default this will use the GLib default main context unless you have
set a custom context using `glib::MainContext::push_thread_default`.
All the information is returned in a `DiscovererInfo` structure.
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl Discoverer::fn new -->
Creates a new `Discoverer` with the provided timeout.
## `timeout`
timeout per file, in nanoseconds. Allowed are values between
one second (`GST_SECOND`) and one hour (3600 * `GST_SECOND`)
# Returns
The new `Discoverer`.
If an error occurred when creating the discoverer, `err` will be set
accordingly and `None` will be returned. If `err` is set, the caller must
free it when no longer needed using `glib::Error::free`.
<!-- impl Discoverer::fn discover_uri -->
Synchronously discovers the given `uri`.
A copy of `uri` will be made internally, so the caller can safely `g_free`
afterwards.
## `uri`
The URI to run on.
# Returns
the result of the scanning. Can be `None` if an
error occurred.
<!-- impl Discoverer::fn discover_uri_async -->
Appends the given `uri` to the list of URIs to discover. The actual
discovery of the `uri` will only take place if `Discoverer::start` has
been called.
A copy of `uri` will be made internally, so the caller can safely `g_free`
afterwards.
## `uri`
the URI to add.
# Returns
`true` if the `uri` was successfully appended to the list of pending
uris, else `false`
<!-- impl Discoverer::fn start -->
Allow asynchronous discovering of URIs to take place.
A `glib::MainLoop` must be available for `Discoverer` to properly work in
asynchronous mode.
<!-- impl Discoverer::fn stop -->
Stop the discovery of any pending URIs and clears the list of
pending URIs (if any).
<!-- impl Discoverer::fn connect_discovered -->
Will be emitted in async mode when all information on a URI could be
discovered, or an error occurred.
When an error occurs, `info` might still contain some partial information,
depending on the circumstances of the error.
## `info`
the results `DiscovererInfo`
## `error`
`glib::Error`, which will be non-NULL
if an error occurred during
discovery. You must not free
this `glib::Error`, it will be freed by
the discoverer.
<!-- impl Discoverer::fn connect_finished -->
Will be emitted in async mode when all pending URIs have been processed.
<!-- impl Discoverer::fn connect_source_setup -->
This signal is emitted after the source element has been created for
the URI being discovered, so it can be configured by setting additional
properties (e.g. set a proxy server for an http source, or set the device
and read speed for an audio cd source).
This signal is usually emitted from the context of a GStreamer streaming
thread.
## `source`
source element
<!-- impl Discoverer::fn connect_starting -->
Will be emitted when the discoverer starts analyzing the pending URIs
<!-- impl Discoverer::fn get_property_timeout -->
The duration (in nanoseconds) after which the discovery of an individual
URI will timeout.
If the discovery of a URI times out, the `DiscovererResult::Timeout` will be
set on the result flags.
<!-- impl Discoverer::fn set_property_timeout -->
The duration (in nanoseconds) after which the discovery of an individual
URI will timeout.
If the discovery of a URI times out, the `DiscovererResult::Timeout` will be
set on the result flags.
<!-- struct DiscovererAudioInfo -->
`DiscovererStreamInfo` specific to audio streams.
# Implements
[`DiscovererStreamInfoExt`](trait.DiscovererStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl DiscovererAudioInfo::fn get_bitrate -->
# Returns
the average or nominal bitrate of the stream in bits/second.
<!-- impl DiscovererAudioInfo::fn get_channel_mask -->
Feature: `v1_14`
# Returns
the channel-mask of the stream, refer to
`gst_audio_channel_positions_from_mask` for more
information.
<!-- impl DiscovererAudioInfo::fn get_channels -->
# Returns
the number of channels in the stream.
<!-- impl DiscovererAudioInfo::fn get_depth -->
# Returns
the number of bits used per sample in each channel.
<!-- impl DiscovererAudioInfo::fn get_language -->
# Returns
the language of the stream, or NULL if unknown.
<!-- impl DiscovererAudioInfo::fn get_max_bitrate -->
# Returns
the maximum bitrate of the stream in bits/second.
<!-- impl DiscovererAudioInfo::fn get_sample_rate -->
# Returns
the sample rate of the stream in Hertz.
<!-- struct DiscovererContainerInfo -->
`DiscovererStreamInfo` specific to container streams.
# Implements
[`DiscovererStreamInfoExt`](trait.DiscovererStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl DiscovererContainerInfo::fn get_streams -->
# Returns
the list of
`DiscovererStreamInfo` this container stream offers.
Free with `DiscovererStreamInfo::list_free` after usage.
<!-- struct DiscovererInfo -->
Structure containing the information of a URI analyzed by `Discoverer`.
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl DiscovererInfo::fn from_variant -->
Parses a `glib::Variant` as produced by `DiscovererInfo::to_variant`
back to a `DiscovererInfo`.
## `variant`
A `glib::Variant` to deserialize into a `DiscovererInfo`.
# Returns
A newly-allocated `DiscovererInfo`.
<!-- impl DiscovererInfo::fn copy -->
# Returns
A copy of the `DiscovererInfo`
<!-- impl DiscovererInfo::fn get_audio_streams -->
Finds all the `DiscovererAudioInfo` contained in `self`
# Returns
A `glib::List` of
matching `DiscovererStreamInfo`. The caller should free it with
`DiscovererStreamInfo::list_free`.
<!-- impl DiscovererInfo::fn get_container_streams -->
Finds all the `DiscovererContainerInfo` contained in `self`
# Returns
A `glib::List` of
matching `DiscovererStreamInfo`. The caller should free it with
`DiscovererStreamInfo::list_free`.
<!-- impl DiscovererInfo::fn get_duration -->
# Returns
the duration of the URI in `gst::ClockTime` (nanoseconds).
<!-- impl DiscovererInfo::fn get_live -->
Feature: `v1_14`
# Returns
whether the URI is live.
<!-- impl DiscovererInfo::fn get_misc -->
# Deprecated
This function is deprecated since version 1.4, use
`DiscovererInfo::get_missing_elements_installer_details`
# Returns
Miscellaneous information stored as a `gst::Structure`
(for example: information about missing plugins). If you wish to use the
`gst::Structure` after the life-time of `self`, you will need to copy it.
<!-- impl DiscovererInfo::fn get_missing_elements_installer_details -->
Get the installer details for missing elements
# Returns
An array of strings
containing information about how to install the various missing elements
for `self` to be usable. If you wish to use the strings after the life-time
of `self`, you will need to copy them.
<!-- impl DiscovererInfo::fn get_result -->
# Returns
the result of the discovery as a `DiscovererResult`.
<!-- impl DiscovererInfo::fn get_seekable -->
# Returns
whether the URI is seekable.
<!-- impl DiscovererInfo::fn get_stream_info -->
# Returns
the structure (or topology) of the URI as a
`DiscovererStreamInfo`.
This structure can be traversed to see the original hierarchy. Unref with
`gst_discoverer_stream_info_unref` after usage.
<!-- impl DiscovererInfo::fn get_stream_list -->
# Returns
the list of
all streams contained in the `info`. Free after usage
with `DiscovererStreamInfo::list_free`.
<!-- impl DiscovererInfo::fn get_streams -->
Finds the `DiscovererStreamInfo` contained in `self` that match the
given `streamtype`.
## `streamtype`
a `glib::Type` derived from `DiscovererStreamInfo`
# Returns
A `glib::List` of
matching `DiscovererStreamInfo`. The caller should free it with
`DiscovererStreamInfo::list_free`.
<!-- impl DiscovererInfo::fn get_subtitle_streams -->
Finds all the `DiscovererSubtitleInfo` contained in `self`
# Returns
A `glib::List` of
matching `DiscovererStreamInfo`. The caller should free it with
`DiscovererStreamInfo::list_free`.
<!-- impl DiscovererInfo::fn get_tags -->
# Returns
all tags contained in the URI. If you wish to use
the tags after the life-time of `self`, you will need to copy them.
<!-- impl DiscovererInfo::fn get_toc -->
# Returns
TOC contained in the URI. If you wish to use
the TOC after the life-time of `self`, you will need to copy it.
<!-- impl DiscovererInfo::fn get_uri -->
# Returns
the URI to which this information corresponds to.
Copy it if you wish to use it after the life-time of `self`.
<!-- impl DiscovererInfo::fn get_video_streams -->
Finds all the `DiscovererVideoInfo` contained in `self`
# Returns
A `glib::List` of
matching `DiscovererStreamInfo`. The caller should free it with
`DiscovererStreamInfo::list_free`.
<!-- impl DiscovererInfo::fn to_variant -->
Serializes `self` to a `glib::Variant` that can be parsed again
through `DiscovererInfo::from_variant`.
Note that any `gst::Toc` (s) that might have been discovered will not be serialized
for now.
## `flags`
A combination of `DiscovererSerializeFlags` to specify
what needs to be serialized.
# Returns
A newly-allocated `glib::Variant` representing `self`.
<!-- enum DiscovererResult -->
Result values for the discovery process.
<!-- enum DiscovererResult::variant Ok -->
The discovery was successful
<!-- enum DiscovererResult::variant UriInvalid -->
the URI is invalid
<!-- enum DiscovererResult::variant Error -->
an error happened and the GError is set
<!-- enum DiscovererResult::variant Timeout -->
the discovery timed-out
<!-- enum DiscovererResult::variant Busy -->
the discoverer was already discovering a file
<!-- enum DiscovererResult::variant MissingPlugins -->
Some plugins are missing for full discovery
<!-- struct DiscovererSerializeFlags -->
You can use these flags to control what is serialized by
`DiscovererInfo::to_variant`
<!-- struct DiscovererSerializeFlags::const BASIC -->
Serialize only basic information, excluding
caps, tags and miscellaneous information
<!-- struct DiscovererSerializeFlags::const CAPS -->
Serialize the caps for each stream
<!-- struct DiscovererSerializeFlags::const TAGS -->
Serialize the tags for each stream
<!-- struct DiscovererSerializeFlags::const MISC -->
Serialize miscellaneous information for each stream
<!-- struct DiscovererSerializeFlags::const ALL -->
Serialize all the available info, including
caps, tags and miscellaneous information
<!-- struct DiscovererStreamInfo -->
Base structure for information concerning a media stream. Depending on the
stream type, one can find more media-specific information in
`DiscovererAudioInfo`, `DiscovererVideoInfo`, and
`DiscovererContainerInfo`.
The `DiscovererStreamInfo` represents the topology of the stream. Siblings
can be iterated over with `DiscovererStreamInfoExt::get_next` and
`DiscovererStreamInfoExt::get_previous`. Children (sub-streams) of a
stream can be accessed using the `DiscovererContainerInfo` API.
As a simple example, if you run `Discoverer` on an AVI file with one audio
and one video stream, you will get a `DiscovererContainerInfo`
corresponding to the AVI container, which in turn will have a
`DiscovererAudioInfo` sub-stream and a `DiscovererVideoInfo` sub-stream
for the audio and video streams respectively.
# Implements
[`DiscovererStreamInfoExt`](trait.DiscovererStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- trait DiscovererStreamInfoExt -->
Trait containing all `DiscovererStreamInfo` methods.
# Implementors
[`DiscovererAudioInfo`](struct.DiscovererAudioInfo.html), [`DiscovererContainerInfo`](struct.DiscovererContainerInfo.html), [`DiscovererStreamInfo`](struct.DiscovererStreamInfo.html), [`DiscovererSubtitleInfo`](struct.DiscovererSubtitleInfo.html), [`DiscovererVideoInfo`](struct.DiscovererVideoInfo.html)
<!-- impl DiscovererStreamInfo::fn list_free -->
Decrements the reference count of all contained `DiscovererStreamInfo`
and frees the `glib::List`.
## `infos`
a `glib::List` of `DiscovererStreamInfo`
<!-- trait DiscovererStreamInfoExt::fn get_caps -->
# Returns
the `gst::Caps` of the stream. Unref with
`gst::Caps::unref` after usage.
<!-- trait DiscovererStreamInfoExt::fn get_misc -->
# Deprecated
This function is deprecated since version 1.4, use
`DiscovererInfo::get_missing_elements_installer_details`
# Returns
additional information regarding the stream (for
example codec version, profile, etc..). If you wish to use the `gst::Structure`
after the life-time of `self` you will need to copy it.
<!-- trait DiscovererStreamInfoExt::fn get_next -->
# Returns
the next `DiscovererStreamInfo` in a chain. `None`
for final streams.
Unref with `gst_discoverer_stream_info_unref` after usage.
<!-- trait DiscovererStreamInfoExt::fn get_previous -->
# Returns
the previous `DiscovererStreamInfo` in a chain.
`None` for starting points. Unref with `gst_discoverer_stream_info_unref`
after usage.
<!-- trait DiscovererStreamInfoExt::fn get_stream_id -->
# Returns
the stream ID of this stream. If you wish to
use the stream ID after the life-time of `self` you will need to copy it.
<!-- trait DiscovererStreamInfoExt::fn get_stream_type_nick -->
# Returns
a human readable name for the stream type of the given `self` (ex : "audio",
"container",...).
<!-- trait DiscovererStreamInfoExt::fn get_tags -->
# Returns
the tags contained in this stream. If you wish to
use the tags after the life-time of `self` you will need to copy them.
<!-- trait DiscovererStreamInfoExt::fn get_toc -->
# Returns
the TOC contained in this stream. If you wish to
use the TOC after the life-time of `self` you will need to copy it.
<!-- struct DiscovererSubtitleInfo -->
`DiscovererStreamInfo` specific to subtitle streams (this includes text and
image based ones).
# Implements
[`DiscovererStreamInfoExt`](trait.DiscovererStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl DiscovererSubtitleInfo::fn get_language -->
# Returns
the language of the stream, or NULL if unknown.
<!-- struct DiscovererVideoInfo -->
`DiscovererStreamInfo` specific to video streams (this includes images).
# Implements
[`DiscovererStreamInfoExt`](trait.DiscovererStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl DiscovererVideoInfo::fn get_bitrate -->
# Returns
the average or nominal bitrate of the video stream in bits/second.
<!-- impl DiscovererVideoInfo::fn get_depth -->
# Returns
the depth in bits of the video stream.
<!-- impl DiscovererVideoInfo::fn get_framerate_denom -->
# Returns
the framerate of the video stream (denominator).
<!-- impl DiscovererVideoInfo::fn get_framerate_num -->
# Returns
the framerate of the video stream (numerator).
<!-- impl DiscovererVideoInfo::fn get_height -->
# Returns
the height of the video stream in pixels.
<!-- impl DiscovererVideoInfo::fn get_max_bitrate -->
# Returns
the maximum bitrate of the video stream in bits/second.
<!-- impl DiscovererVideoInfo::fn get_par_denom -->
# Returns
the Pixel Aspect Ratio (PAR) of the video stream (denominator).
<!-- impl DiscovererVideoInfo::fn get_par_num -->
# Returns
the Pixel Aspect Ratio (PAR) of the video stream (numerator).
<!-- impl DiscovererVideoInfo::fn get_width -->
# Returns
the width of the video stream in pixels.
<!-- impl DiscovererVideoInfo::fn is_image -->
# Returns
`true` if the video stream corresponds to an image (i.e. only contains
one frame).
<!-- impl DiscovererVideoInfo::fn is_interlaced -->
# Returns
`true` if the stream is interlaced, else `false`.
<!-- struct EncodingAudioProfile -->
Variant of `EncodingProfile` for audio streams.
# Implements
[`EncodingProfileExt`](trait.EncodingProfileExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl EncodingAudioProfile::fn new -->
Creates a new `EncodingAudioProfile`
All provided allocatable arguments will be internally copied, so can be
safely freed/unreferenced after calling this method.
## `format`
the `gst::Caps`
## `preset`
the preset(s) to use on the encoder, can be `None`
## `restriction`
the `gst::Caps` used to restrict the input to the encoder, can be
NULL. See `EncodingProfile::get_restriction` for more details.
## `presence`
the number of times this stream must be used. 0 means any number of
times (including never)
# Returns
the newly created `EncodingAudioProfile`.
<!-- struct EncodingContainerProfile -->
Encoding profiles for containers. Keeps track of a list of `EncodingProfile`
# Implements
[`EncodingProfileExt`](trait.EncodingProfileExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl EncodingContainerProfile::fn new -->
Creates a new `EncodingContainerProfile`.
## `name`
The name of the container profile, can be `None`
## `description`
The description of the container profile,
can be `None`
## `format`
The format to use for this profile
## `preset`
The preset to use for this profile.
# Returns
The newly created `EncodingContainerProfile`.
<!-- impl EncodingContainerProfile::fn add_profile -->
Add a `EncodingProfile` to the list of profiles handled by `self`.
No copy of `profile` will be made, if you wish to use it elsewhere after this
method you should increment its reference count.
## `profile`
the `EncodingProfile` to add.
# Returns
`true` if the `stream` was properly added, else `false`.
<!-- impl EncodingContainerProfile::fn contains_profile -->
Checks if `self` contains a `EncodingProfile` identical to
`profile`.
## `profile`
a `EncodingProfile`
# Returns
`true` if `self` contains a `EncodingProfile` identical
to `profile`, else `false`.
<!-- impl EncodingContainerProfile::fn get_profiles -->
# Returns
the list of contained `EncodingProfile`.
<!-- struct EncodingProfile -->
The opaque base class object for all encoding profiles. This contains generic
information like name, description, format and preset.
# Implements
[`EncodingProfileExt`](trait.EncodingProfileExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- trait EncodingProfileExt -->
Trait containing all `EncodingProfile` methods.
# Implementors
[`EncodingAudioProfile`](struct.EncodingAudioProfile.html), [`EncodingContainerProfile`](struct.EncodingContainerProfile.html), [`EncodingProfile`](struct.EncodingProfile.html), [`EncodingVideoProfile`](struct.EncodingVideoProfile.html)
<!-- impl EncodingProfile::fn find -->
Find the `EncodingProfile` with the specified name and category.
## `targetname`
The name of the target
## `profilename`
The name of the profile, if `None`
provided, it will default to the encoding profile called `default`.
## `category`
The target category. Can be `None`
# Returns
The matching `EncodingProfile` or `None`.
<!-- impl EncodingProfile::fn from_discoverer -->
Creates a `EncodingProfile` matching the formats from the given
`DiscovererInfo`. Streams other than audio or video (eg,
subtitles), are currently ignored.
## `info`
The `DiscovererInfo` to read from
# Returns
The new `EncodingProfile` or `None`.
<!-- trait EncodingProfileExt::fn copy -->
Makes a deep copy of `self`
Feature: `v1_12`
# Returns
The copy of `self`
<!-- trait EncodingProfileExt::fn get_allow_dynamic_output -->
Get whether the format that has been negotiated at some point can be renegotiated
later during the encoding.
<!-- trait EncodingProfileExt::fn get_description -->
# Returns
the description of the profile, can be `None`.
<!-- trait EncodingProfileExt::fn get_file_extension -->
# Returns
a suitable file extension for `self`, or NULL.
<!-- trait EncodingProfileExt::fn get_format -->
# Returns
the `gst::Caps` corresponding to the media format used
in the profile. Unref after usage.
<!-- trait EncodingProfileExt::fn get_input_caps -->
Computes the full output caps that this `self` will be able to consume.
# Returns
The full caps the given `self` can consume. Call
`gst::Caps::unref` when you are done with the caps.
<!-- trait EncodingProfileExt::fn get_name -->
# Returns
the name of the profile, can be `None`.
<!-- trait EncodingProfileExt::fn get_presence -->
# Returns
The number of times the profile is used in its parent
container profile. If 0, it is not a mandatory stream.
<!-- trait EncodingProfileExt::fn get_preset -->
# Returns
the name of the `gst::Preset` to be used in the profile.
This is the name that has been set when saving the preset.
<!-- trait EncodingProfileExt::fn get_preset_name -->
# Returns
the name of the `gst::Preset` factory to be used in the profile.
<!-- trait EncodingProfileExt::fn get_restriction -->
# Returns
The restriction `gst::Caps` to apply before the encoder
that will be used in the profile. The fields present in restriction caps are
properties of the raw stream (that is before encoding), such as height and
width for video and depth and sampling rate for audio. Does not apply to
`EncodingContainerProfile` (since there is no corresponding raw stream).
Can be `None`. Unref after usage.
<!-- trait EncodingProfileExt::fn get_single_segment -->
Feature: `v1_18`
# Returns
`true` if the stream represented by `self` should use a single
segment before the encoder, `false` otherwise. This means that buffers will be retimestamped
and segments will be eaten so as to appear as one segment.
<!-- trait EncodingProfileExt::fn get_type_nick -->
# Returns
the human-readable name of the type of `self`.
<!-- trait EncodingProfileExt::fn is_equal -->
Checks whether the two `EncodingProfile` are equal
## `b`
a `EncodingProfile`
# Returns
`true` if `self` and `b` are equal, else `false`.
<!-- trait EncodingProfileExt::fn set_allow_dynamic_output -->
Sets whether the format that has been negotiated at some point can be renegotiated
later during the encoding.
## `allow_dynamic_output`
Whether the format that has been negotiated first can be renegotiated
during the encoding
<!-- trait EncodingProfileExt::fn set_description -->
Set `description` as the given description for the `self`. A copy of
`description` will be made internally.
## `description`
the description to set on the profile
<!-- trait EncodingProfileExt::fn set_enabled -->
Set whether the profile should be used or not.
## `enabled`
`false` to disable `self`, `true` to enable it
<!-- trait EncodingProfileExt::fn set_format -->
Sets the media format used in the profile.
## `format`
the media format to use in the profile.
<!-- trait EncodingProfileExt::fn set_name -->
Set `name` as the given name for the `self`. A copy of `name` will be made
internally.
## `name`
the name to set on the profile
<!-- trait EncodingProfileExt::fn set_presence -->
Set the number of times the profile is used in its parent
container profile. If 0, it is not a mandatory stream
## `presence`
the number of times the profile can be used
<!-- trait EncodingProfileExt::fn set_preset -->
Sets the name of the `gst::Element` that implements the `gst::Preset` interface
to use for the profile.
This is the name that has been set when saving the preset.
## `preset`
the element preset to use
<!-- trait EncodingProfileExt::fn set_preset_name -->
Sets the name of the `gst::Preset`'s factory to be used in the profile.
## `preset_name`
The name of the preset to use in this `self`.
<!-- trait EncodingProfileExt::fn set_restriction -->
Set the restriction `gst::Caps` to apply before the encoder
that will be used in the profile. See `EncodingProfile::get_restriction`
for more about restrictions. Does not apply to `EncodingContainerProfile`.
## `restriction`
the restriction to apply
<!-- trait EncodingProfileExt::fn set_single_segment -->
If using a single segment, buffers will be retimestamped
and segments will be eaten so as to appear as one segment.
Feature: `v1_18`
## `single_segment`
`true` if the stream represented by `self` should use a single
segment before the encoder `false` otherwise.
<!-- struct EncodingTarget -->
Collection of `EncodingProfile` for a specific target or use-case.
When being stored/loaded, targets come from a specific category, like
`GST_ENCODING_CATEGORY_DEVICE`.
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl EncodingTarget::fn new -->
Creates a new `EncodingTarget`.
The name and category can only consist of lowercase ASCII letters for the
first character, followed by either lowercase ASCII letters, digits or
hyphens ('-').
The `category` *should* be one of the existing
well-defined categories, like `GST_ENCODING_CATEGORY_DEVICE`, but it
*can* be an application- or user-specific category if
needed.
## `name`
The name of the target.
## `category`
The name of the category to which this `target`
belongs. For example: `GST_ENCODING_CATEGORY_DEVICE`.
## `description`
A description of `EncodingTarget` in the
current locale.
## `profiles`
A `glib::List` of
`EncodingProfile`.
# Returns
The newly created `EncodingTarget` or `None` if
there was an error.
<!-- impl EncodingTarget::fn load -->
Searches for the `EncodingTarget` with the given name, loads it
and returns it.
If the category name is specified only targets from that category will be
searched for.
## `name`
the name of the `EncodingTarget` to load (automatically
converted to lower case internally as capital letters are not
valid for target names).
## `category`
the name of the target category, like
`GST_ENCODING_CATEGORY_DEVICE`. Can be `None`
# Returns
The `EncodingTarget` if available, else `None`.
<!-- impl EncodingTarget::fn load_from_file -->
Opens the provided file and returns the contained `EncodingTarget`.
## `filepath`
The file location to load the `EncodingTarget` from
# Returns
The `EncodingTarget` contained in the file, else
`None`
<!-- impl EncodingTarget::fn add_profile -->
Adds the given `profile` to the `self`. Each added profile must have
a unique name within the profile.
The `self` will steal a reference to the `profile`. If you wish to use
the profile after calling this method, you should increase its reference
count.
## `profile`
the `EncodingProfile` to add
# Returns
`true` if the profile was added, else `false`.
<!-- impl EncodingTarget::fn get_category -->
# Returns
The category of the `self`. For example:
`GST_ENCODING_CATEGORY_DEVICE`.
<!-- impl EncodingTarget::fn get_description -->
# Returns
The description of the `self`.
<!-- impl EncodingTarget::fn get_name -->
# Returns
The name of the `self`.
<!-- impl EncodingTarget::fn get_path -->
Feature: `v1_18`
# Returns
The path to the `self` file.
<!-- impl EncodingTarget::fn get_profile -->
## `name`
the name of the profile to retrieve
# Returns
The matching `EncodingProfile`, or `None`.
<!-- impl EncodingTarget::fn get_profiles -->
# Returns
A list of
`EncodingProfile`(s) this `self` handles.
<!-- impl EncodingTarget::fn save -->
Saves the `self` to a default user-local directory.
# Returns
`true` if the target was correctly saved, else `false`.
<!-- impl EncodingTarget::fn save_to_file -->
Saves the `self` to the provided file location.
## `filepath`
the location to store the `self` at.
# Returns
`true` if the target was correctly saved, else `false`.
<!-- struct EncodingVideoProfile -->
Variant of `EncodingProfile` for video streams, allows specifying the `pass`.
# Implements
[`EncodingProfileExt`](trait.EncodingProfileExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl EncodingVideoProfile::fn new -->
Creates a new `EncodingVideoProfile`
All provided allocatable arguments will be internally copied, so can be
safely freed/unreferenced after calling this method.
If you wish to control the pass number (in case of multi-pass scenarios),
please refer to the `EncodingVideoProfile::set_pass` documentation.
If you wish to use/force a constant framerate please refer to the
`EncodingVideoProfile::set_variableframerate` documentation.
## `format`
the `gst::Caps`
## `preset`
the preset(s) to use on the encoder, can be `None`
## `restriction`
the `gst::Caps` used to restrict the input to the encoder, can be
NULL. See `EncodingProfile::get_restriction` for more details.
## `presence`
the number of times this stream must be used. 0 means any number of
times (including never)
# Returns
the newly created `EncodingVideoProfile`.
<!-- impl EncodingVideoProfile::fn get_pass -->
Get the pass number if this is part of a multi-pass profile.
# Returns
The pass number. Starts at 1 for multi-pass. 0 if this is
not a multi-pass profile
<!-- impl EncodingVideoProfile::fn get_variableframerate -->
# Returns
Whether non-constant video framerate is allowed for encoding.
<!-- impl EncodingVideoProfile::fn set_pass -->
Sets the pass number of this video profile. The first pass profile should have
this value set to 1. If this video profile isn't part of a multi-pass profile,
you may set it to 0 (the default value).
## `pass`
the pass number for this profile
<!-- impl EncodingVideoProfile::fn set_variableframerate -->
If set to `true`, then the incoming stream will be allowed to have non-constant
framerate. If set to `false` (default value), then the incoming stream will
be normalized by dropping/duplicating frames in order to produce a
constant framerate.
## `variableframerate`
a boolean

View file

@ -0,0 +1,740 @@
<!-- file * -->
<!-- struct Player -->
# Implements
[`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl Player::fn new -->
Creates a new `Player` instance that uses `signal_dispatcher` to dispatch
signals to some event loop system, or emits signals directly if NULL is
passed. See `PlayerGMainContextSignalDispatcher::new`.
Video is going to be rendered by `video_renderer`, or if `None` is provided
no special video set up will be done and some default handling will be
performed.
## `video_renderer`
GstPlayerVideoRenderer to use
## `signal_dispatcher`
GstPlayerSignalDispatcher to use
# Returns
a new `Player` instance
<!-- impl Player::fn config_get_position_update_interval -->
## `config`
a `Player` configuration
# Returns
current position update interval in milliseconds
<!-- impl Player::fn config_get_seek_accurate -->
## `config`
a `Player` configuration
# Returns
`true` if accurate seeking is enabled
<!-- impl Player::fn config_get_user_agent -->
Return the user agent which has been configured using
`Player::config_set_user_agent` if any.
## `config`
a `Player` configuration
# Returns
the configured agent, or `None`
<!-- impl Player::fn config_set_position_update_interval -->
set interval in milliseconds between two position-updated signals.
pass 0 to stop updating the position.
## `config`
a `Player` configuration
## `interval`
interval in ms
<!-- impl Player::fn config_set_seek_accurate -->
Enable or disable accurate seeking. When enabled, elements will try harder
to seek as accurately as possible to the requested seek position. Generally
it will be slower especially for formats that don't have any indexes or
timestamp markers in the stream.
If accurate seeking is disabled, elements will seek as close as possible to the
requested position without slowing down seeking too much.
Accurate seeking is disabled by default.
## `config`
a `Player` configuration
## `accurate`
accurate seek or not
<!-- impl Player::fn config_set_user_agent -->
Set the user agent to pass to the server if `player` needs to connect
to a server during playback. This is typically used when playing HTTP
or RTSP streams.
## `config`
a `Player` configuration
## `agent`
the string to use as user agent
<!-- impl Player::fn get_audio_streams -->
## `info`
a `PlayerMediaInfo`
# Returns
A `glib::List` of
matching `PlayerAudioInfo`.
<!-- impl Player::fn get_subtitle_streams -->
## `info`
a `PlayerMediaInfo`
# Returns
A `glib::List` of
matching `PlayerSubtitleInfo`.
<!-- impl Player::fn get_video_streams -->
## `info`
a `PlayerMediaInfo`
# Returns
A `glib::List` of
matching `PlayerVideoInfo`.
<!-- impl Player::fn visualizations_free -->
Frees a `None` terminated array of `PlayerVisualization`.
## `viss`
a `None` terminated array of `PlayerVisualization` to free
<!-- impl Player::fn visualizations_get -->
# Returns
a `None` terminated array containing all available
visualizations. Use `Player::visualizations_free` after
usage.
<!-- impl Player::fn get_audio_video_offset -->
Retrieve the current value of audio-video-offset property
# Returns
The current value of audio-video-offset in nanoseconds
<!-- impl Player::fn get_color_balance -->
Retrieve the current value of the indicated `type_`.
## `type_`
`PlayerColorBalanceType`
# Returns
The current value of `type_`, between [0,1]. In case of
error -1 is returned.
<!-- impl Player::fn get_config -->
Get a copy of the current configuration of the player. This configuration
can either be modified and used for the `Player::set_config` call
or it must be freed after usage.
# Returns
a copy of the current configuration of `self`. Use
`gst::Structure::free` after usage or `Player::set_config`.
<!-- impl Player::fn get_current_audio_track -->
A Function to get current audio `PlayerAudioInfo` instance.
# Returns
current audio track.
The caller should free it with `gobject::ObjectExt::unref`
<!-- impl Player::fn get_current_subtitle_track -->
A Function to get current subtitle `PlayerSubtitleInfo` instance.
# Returns
current subtitle track.
The caller should free it with `gobject::ObjectExt::unref`
<!-- impl Player::fn get_current_video_track -->
A Function to get current video `PlayerVideoInfo` instance.
# Returns
current video track.
The caller should free it with `gobject::ObjectExt::unref`
<!-- impl Player::fn get_current_visualization -->
# Returns
Name of the currently enabled visualization.
`g_free` after usage.
<!-- impl Player::fn get_duration -->
Retrieves the duration of the media stream that `self` represents.
# Returns
the duration of the currently-playing media stream, in
nanoseconds.
<!-- impl Player::fn get_media_info -->
A Function to get the current media info `PlayerMediaInfo` instance.
# Returns
media info instance.
The caller should free it with `gobject::ObjectExt::unref`
<!-- impl Player::fn get_multiview_flags -->
Retrieve the current value of the indicated `type_`.
# Returns
The current value of `type_`, Default: 0x00000000 "none"
<!-- impl Player::fn get_multiview_mode -->
Retrieve the current value of the indicated `type_`.
# Returns
The current value of `type_`, Default: -1 "none"
<!-- impl Player::fn get_mute -->
# Returns
`true` if the currently-playing stream is muted.
<!-- impl Player::fn get_pipeline -->
# Returns
The internal playbin instance
<!-- impl Player::fn get_position -->
# Returns
the absolute position time, in nanoseconds, of the
currently-playing stream.
<!-- impl Player::fn get_rate -->
# Returns
current playback rate
<!-- impl Player::fn get_subtitle_uri -->
current subtitle URI
# Returns
URI of the current external subtitle.
`g_free` after usage.
<!-- impl Player::fn get_subtitle_video_offset -->
Retrieve the current value of subtitle-video-offset property
Feature: `v1_16`
# Returns
The current value of subtitle-video-offset in nanoseconds
<!-- impl Player::fn get_uri -->
Gets the URI of the currently-playing stream.
# Returns
a string containing the URI of the
currently-playing stream. `g_free` after usage.
<!-- impl Player::fn get_video_snapshot -->
Get a snapshot of the currently selected video stream, if any. The format can be
selected with `format` and optional configuration is possible with `config`
Currently supported settings are:
- width, height of type G_TYPE_INT
- pixel-aspect-ratio of type GST_TYPE_FRACTION
Except for GST_PLAYER_THUMBNAIL_RAW_NATIVE format, if no config is set, pixel-aspect-ratio would be 1/1
## `format`
output format of the video snapshot
## `config`
Additional configuration
# Returns
Current video snapshot sample or `None` on failure
<!-- impl Player::fn get_volume -->
Returns the current volume level, as a percentage between 0 and 1.
# Returns
the volume as percentage between 0 and 1.
<!-- impl Player::fn has_color_balance -->
Checks whether the `self` has color balance support available.
# Returns
`true` if `self` has color balance support. Otherwise,
`false`.
<!-- impl Player::fn pause -->
Pauses the current stream.
<!-- impl Player::fn play -->
Request to play the loaded stream.
<!-- impl Player::fn seek -->
Seeks the currently-playing stream to the absolute `position` time
in nanoseconds.
## `position`
position to seek in nanoseconds
<!-- impl Player::fn set_audio_track -->
## `stream_index`
stream index
# Returns
`true` or `false`
Sets the audio track `stream_index`.
<!-- impl Player::fn set_audio_track_enabled -->
Enable or disable the current audio track.
## `enabled`
TRUE or FALSE
<!-- impl Player::fn set_audio_video_offset -->
Sets audio-video-offset property by value of `offset`
## `offset`
`gint64` in nanoseconds
<!-- impl Player::fn set_color_balance -->
Sets the current value of the indicated channel `type_` to the passed
value.
## `type_`
`PlayerColorBalanceType`
## `value`
The new value for the `type_`, ranged [0,1]
<!-- impl Player::fn set_config -->
Set the configuration of the player. If the player is already configured, and
the configuration hasn't changed, this function will return `true`. If the
player is not in the GST_PLAYER_STATE_STOPPED, this method will return `false`
and active configuration will remain.
`config` is a `gst::Structure` that contains the configuration parameters for
the player.
This function takes ownership of `config`.
## `config`
a `gst::Structure`
# Returns
`true` when the configuration could be set.
<!-- impl Player::fn set_multiview_flags -->
Sets the current value of the indicated mode `type_` to the passed
value.
## `flags`
The new value for the `type_`
<!-- impl Player::fn set_multiview_mode -->
Sets the current value of the indicated mode `type_` to the passed
value.
## `mode`
The new value for the `type_`
<!-- impl Player::fn set_mute -->
`true` if the currently-playing stream should be muted.
## `val`
Mute state the should be set
<!-- impl Player::fn set_rate -->
Playback at specified rate
## `rate`
playback rate
<!-- impl Player::fn set_subtitle_track -->
## `stream_index`
stream index
# Returns
`true` or `false`
Sets the subtitle track `stream_index`.
<!-- impl Player::fn set_subtitle_track_enabled -->
Enable or disable the current subtitle track.
## `enabled`
TRUE or FALSE
<!-- impl Player::fn set_subtitle_uri -->
Sets the external subtitle URI. This should be combined with a call to
gst_player_set_subtitle_track_enabled(`self`, TRUE) so the subtitles are actually
rendered.
## `uri`
subtitle URI
<!-- impl Player::fn set_subtitle_video_offset -->
Sets subtitle-video-offset property by value of `offset`
Feature: `v1_16`
## `offset`
`gint64` in nanoseconds
<!-- impl Player::fn set_uri -->
Sets the next URI to play.
## `uri`
next URI to play.
<!-- impl Player::fn set_video_track -->
## `stream_index`
stream index
# Returns
`true` or `false`
Sets the video track `stream_index`.
<!-- impl Player::fn set_video_track_enabled -->
Enable or disable the current video track.
## `enabled`
TRUE or FALSE
<!-- impl Player::fn set_visualization -->
## `name`
visualization element obtained from
`Player::visualizations_get`()
# Returns
`true` if the visualizations was set correctly. Otherwise,
`false`.
<!-- impl Player::fn set_visualization_enabled -->
Enable or disable the visualization.
## `enabled`
TRUE or FALSE
<!-- impl Player::fn set_volume -->
Sets the volume level of the stream as a percentage between 0 and 1.
## `val`
the new volume level, as a percentage between 0 and 1
<!-- impl Player::fn stop -->
Stops playing the current stream and resets to the first position
in the stream.
<!-- struct PlayerAudioInfo -->
`PlayerStreamInfo` specific to audio streams.
# Implements
[`PlayerStreamInfoExt`](trait.PlayerStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl PlayerAudioInfo::fn get_bitrate -->
# Returns
the audio bitrate in `PlayerAudioInfo`.
<!-- impl PlayerAudioInfo::fn get_channels -->
# Returns
the number of audio channels in `PlayerAudioInfo`.
<!-- impl PlayerAudioInfo::fn get_language -->
# Returns
the language of the stream, or NULL if unknown.
<!-- impl PlayerAudioInfo::fn get_max_bitrate -->
# Returns
the audio maximum bitrate in `PlayerAudioInfo`.
<!-- impl PlayerAudioInfo::fn get_sample_rate -->
# Returns
the audio sample rate in `PlayerAudioInfo`.
<!-- enum PlayerColorBalanceType -->
<!-- enum PlayerColorBalanceType::variant Hue -->
hue or color balance.
<!-- enum PlayerColorBalanceType::variant Brightness -->
brightness or black level.
<!-- enum PlayerColorBalanceType::variant Saturation -->
color saturation or chroma
gain.
<!-- enum PlayerColorBalanceType::variant Contrast -->
contrast or luma gain.
<!-- enum PlayerError -->
<!-- enum PlayerError::variant Failed -->
generic error.
<!-- struct PlayerGMainContextSignalDispatcher -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html), [`PlayerSignalDispatcherExt`](trait.PlayerSignalDispatcherExt.html)
<!-- impl PlayerGMainContextSignalDispatcher::fn new -->
Creates a new GstPlayerSignalDispatcher that uses `application_context`,
or the thread default one if `None` is used. See `Player::new`.
## `application_context`
GMainContext to use or `None`
# Returns
the new GstPlayerSignalDispatcher
<!-- struct PlayerMediaInfo -->
Structure containing the media information of a URI.
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl PlayerMediaInfo::fn get_audio_streams -->
# Returns
A `glib::List` of
matching `PlayerAudioInfo`.
<!-- impl PlayerMediaInfo::fn get_container_format -->
# Returns
the container format.
<!-- impl PlayerMediaInfo::fn get_duration -->
# Returns
duration of the media.
<!-- impl PlayerMediaInfo::fn get_image_sample -->
Function to get the image (or preview-image) stored in taglist.
Applications can use the `gst_sample_*()` APIs to get caps, buffer, etc.
# Returns
GstSample or NULL.
<!-- impl PlayerMediaInfo::fn get_number_of_audio_streams -->
# Returns
number of audio streams.
<!-- impl PlayerMediaInfo::fn get_number_of_streams -->
# Returns
number of total streams.
<!-- impl PlayerMediaInfo::fn get_number_of_subtitle_streams -->
# Returns
number of subtitle streams.
<!-- impl PlayerMediaInfo::fn get_number_of_video_streams -->
# Returns
number of video streams.
<!-- impl PlayerMediaInfo::fn get_stream_list -->
# Returns
A `glib::List` of
matching `PlayerStreamInfo`.
<!-- impl PlayerMediaInfo::fn get_subtitle_streams -->
# Returns
A `glib::List` of
matching `PlayerSubtitleInfo`.
<!-- impl PlayerMediaInfo::fn get_tags -->
# Returns
the tags contained in media info.
<!-- impl PlayerMediaInfo::fn get_title -->
# Returns
the media title.
<!-- impl PlayerMediaInfo::fn get_uri -->
# Returns
the URI associated with `PlayerMediaInfo`.
<!-- impl PlayerMediaInfo::fn get_video_streams -->
# Returns
A `glib::List` of
matching `PlayerVideoInfo`.
<!-- impl PlayerMediaInfo::fn is_live -->
# Returns
`true` if the media is live.
<!-- impl PlayerMediaInfo::fn is_seekable -->
# Returns
`true` if the media is seekable.
<!-- struct PlayerSignalDispatcher -->
# Implements
[`PlayerSignalDispatcherExt`](trait.PlayerSignalDispatcherExt.html)
<!-- trait PlayerSignalDispatcherExt -->
Trait containing all `PlayerSignalDispatcher` methods.
# Implementors
[`PlayerGMainContextSignalDispatcher`](struct.PlayerGMainContextSignalDispatcher.html), [`PlayerSignalDispatcher`](struct.PlayerSignalDispatcher.html)
<!-- enum PlayerSnapshotFormat -->
<!-- enum PlayerState -->
<!-- enum PlayerState::variant Stopped -->
the player is stopped.
<!-- enum PlayerState::variant Buffering -->
the player is buffering.
<!-- enum PlayerState::variant Paused -->
the player is paused.
<!-- enum PlayerState::variant Playing -->
the player is currently playing a
stream.
<!-- struct PlayerStreamInfo -->
Base structure for information concerning a media stream. Depending on
the stream type, one can find more media-specific information in
`PlayerVideoInfo`, `PlayerAudioInfo`, `PlayerSubtitleInfo`.
# Implements
[`PlayerStreamInfoExt`](trait.PlayerStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- trait PlayerStreamInfoExt -->
Trait containing all `PlayerStreamInfo` methods.
# Implementors
[`PlayerAudioInfo`](struct.PlayerAudioInfo.html), [`PlayerStreamInfo`](struct.PlayerStreamInfo.html), [`PlayerSubtitleInfo`](struct.PlayerSubtitleInfo.html), [`PlayerVideoInfo`](struct.PlayerVideoInfo.html)
<!-- trait PlayerStreamInfoExt::fn get_caps -->
# Returns
the `gst::Caps` of the stream.
<!-- trait PlayerStreamInfoExt::fn get_codec -->
A string describing codec used in `PlayerStreamInfo`.
# Returns
codec string or NULL on unknown.
<!-- trait PlayerStreamInfoExt::fn get_index -->
Function to get stream index from `PlayerStreamInfo` instance.
# Returns
the stream index of this stream.
<!-- trait PlayerStreamInfoExt::fn get_stream_type -->
Function to return human readable name for the stream type
of the given `self` (ex: "audio", "video", "subtitle")
# Returns
a human readable name
<!-- trait PlayerStreamInfoExt::fn get_tags -->
# Returns
the tags contained in this stream.
<!-- struct PlayerSubtitleInfo -->
`PlayerStreamInfo` specific to subtitle streams.
# Implements
[`PlayerStreamInfoExt`](trait.PlayerStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl PlayerSubtitleInfo::fn get_language -->
# Returns
the language of the stream, or NULL if unknown.
<!-- struct PlayerVideoInfo -->
`PlayerStreamInfo` specific to video streams.
# Implements
[`PlayerStreamInfoExt`](trait.PlayerStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl PlayerVideoInfo::fn get_bitrate -->
# Returns
the current bitrate of video in `PlayerVideoInfo`.
<!-- impl PlayerVideoInfo::fn get_framerate -->
## `fps_n`
Numerator of frame rate
## `fps_d`
Denominator of frame rate
<!-- impl PlayerVideoInfo::fn get_height -->
# Returns
the height of video in `PlayerVideoInfo`.
<!-- impl PlayerVideoInfo::fn get_max_bitrate -->
# Returns
the maximum bitrate of video in `PlayerVideoInfo`.
<!-- impl PlayerVideoInfo::fn get_pixel_aspect_ratio -->
Returns the pixel aspect ratio in `par_n` and `par_d`
## `par_n`
numerator
## `par_d`
denominator
<!-- impl PlayerVideoInfo::fn get_width -->
# Returns
the width of video in `PlayerVideoInfo`.
<!-- struct PlayerVideoOverlayVideoRenderer -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html), [`PlayerVideoRendererExt`](trait.PlayerVideoRendererExt.html)
<!-- impl PlayerVideoOverlayVideoRenderer::fn new -->
## `window_handle`
Window handle to use or `None`
<!-- impl PlayerVideoOverlayVideoRenderer::fn new_with_sink -->
## `window_handle`
Window handle to use or `None`
## `video_sink`
the custom video_sink element to be set for the video renderer
<!-- impl PlayerVideoOverlayVideoRenderer::fn expose -->
Tell an overlay that it has been exposed. This will redraw the current frame
in the drawable even if the pipeline is PAUSED.
<!-- impl PlayerVideoOverlayVideoRenderer::fn get_render_rectangle -->
Return the currently configured render rectangle. See `PlayerVideoOverlayVideoRenderer::set_render_rectangle`
for details.
## `x`
the horizontal offset of the render area inside the window
## `y`
the vertical offset of the render area inside the window
## `width`
the width of the render area inside the window
## `height`
the height of the render area inside the window
<!-- impl PlayerVideoOverlayVideoRenderer::fn get_window_handle -->
# Returns
The currently set, platform specific window
handle
<!-- impl PlayerVideoOverlayVideoRenderer::fn set_render_rectangle -->
Configure a subregion as a video target within the window set by
`PlayerVideoOverlayVideoRenderer::set_window_handle`. If this is not
used or not supported the video will fill the area of the window set as the
overlay to 100%. By specifying the rectangle, the video can be overlaid to
a specific region of that window only. After setting the new rectangle one
should call `PlayerVideoOverlayVideoRenderer::expose` to force a
redraw. To unset the region pass -1 for the `width` and `height` parameters.
This method is needed for non fullscreen video overlay in UI toolkits that
do not support subwindows.
## `x`
the horizontal offset of the render area inside the window
## `y`
the vertical offset of the render area inside the window
## `width`
the width of the render area inside the window
## `height`
the height of the render area inside the window
<!-- impl PlayerVideoOverlayVideoRenderer::fn set_window_handle -->
Sets the platform specific window handle into which the video
should be rendered
## `window_handle`
handle referencing to the platform specific window
<!-- struct PlayerVideoRenderer -->
# Implements
[`PlayerVideoRendererExt`](trait.PlayerVideoRendererExt.html)
<!-- trait PlayerVideoRendererExt -->
Trait containing all `PlayerVideoRenderer` methods.
# Implementors
[`PlayerVideoOverlayVideoRenderer`](struct.PlayerVideoOverlayVideoRenderer.html), [`PlayerVideoRenderer`](struct.PlayerVideoRenderer.html)
<!-- struct PlayerVisualization -->
A `PlayerVisualization` descriptor.
<!-- impl PlayerVisualization::fn copy -->
Makes a copy of the `PlayerVisualization`. The result must be
freed using `PlayerVisualization::free`.
# Returns
an allocated copy of `self`.
<!-- impl PlayerVisualization::fn free -->
Frees a `PlayerVisualization`.

199
docs/gstreamer-rtp/docs.md Normal file
View file

@ -0,0 +1,199 @@
<!-- file * -->
<!-- enum RTCPFBType -->
Different types of feedback messages.
<!-- enum RTCPFBType::variant FbTypeInvalid -->
Invalid type
<!-- enum RTCPFBType::variant RtpfbTypeNack -->
Generic NACK
<!-- enum RTCPFBType::variant RtpfbTypeTmmbr -->
Temporary Maximum Media Stream Bit Rate Request
<!-- enum RTCPFBType::variant RtpfbTypeTmmbn -->
Temporary Maximum Media Stream Bit Rate
Notification
<!-- enum RTCPFBType::variant RtpfbTypeRtcpSrReq -->
Request an SR packet for early
synchronization
<!-- enum RTCPFBType::variant PsfbTypePli -->
Picture Loss Indication
<!-- enum RTCPFBType::variant PsfbTypeSli -->
Slice Loss Indication
<!-- enum RTCPFBType::variant PsfbTypeRpsi -->
Reference Picture Selection Indication
<!-- enum RTCPFBType::variant PsfbTypeAfb -->
Application layer Feedback
<!-- enum RTCPFBType::variant PsfbTypeFir -->
Full Intra Request Command
<!-- enum RTCPFBType::variant PsfbTypeTstr -->
Temporal-Spatial Trade-off Request
<!-- enum RTCPFBType::variant PsfbTypeTstn -->
Temporal-Spatial Trade-off Notification
<!-- enum RTCPFBType::variant PsfbTypeVbcn -->
Video Back Channel Message
<!-- enum RTCPSDESType -->
Different types of SDES content.
<!-- enum RTCPSDESType::variant Invalid -->
Invalid SDES entry
<!-- enum RTCPSDESType::variant End -->
End of SDES list
<!-- enum RTCPSDESType::variant Cname -->
Canonical name
<!-- enum RTCPSDESType::variant Name -->
User name
<!-- enum RTCPSDESType::variant Email -->
User's electronic mail address
<!-- enum RTCPSDESType::variant Phone -->
User's phone number
<!-- enum RTCPSDESType::variant Loc -->
Geographic user location
<!-- enum RTCPSDESType::variant Tool -->
Name of application or tool
<!-- enum RTCPSDESType::variant Note -->
Notice about the source
<!-- enum RTCPSDESType::variant Priv -->
Private extensions
<!-- enum RTCPType -->
Different RTCP packet types.
<!-- enum RTCPType::variant Invalid -->
Invalid type
<!-- enum RTCPType::variant Sr -->
Sender report
<!-- enum RTCPType::variant Rr -->
Receiver report
<!-- enum RTCPType::variant Sdes -->
Source description
<!-- enum RTCPType::variant Bye -->
Goodbye
<!-- enum RTCPType::variant App -->
Application defined
<!-- enum RTCPType::variant Rtpfb -->
Transport layer feedback.
<!-- enum RTCPType::variant Psfb -->
Payload-specific feedback.
<!-- enum RTCPType::variant Xr -->
Extended report.
<!-- enum RTCPXRType -->
Types of RTCP Extended Reports, those are defined in RFC 3611 and other RFCs
according to the [IANA registry](https://www.iana.org/assignments/rtcp-xr-block-types/rtcp-xr-block-types.xhtml).
<!-- enum RTCPXRType::variant Invalid -->
Invalid XR Report Block
<!-- enum RTCPXRType::variant Lrle -->
Loss RLE Report Block
<!-- enum RTCPXRType::variant Drle -->
Duplicate RLE Report Block
<!-- enum RTCPXRType::variant Prt -->
Packet Receipt Times Report Block
<!-- enum RTCPXRType::variant Rrt -->
Receiver Reference Time Report Block
<!-- enum RTCPXRType::variant Dlrr -->
Delay since the last Receiver Report
<!-- enum RTCPXRType::variant Ssumm -->
Statistics Summary Report Block
<!-- enum RTCPXRType::variant VoipMetrics -->
VoIP Metrics Report Block
Feature: `v1_16`
<!-- struct RTPBufferFlags -->
Additional RTP buffer flags. These flags can potentially be used on any
buffers carrying RTP packets.
Note that these are only valid for `gst::Caps` of type: application/x-rtp (x-rtcp).
They can conflict with other extended buffer flags.
<!-- struct RTPBufferFlags::const RETRANSMISSION -->
The `gst::Buffer` was once wrapped
in a retransmitted packet as specified by RFC 4588.
<!-- struct RTPBufferFlags::const REDUNDANT -->
The packet represents redundant RTP packet.
The flag is used in gstrtpstorage to be able to hold the packet back
and use it only for recovery from packet loss.
Since: 1.14
<!-- struct RTPBufferFlags::const LAST -->
Offset to define more flags.
Feature: `v1_10`
<!-- struct RTPBufferMapFlags -->
Additional mapping flags for `RTPBuffer::map`.
<!-- struct RTPBufferMapFlags::const SKIP_PADDING -->
Skip mapping and validation of RTP
padding and RTP pad count when present. Useful for buffers where
the padding may be encrypted.
<!-- struct RTPBufferMapFlags::const LAST -->
Offset to define more flags
<!-- enum RTPPayload -->
Standard predefined fixed payload types.
The official list is at:
http://www.iana.org/assignments/rtp-parameters
Audio:
reserved: 19
unassigned: 20-23,
Video:
unassigned: 24, 27, 29, 30, 35-71, 77-95
Reserved for RTCP conflict avoidance: 72-76
<!-- enum RTPPayload::variant Pcmu -->
ITU-T G.711. mu-law audio (RFC 3551)
<!-- enum RTPPayload::variant 1016 -->
RFC 3551 says reserved
<!-- enum RTPPayload::variant G721 -->
RFC 3551 says reserved
<!-- enum RTPPayload::variant Gsm -->
GSM audio
<!-- enum RTPPayload::variant G723 -->
ITU G.723.1 audio
<!-- enum RTPPayload::variant Dvi48000 -->
IMA ADPCM wave type (RFC 3551)
<!-- enum RTPPayload::variant Dvi416000 -->
IMA ADPCM wave type (RFC 3551)
<!-- enum RTPPayload::variant Lpc -->
experimental linear predictive encoding
<!-- enum RTPPayload::variant Pcma -->
ITU-T G.711 A-law audio (RFC 3551)
<!-- enum RTPPayload::variant G722 -->
ITU-T G.722 (RFC 3551)
<!-- enum RTPPayload::variant L16Stereo -->
stereo PCM
<!-- enum RTPPayload::variant L16Mono -->
mono PCM
<!-- enum RTPPayload::variant Qcelp -->
EIA & TIA standard IS-733
<!-- enum RTPPayload::variant Cn -->
Comfort Noise (RFC 3389)
<!-- enum RTPPayload::variant Mpa -->
Audio MPEG 1-3.
<!-- enum RTPPayload::variant G728 -->
ITU-T G.728 Speech coder (RFC 3551)
<!-- enum RTPPayload::variant Dvi411025 -->
IMA ADPCM wave type (RFC 3551)
<!-- enum RTPPayload::variant Dvi422050 -->
IMA ADPCM wave type (RFC 3551)
<!-- enum RTPPayload::variant G729 -->
ITU-T G.729 Speech coder (RFC 3551)
<!-- enum RTPPayload::variant Cellb -->
See RFC 2029
<!-- enum RTPPayload::variant Jpeg -->
ISO Standards 10918-1 and 10918-2 (RFC 2435)
<!-- enum RTPPayload::variant Nv -->
nv encoding by Ron Frederick
<!-- enum RTPPayload::variant H261 -->
ITU-T Recommendation H.261 (RFC 2032)
<!-- enum RTPPayload::variant Mpv -->
Video MPEG 1 & 2 (RFC 2250)
<!-- enum RTPPayload::variant Mp2t -->
MPEG-2 transport stream (RFC 2250)
<!-- enum RTPPayload::variant H263 -->
Video H263 (RFC 2190)
<!-- enum RTPProfile -->
The transfer profile to use.
<!-- enum RTPProfile::variant Unknown -->
invalid profile
<!-- enum RTPProfile::variant Avp -->
the Audio/Visual profile (RFC 3551)
<!-- enum RTPProfile::variant Savp -->
the secure Audio/Visual profile (RFC 3711)
<!-- enum RTPProfile::variant Avpf -->
the Audio/Visual profile with feedback (RFC 4585)
<!-- enum RTPProfile::variant Savpf -->
the secure Audio/Visual profile with feedback (RFC 5124)

File diff suppressed because it is too large Load diff

258
docs/gstreamer-rtsp/docs.md Normal file
View file

@ -0,0 +1,258 @@
<!-- file * -->
<!-- enum RTSPAuthMethod -->
Authentication methods, ordered by strength
<!-- enum RTSPAuthMethod::variant None -->
no authentication
<!-- enum RTSPAuthMethod::variant Basic -->
basic authentication
<!-- enum RTSPAuthMethod::variant Digest -->
digest authentication
<!-- struct RTSPAuthParam -->
RTSP Authentication parameter
Feature: `v1_12`
<!-- struct RTSPEvent -->
The possible events for the connection.
<!-- struct RTSPEvent::const READ -->
connection is readable
<!-- struct RTSPEvent::const WRITE -->
connection is writable
<!-- enum RTSPFamily -->
The possible network families.
<!-- enum RTSPFamily::variant None -->
unknown network family
<!-- enum RTSPFamily::variant Inet -->
internet
<!-- enum RTSPFamily::variant Inet6 -->
internet V6
<!-- enum RTSPHeaderField -->
Enumeration of rtsp header fields
<!-- struct RTSPLowerTrans -->
The different transport methods.
<!-- struct RTSPLowerTrans::const UNKNOWN -->
invalid transport flag
<!-- struct RTSPLowerTrans::const UDP -->
stream data over UDP
<!-- struct RTSPLowerTrans::const UDP_MCAST -->
stream data over UDP multicast
<!-- struct RTSPLowerTrans::const TCP -->
stream data over TCP
<!-- struct RTSPLowerTrans::const HTTP -->
stream data tunneled over HTTP.
<!-- struct RTSPLowerTrans::const TLS -->
encrypt TCP and HTTP with TLS
<!-- struct RTSPMethod -->
The different supported RTSP methods.
<!-- struct RTSPMethod::const INVALID -->
invalid method
<!-- struct RTSPMethod::const DESCRIBE -->
the DESCRIBE method
<!-- struct RTSPMethod::const ANNOUNCE -->
the ANNOUNCE method
<!-- struct RTSPMethod::const GET_PARAMETER -->
the GET_PARAMETER method
<!-- struct RTSPMethod::const OPTIONS -->
the OPTIONS method
<!-- struct RTSPMethod::const PAUSE -->
the PAUSE method
<!-- struct RTSPMethod::const PLAY -->
the PLAY method
<!-- struct RTSPMethod::const RECORD -->
the RECORD method
<!-- struct RTSPMethod::const REDIRECT -->
the REDIRECT method
<!-- struct RTSPMethod::const SETUP -->
the SETUP method
<!-- struct RTSPMethod::const SET_PARAMETER -->
the SET_PARAMETER method
<!-- struct RTSPMethod::const TEARDOWN -->
the TEARDOWN method
<!-- struct RTSPMethod::const GET -->
the GET method (HTTP).
<!-- struct RTSPMethod::const POST -->
the POST method (HTTP).
<!-- enum RTSPMsgType -->
The type of a message.
<!-- enum RTSPMsgType::variant Invalid -->
invalid message type
<!-- enum RTSPMsgType::variant Request -->
RTSP request message
<!-- enum RTSPMsgType::variant Response -->
RTSP response message
<!-- enum RTSPMsgType::variant HttpRequest -->
HTTP request message.
<!-- enum RTSPMsgType::variant HttpResponse -->
HTTP response message.
<!-- enum RTSPMsgType::variant Data -->
data message
<!-- struct RTSPProfile -->
The transfer profile to use.
<!-- struct RTSPProfile::const UNKNOWN -->
invalid profile
<!-- struct RTSPProfile::const AVP -->
the Audio/Visual profile (RFC 3551)
<!-- struct RTSPProfile::const SAVP -->
the secure Audio/Visual profile (RFC 3711)
<!-- struct RTSPProfile::const AVPF -->
the Audio/Visual profile with feedback (RFC 4585)
<!-- struct RTSPProfile::const SAVPF -->
the secure Audio/Visual profile with feedback (RFC 5124)
<!-- enum RTSPRangeUnit -->
Different possible time range units.
<!-- enum RTSPRangeUnit::variant Smpte -->
SMPTE timecode
<!-- enum RTSPRangeUnit::variant Smpte30Drop -->
29.97 frames per second
<!-- enum RTSPRangeUnit::variant Smpte25 -->
25 frames per second
<!-- enum RTSPRangeUnit::variant Npt -->
Normal play time
<!-- enum RTSPRangeUnit::variant Clock -->
Absolute time expressed as ISO 8601 timestamps
<!-- enum RTSPResult -->
Result codes from the RTSP functions.
<!-- enum RTSPResult::variant Ok -->
no error
<!-- enum RTSPResult::variant Error -->
some unspecified error occurred
<!-- enum RTSPResult::variant Einval -->
invalid arguments were provided to a function
<!-- enum RTSPResult::variant Eintr -->
an operation was canceled
<!-- enum RTSPResult::variant Enomem -->
no memory was available for the operation
<!-- enum RTSPResult::variant Eresolv -->
a host resolve error occurred
<!-- enum RTSPResult::variant Enotimpl -->
function not implemented
<!-- enum RTSPResult::variant Esys -->
a system error occurred, errno contains more details
<!-- enum RTSPResult::variant Eparse -->
a parsing error occurred
<!-- enum RTSPResult::variant Ewsastart -->
windows networking could not start
<!-- enum RTSPResult::variant Ewsaversion -->
windows networking stack has wrong version
<!-- enum RTSPResult::variant Eeof -->
end-of-file was reached
<!-- enum RTSPResult::variant Enet -->
a network problem occurred, h_errno contains more details
<!-- enum RTSPResult::variant Enotip -->
the host is not an IP host
<!-- enum RTSPResult::variant Etimeout -->
a timeout occurred
<!-- enum RTSPResult::variant Etget -->
the tunnel GET request has been performed
<!-- enum RTSPResult::variant Etpost -->
the tunnel POST request has been performed
<!-- enum RTSPResult::variant Elast -->
last error
<!-- enum RTSPState -->
The different RTSP states.
<!-- enum RTSPState::variant Invalid -->
invalid state
<!-- enum RTSPState::variant Init -->
initializing
<!-- enum RTSPState::variant Ready -->
ready for operation
<!-- enum RTSPState::variant Seeking -->
seeking in progress
<!-- enum RTSPState::variant Playing -->
playing
<!-- enum RTSPState::variant Recording -->
recording
<!-- enum RTSPStatusCode -->
Enumeration of rtsp status codes
<!-- enum RTSPTimeType -->
Possible time types.
<!-- enum RTSPTimeType::variant Seconds -->
seconds
<!-- enum RTSPTimeType::variant Now -->
now
<!-- enum RTSPTimeType::variant End -->
end
<!-- enum RTSPTimeType::variant Frames -->
frames and subframes
<!-- enum RTSPTimeType::variant Utc -->
UTC time
<!-- struct RTSPTransMode -->
The transfer mode to use.
<!-- struct RTSPTransMode::const UNKNOWN -->
invalid transport mode
<!-- struct RTSPTransMode::const RTP -->
transfer RTP data
<!-- struct RTSPTransMode::const RDT -->
transfer RDT (RealMedia) data
<!-- struct RTSPUrl -->
Provides helper functions to handle RTSP urls.
<!-- impl RTSPUrl::fn copy -->
Make a copy of `self`.
# Returns
a copy of `self`. Free with gst_rtsp_url_free () after usage.
<!-- impl RTSPUrl::fn decode_path_components -->
Splits the path of `self` on '/' boundaries, decoding the resulting components.
The decoding performed by this routine is "URI decoding", as defined in RFC
3986, commonly known as percent-decoding. For example, a string "foo\%2fbar"
will decode to "foo/bar" -- the \%2f being replaced by the corresponding byte
with hex value 0x2f. Note that there is no guarantee that the resulting byte
sequence is valid in any given encoding. As a special case, \%00 is not
unescaped to NUL, as that would prematurely terminate the string.
Also note that since paths usually start with a slash, the first component
will usually be the empty string.
# Returns
`None`-terminated array of URL components. Free with
`g_strfreev` when no longer needed.
<!-- impl RTSPUrl::fn free -->
Free the memory used by `self`.
<!-- impl RTSPUrl::fn get_port -->
Get the port number of `self`.
## `port`
location to hold the port
# Returns
`RTSPResult::Ok`.
<!-- impl RTSPUrl::fn get_request_uri -->
Get a newly allocated string describing the request URI for `self`.
# Returns
a string with the request URI. `g_free` after usage.
<!-- impl RTSPUrl::fn get_request_uri_with_control -->
Get a newly allocated string describing the request URI for `self`
combined with the control path for `control_path`
Feature: `v1_18`
## `control_path`
an RTSP aggregate control path
# Returns
a string with the request URI combined with the control path.
`g_free` after usage.
<!-- impl RTSPUrl::fn set_port -->
Set the port number in `self` to `port`.
## `port`
the port
# Returns
`RTSPResult::Ok`.
<!-- impl RTSPUrl::fn parse -->
Parse the RTSP `urlstr` into a newly allocated `RTSPUrl`. Free after usage
with `RTSPUrl::free`.
## `urlstr`
the url string to parse
## `url`
location to hold the result.
# Returns
a `RTSPResult`.

View file

@ -0,0 +1 @@
<!-- file * -->

2573
docs/gstreamer-video/docs.md Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,326 @@
<!-- file * -->
<!-- enum WebRTCBundlePolicy -->
GST_WEBRTC_BUNDLE_POLICY_NONE: none
GST_WEBRTC_BUNDLE_POLICY_BALANCED: balanced
GST_WEBRTC_BUNDLE_POLICY_MAX_COMPAT: max-compat
GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE: max-bundle
See https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24`section`-4.1.1
for more information.
Feature: `v1_16`
<!-- enum WebRTCDTLSSetup -->
<!-- enum WebRTCDTLSSetup::variant None -->
none
<!-- enum WebRTCDTLSSetup::variant Actpass -->
actpass
<!-- enum WebRTCDTLSSetup::variant Active -->
active
<!-- enum WebRTCDTLSSetup::variant Passive -->
passive
<!-- struct WebRTCDTLSTransport -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- enum WebRTCDTLSTransportState -->
<!-- enum WebRTCDTLSTransportState::variant New -->
new
<!-- enum WebRTCDTLSTransportState::variant Closed -->
closed
<!-- enum WebRTCDTLSTransportState::variant Failed -->
failed
<!-- enum WebRTCDTLSTransportState::variant Connecting -->
connecting
<!-- enum WebRTCDTLSTransportState::variant Connected -->
connected
<!-- struct WebRTCDataChannel -->
Feature: `v1_18`
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl WebRTCDataChannel::fn close -->
Close the `self`.
Feature: `v1_18`
<!-- impl WebRTCDataChannel::fn on_buffered_amount_low -->
Signal that the data channel reached a low buffered amount. Should only be used by subclasses.
Feature: `v1_18`
<!-- impl WebRTCDataChannel::fn on_close -->
Signal that the data channel was closed. Should only be used by subclasses.
Feature: `v1_18`
<!-- impl WebRTCDataChannel::fn on_error -->
Signal that the data channel had an error. Should only be used by subclasses.
Feature: `v1_18`
## `error`
a `glib::Error`
<!-- impl WebRTCDataChannel::fn on_message_data -->
Signal that the data channel received a data message. Should only be used by subclasses.
Feature: `v1_18`
## `data`
a `glib::Bytes` or `None`
<!-- impl WebRTCDataChannel::fn on_message_string -->
Signal that the data channel received a string message. Should only be used by subclasses.
Feature: `v1_18`
## `str`
a string or `None`
<!-- impl WebRTCDataChannel::fn on_open -->
Signal that the data channel was opened. Should only be used by subclasses.
Feature: `v1_18`
<!-- impl WebRTCDataChannel::fn send_data -->
Send `data` as a data message over `self`.
Feature: `v1_18`
## `data`
a `glib::Bytes` or `None`
<!-- impl WebRTCDataChannel::fn send_string -->
Send `str` as a string message over `self`.
Feature: `v1_18`
## `str`
a string or `None`
<!-- impl WebRTCDataChannel::fn connect_close -->
Close the data channel
<!-- impl WebRTCDataChannel::fn connect_on_error -->
## `error`
the `glib::Error` thrown
<!-- impl WebRTCDataChannel::fn connect_on_message_data -->
## `data`
a `glib::Bytes` of the data received
<!-- impl WebRTCDataChannel::fn connect_on_message_string -->
## `data`
the data received as a string
<!-- impl WebRTCDataChannel::fn connect_send_data -->
## `data`
a `glib::Bytes` with the data
<!-- impl WebRTCDataChannel::fn connect_send_string -->
## `data`
the data to send as a string
<!-- enum WebRTCDataChannelState -->
GST_WEBRTC_DATA_CHANNEL_STATE_NEW: new
GST_WEBRTC_DATA_CHANNEL_STATE_CONNECTING: connection
GST_WEBRTC_DATA_CHANNEL_STATE_OPEN: open
GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING: closing
GST_WEBRTC_DATA_CHANNEL_STATE_CLOSED: closed
See <http://w3c.github.io/webrtc-pc/`dom`-rtcdatachannelstate>
Feature: `v1_16`
<!-- enum WebRTCFECType -->
<!-- enum WebRTCFECType::variant None -->
none
<!-- enum WebRTCFECType::variant UlpRed -->
ulpfec + red
Feature: `v1_14_1`
<!-- enum WebRTCICEComponent -->
<!-- enum WebRTCICEComponent::variant Rtp -->
RTP component
<!-- enum WebRTCICEComponent::variant Rtcp -->
RTCP component
<!-- enum WebRTCICEConnectionState -->
See <http://w3c.github.io/webrtc-pc/`dom`-rtciceconnectionstate>
<!-- enum WebRTCICEConnectionState::variant New -->
new
<!-- enum WebRTCICEConnectionState::variant Checking -->
checking
<!-- enum WebRTCICEConnectionState::variant Connected -->
connected
<!-- enum WebRTCICEConnectionState::variant Completed -->
completed
<!-- enum WebRTCICEConnectionState::variant Failed -->
failed
<!-- enum WebRTCICEConnectionState::variant Disconnected -->
disconnected
<!-- enum WebRTCICEConnectionState::variant Closed -->
closed
<!-- enum WebRTCICEGatheringState -->
See <http://w3c.github.io/webrtc-pc/`dom`-rtcicegatheringstate>
<!-- enum WebRTCICEGatheringState::variant New -->
new
<!-- enum WebRTCICEGatheringState::variant Gathering -->
gathering
<!-- enum WebRTCICEGatheringState::variant Complete -->
complete
<!-- enum WebRTCICERole -->
<!-- enum WebRTCICERole::variant Controlled -->
controlled
<!-- enum WebRTCICERole::variant Controlling -->
controlling
<!-- struct WebRTCICETransport -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- enum WebRTCICETransportPolicy -->
GST_WEBRTC_ICE_TRANSPORT_POLICY_ALL: all
GST_WEBRTC_ICE_TRANSPORT_POLICY_RELAY: relay
See https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24`section`-4.1.1
for more information.
Feature: `v1_16`
<!-- enum WebRTCPeerConnectionState -->
See <http://w3c.github.io/webrtc-pc/`dom`-rtcpeerconnectionstate>
<!-- enum WebRTCPeerConnectionState::variant New -->
new
<!-- enum WebRTCPeerConnectionState::variant Connecting -->
connecting
<!-- enum WebRTCPeerConnectionState::variant Connected -->
connected
<!-- enum WebRTCPeerConnectionState::variant Disconnected -->
disconnected
<!-- enum WebRTCPeerConnectionState::variant Failed -->
failed
<!-- enum WebRTCPeerConnectionState::variant Closed -->
closed
<!-- enum WebRTCPriorityType -->
GST_WEBRTC_PRIORITY_TYPE_VERY_LOW: very-low
GST_WEBRTC_PRIORITY_TYPE_LOW: low
GST_WEBRTC_PRIORITY_TYPE_MEDIUM: medium
GST_WEBRTC_PRIORITY_TYPE_HIGH: high
See <http://w3c.github.io/webrtc-pc/`dom`-rtcprioritytype>
Feature: `v1_16`
<!-- struct WebRTCRTPReceiver -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- struct WebRTCRTPSender -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- struct WebRTCRTPTransceiver -->
# Implements
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
<!-- impl WebRTCRTPTransceiver::fn get_property_direction -->
Direction of the transceiver.
Feature: `v1_18`
<!-- impl WebRTCRTPTransceiver::fn set_property_direction -->
Direction of the transceiver.
Feature: `v1_18`
<!-- enum WebRTCRTPTransceiverDirection -->
<!-- enum WebRTCRTPTransceiverDirection::variant None -->
none
<!-- enum WebRTCRTPTransceiverDirection::variant Inactive -->
inactive
<!-- enum WebRTCRTPTransceiverDirection::variant Sendonly -->
sendonly
<!-- enum WebRTCRTPTransceiverDirection::variant Recvonly -->
recvonly
<!-- enum WebRTCRTPTransceiverDirection::variant Sendrecv -->
sendrecv
<!-- enum WebRTCSCTPTransportState -->
GST_WEBRTC_SCTP_TRANSPORT_STATE_NEW: new
GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTING: connecting
GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTED: connected
GST_WEBRTC_SCTP_TRANSPORT_STATE_CLOSED: closed
See <http://w3c.github.io/webrtc-pc/`dom`-rtcsctptransportstate>
Feature: `v1_16`
<!-- enum WebRTCSDPType -->
See <http://w3c.github.io/webrtc-pc/`rtcsdptype`>
<!-- enum WebRTCSDPType::variant Offer -->
offer
<!-- enum WebRTCSDPType::variant Pranswer -->
pranswer
<!-- enum WebRTCSDPType::variant Answer -->
answer
<!-- enum WebRTCSDPType::variant Rollback -->
rollback
<!-- struct WebRTCSessionDescription -->
See <https://www.w3.org/TR/webrtc/`rtcsessiondescription`-class>
<!-- impl WebRTCSessionDescription::fn new -->
## `type_`
a `WebRTCSDPType`
## `sdp`
a `gst_sdp::SDPMessage`
# Returns
a new `WebRTCSessionDescription` from `type_`
and `sdp`
<!-- impl WebRTCSessionDescription::fn copy -->
# Returns
a new copy of `self`
<!-- impl WebRTCSessionDescription::fn free -->
Free `self` and all associated resources
<!-- enum WebRTCSignalingState -->
See <http://w3c.github.io/webrtc-pc/`dom`-rtcsignalingstate>
<!-- enum WebRTCSignalingState::variant Stable -->
stable
<!-- enum WebRTCSignalingState::variant Closed -->
closed
<!-- enum WebRTCSignalingState::variant HaveLocalOffer -->
have-local-offer
<!-- enum WebRTCSignalingState::variant HaveRemoteOffer -->
have-remote-offer
<!-- enum WebRTCSignalingState::variant HaveLocalPranswer -->
have-local-pranswer
<!-- enum WebRTCSignalingState::variant HaveRemotePranswer -->
have-remote-pranswer
<!-- enum WebRTCStatsType -->
<!-- enum WebRTCStatsType::variant Codec -->
codec
<!-- enum WebRTCStatsType::variant InboundRtp -->
inbound-rtp
<!-- enum WebRTCStatsType::variant OutboundRtp -->
outbound-rtp
<!-- enum WebRTCStatsType::variant RemoteInboundRtp -->
remote-inbound-rtp
<!-- enum WebRTCStatsType::variant RemoteOutboundRtp -->
remote-outbound-rtp
<!-- enum WebRTCStatsType::variant Csrc -->
csrc
<!-- enum WebRTCStatsType::variant PeerConnection -->
peer-connection
<!-- enum WebRTCStatsType::variant DataChannel -->
data-channel
<!-- enum WebRTCStatsType::variant Stream -->
stream
<!-- enum WebRTCStatsType::variant Transport -->
transport
<!-- enum WebRTCStatsType::variant CandidatePair -->
candidate-pair
<!-- enum WebRTCStatsType::variant LocalCandidate -->
local-candidate
<!-- enum WebRTCStatsType::variant RemoteCandidate -->
remote-candidate
<!-- enum WebRTCStatsType::variant Certificate -->
certificate

16265
docs/gstreamer/docs.md Normal file

File diff suppressed because it is too large Load diff

88
docs/src/lib.rs Normal file
View file

@ -0,0 +1,88 @@
extern crate stripper_lib;
use std::io;
use std::path::Path;
use stripper_lib::{loop_over_files, parse_cmts, regenerate_comments, strip_comments};
/// Identifies a GStreamer binding crate whose documentation can be
/// embedded or purged by this tool.
///
/// Each variant corresponds to one `docs.md` file bundled into the binary
/// at compile time (see `docs`).
#[derive(Clone, Copy, Debug)]
pub enum Library {
    GstWebRTC,
    GstVideo,
    GstSdp,
    GstRtspServer,
    GstRtsp,
    GstRtp,
    GstPlayer,
    GstNet,
    GstGL,
    GES,
    GstCheck,
    GstPbutils,
    GstBase,
    GstAudio,
    GstApp,
    Gst,
}
/// Returns the bundled `docs.md` contents for `lib`.
///
/// The markdown is embedded at compile time via `include_str!`, so the
/// returned string lives for the whole program (`&'static str`). Every
/// variant currently resolves to `Some(..)`; the `Option` return leaves
/// room for libraries without bundled docs.
fn docs(lib: Library) -> Option<&'static str> {
    match lib {
        Library::GstWebRTC => Some(include_str!("../gstreamer-webrtc/docs.md")),
        Library::GstVideo => Some(include_str!("../gstreamer-video/docs.md")),
        Library::GstSdp => Some(include_str!("../gstreamer-sdp/docs.md")),
        Library::GstRtspServer => Some(include_str!("../gstreamer-rtsp-server/docs.md")),
        Library::GstRtsp => Some(include_str!("../gstreamer-rtsp/docs.md")),
        Library::GstRtp => Some(include_str!("../gstreamer-rtp/docs.md")),
        Library::GstPlayer => Some(include_str!("../gstreamer-player/docs.md")),
        Library::GstNet => Some(include_str!("../gstreamer-net/docs.md")),
        Library::GstGL => Some(include_str!("../gstreamer-gl/docs.md")),
        Library::GES => Some(include_str!("../gstreamer-editing-services/docs.md")),
        Library::GstCheck => Some(include_str!("../gstreamer-check/docs.md")),
        Library::GstPbutils => Some(include_str!("../gstreamer-pbutils/docs.md")),
        Library::GstBase => Some(include_str!("../gstreamer-base/docs.md")),
        Library::GstAudio => Some(include_str!("../gstreamer-audio/docs.md")),
        Library::GstApp => Some(include_str!("../gstreamer-app/docs.md")),
        Library::Gst => Some(include_str!("../gstreamer/docs.md")),
    }
}
/// Hook for vendor-specific documentation overlays applied on top of the
/// main docs (see `embed`). No library currently provides one, so this
/// always returns `None`.
fn vendor_docs(_lib: Library) -> Option<&'static str> {
    None
}
/// Embeds the documentation for `library` into the sources under `path`.
///
/// `path` is the root directory to process.
///
/// `ignores` is the list of files to skip (relative to `path`).
pub fn embed<P: AsRef<Path>>(library: Library, path: P, ignores: &[&str]) {
    let root = path.as_ref();
    // Apply the main docs first, then any vendor-specific overlay; either
    // source may be absent (`None`), in which case it is simply skipped.
    for text in docs(library).into_iter().chain(vendor_docs(library)) {
        do_embed(text, root, ignores);
    }
}
/// Splices the doc comments described by `docs` into every Rust source file
/// under `path`, except those listed in `ignores` (relative to `path`).
fn do_embed(docs: &str, path: &Path, ignores: &[&str]) {
    // Parse the markdown into stripper_lib's per-item comment table; mutated
    // by `regenerate_comments` as entries are consumed.
    let mut infos = parse_cmts(docs.lines(), true);
    loop_over_files(
        path,
        &mut |w, s| regenerate_comments(w, s, &mut infos, true, true),
        // NOTE(review): `&ignores` passes a double reference; presumably
        // relies on deref coercion to match stripper_lib's expected
        // parameter type — confirm against the stripper_lib API.
        &ignores,
        false,
    );
}
/// Remove any doc comments.
///
/// `path` is the root directory to process.
///
/// `ignores` is the list of files to skip (relative to `path`).
pub fn purge<P: AsRef<Path>>(path: P, ignores: &[&str]) {
    loop_over_files(
        path.as_ref(),
        // Discard the stripped comments rather than collecting them: the
        // output writer is `io::sink()`, so this is a pure removal pass.
        &mut |w, s| strip_comments(w, s, &mut io::sink(), true),
        &ignores,
        false,
    );
}

View file

@ -1,80 +1,60 @@
[package]
name = "examples"
version.workspace = true
version = "0.16.0"
license = "MIT"
authors = ["Sebastian Dröge <sebastian@centricular.com>"]
edition.workspace = true
rust-version.workspace = true
edition = "2018"
[dependencies]
glib.workspace = true
gst.workspace = true
gst-gl = { workspace = true, optional = true }
gst-gl-egl = { workspace = true, optional = true }
gst-gl-x11 = { workspace = true, optional = true }
gst-app.workspace = true
gst-audio.workspace = true
gst-base.workspace = true
gst-video.workspace = true
gst-pbutils.workspace = true
gst-play = { workspace = true, optional = true }
gst-player = { workspace = true, optional = true }
ges = { workspace = true, optional = true }
gst-sdp = { workspace = true, optional = true }
gst-rtsp = { workspace = true, optional = true }
gst-rtsp-server = { workspace = true, optional = true }
gst-allocators = { workspace = true, optional = true }
gio = { workspace = true, optional = true }
glib = "0.10"
gstreamer = { version = "0.16", path = "../gstreamer" }
gstreamer-gl = { version = "0.16", path = "../gstreamer-gl", optional = true }
gstreamer-app = { version = "0.16", path = "../gstreamer-app" }
gstreamer-audio = { version = "0.16", path = "../gstreamer-audio" }
gstreamer-base = { version = "0.16", path = "../gstreamer-base" }
gstreamer-video = { version = "0.16", path = "../gstreamer-video" }
gstreamer-pbutils = { version = "0.16", path = "../gstreamer-pbutils" }
gstreamer-player = { version = "0.16", path = "../gstreamer-player", optional = true }
gstreamer-editing-services = { version = "0.16", path = "../gstreamer-editing-services", optional = true }
gstreamer-sdp = { version = "0.16", path = "../gstreamer-sdp", optional = true }
gstreamer-rtsp = { version = "0.16", path = "../gstreamer-rtsp", optional = true }
gstreamer-rtsp-server = { version = "0.16", path = "../gstreamer-rtsp-server", optional = true }
gstreamer-rtsp-server-sys = { version = "0.9", features = ["v1_8"], optional = true }
gtk = { version = "0.9", optional = true }
gdk = { version = "0.13", optional = true }
gio = { version = "0.9", optional = true }
anyhow = "1.0"
byte-slice-cast = "1"
cairo-rs = { workspace = true, features=["use_glib"], optional = true }
derive_more = "0.99.5"
futures = "0.3"
# Since there's nothing Windows-specific to enable on gstreamer-rs, unconditionally enable glutin's WGL backend
glutin = { version = "0.31", optional = true, default-features = false, features = ["wgl"] }
glutin-winit = { version = "0.4", optional = true, default-features = false, features = ["wgl"] }
image = { version = "0.24", optional = true, default-features = false, features = ["png", "jpeg"] }
memfd = { version = "0.6", optional = true }
memmap2 = { version = "0.9", optional = true }
pango = { workspace = true, optional = true }
pangocairo = { workspace = true, optional = true }
raw-window-handle = { version = "0.5", optional = true }
uds = { version = "0.4", optional = true }
winit = { version = "0.29", optional = true, default-features = false, features = ["rwh_05"] }
atomic_refcell = "0.1"
data-encoding = "2.0"
once_cell = "1"
[target.'cfg(windows)'.dependencies]
windows = { version = "0.58", features=["Win32_Graphics_Direct3D11",
"Win32_Foundation", "Win32_Graphics_Direct3D", "Win32_Graphics_Dxgi",
"Win32_Graphics_Dxgi_Common", "Win32_Graphics_Direct2D",
"Win32_Graphics_Direct2D_Common", "Win32_Graphics_DirectWrite",
"Win32_Graphics_Imaging", "Win32_System_Com", "Foundation_Numerics"], optional = true }
[target.'cfg(target_os = "macos")'.dependencies]
cocoa = "0.26"
objc = "0.2.7"
[target.'cfg(target_os = "macos")'.build-dependencies]
system-deps = "7"
[package.metadata.system-deps]
"gstreamer-1.0" = "1.14"
byte-slice-cast = "0.3"
cairo-rs = { version = "0.9", features=["use_glib"], optional = true }
cairo-sys-rs = { version = "0.10", features=["use_glib"], optional = true }
pango = { version = "0.9", optional = true }
pangocairo = { version = "0.10", optional = true }
glutin = { version = "0.21", optional = true }
winit = { version = "0.19", optional = true }
once_cell = "1.0"
[build-dependencies]
gl_generator = { version = "0.14", optional = true }
[features]
default = []
rtsp-server = ["gst-rtsp-server", "gst-rtsp", "gst-sdp"]
rtsp-server-record = ["gst-rtsp-server", "gst-rtsp", "gio"]
gst-player = ["gstreamer-player"]
ges = ["gstreamer-editing-services"]
gtksink = ["gtk", "gio"]
gtkvideooverlay = ["gtk", "gdk", "gio"]
gtkvideooverlay-x11 = ["gtkvideooverlay"]
gtkvideooverlay-quartz = ["gtkvideooverlay"]
gst-rtsp-server = ["gstreamer-rtsp-server", "gstreamer-rtsp", "gstreamer-sdp"]
gst-rtsp-server-record = ["gstreamer-rtsp-server-sys", "gstreamer-rtsp-server", "gstreamer-rtsp", "gio"]
v1_10 = ["gstreamer/v1_10"]
pango-cairo = ["pango", "pangocairo", "cairo-rs"]
overlay-composition = ["pango", "pangocairo", "cairo-rs"]
gl = ["dep:gst-gl", "dep:gl_generator", "dep:glutin", "dep:glutin-winit", "dep:winit", "dep:raw-window-handle"]
gst-gl-x11 = ["dep:gst-gl-x11", "glutin-winit?/glx"] # glx turns on x11
gst-gl-egl = ["dep:gst-gl-egl", "glutin-winit?/egl", "glutin-winit?/x11", "glutin-winit?/wayland"] # Use X11 or Wayland via EGL
allocators = ["gst-allocators", "memmap2", "memfd", "uds"]
overlay-composition = ["pango", "pangocairo", "cairo-rs", "cairo-sys-rs" ]
gl = ["gstreamer-gl", "gl_generator", "glutin"]
gl-egl = ["gstreamer-gl/egl"]
gl-x11 = ["gstreamer-gl/x11"]
gl-wayland = ["gstreamer-gl/wayland"]
[[bin]]
name = "appsink"
@ -84,6 +64,7 @@ name = "appsrc"
[[bin]]
name = "custom_events"
required-features = ["v1_10"]
[[bin]]
name = "custom_meta"
@ -91,15 +72,20 @@ name = "custom_meta"
[[bin]]
name = "decodebin"
[[bin]]
name = "debug_ringbuffer"
[[bin]]
name = "encodebin"
[[bin]]
name = "events"
[[bin]]
name = "gtksink"
required-features = ["gtksink"]
[[bin]]
name = "gtkvideooverlay"
required-features = ["gtkvideooverlay"]
[[bin]]
name = "iterator"
@ -115,10 +101,6 @@ name = "transmux"
[[bin]]
name = "pad_probes"
[[bin]]
name = "play"
required-features = ["gst-play"]
[[bin]]
name = "playbin"
@ -137,15 +119,11 @@ name = "rtpfecserver"
[[bin]]
name = "rtsp-server"
required-features = ["rtsp-server"]
required-features = ["gst-rtsp-server"]
[[bin]]
name = "rtsp-server-subclass"
required-features = ["rtsp-server"]
[[bin]]
name = "rtsp-server-custom-auth"
required-features = ["rtsp-server", "gst-rtsp-server/v1_22"]
required-features = ["gst-rtsp-server"]
[[bin]]
name = "tagsetter"
@ -161,7 +139,7 @@ name = "glib-futures"
[[bin]]
name = "rtsp-server-record"
required-features = ["rtsp-server-record"]
required-features = ["gst-rtsp-server-record"]
[[bin]]
name = "discoverer"
@ -174,47 +152,17 @@ required-features = ["pango-cairo"]
name = "overlay-composition"
required-features = ["overlay-composition"]
[[bin]]
name = "overlay-composition-d2d"
required-features = ["windows"]
[[bin]]
name = "ges"
required-features = ["ges"]
[[bin]]
name = "glwindow"
required-features = ["gl"]
[[bin]]
name = "glfilter"
name = "glupload"
required-features = ["gl"]
features = ["gl-egl", "gl-x11", "gl-wayland"]
[[bin]]
name = "subclass"
[[bin]]
name = "video_converter"
[[bin]]
name = "thumbnail"
required-features = ["image"]
[[bin]]
name = "fd_allocator"
required-features = ["allocators"]
[[bin]]
name = "cairo_compositor"
required-features = ["cairo-rs", "gst-video/v1_18"]
[[bin]]
name = "d3d11videosink"
required-features = ["windows"]
[[bin]]
name = "audio_multichannel_interleave"
[[bin]]
name = "zoom"
required-features = ["gst-video/v1_18"]

View file

@ -1,7 +1,10 @@
#[cfg(feature = "gl")]
extern crate gl_generator;
#[cfg(feature = "gl")]
fn generate_gl_bindings() {
let dest = std::path::PathBuf::from(&std::env::var("OUT_DIR").unwrap());
let mut file = std::fs::File::create(dest.join("test_gl_bindings.rs")).unwrap();
let mut file = std::fs::File::create(&dest.join("test_gl_bindings.rs")).unwrap();
gl_generator::Registry::new(
gl_generator::Api::Gles2,
(3, 0),
@ -19,22 +22,4 @@ fn generate_gl_bindings() {}
fn main() {
println!("cargo:rerun-if-changed=build.rs");
generate_gl_bindings();
// https://github.com/rust-lang/cargo/issues/5077#issuecomment-1284482987
#[cfg(all(not(docsrs), target_os = "macos"))]
match system_deps::Config::new().probe() {
Ok(deps) => {
let usr = std::path::Path::new("/usr/lib");
let usr_local = std::path::Path::new("/usr/local/lib");
for dep in deps.all_link_paths() {
if dep != &usr && dep != &usr_local {
println!("cargo:rustc-link-arg=-Wl,-rpath,{:?}", dep.as_os_str());
}
}
}
Err(s) => {
println!("cargo:warning={s}");
std::process::exit(1);
}
}
}

View file

@ -10,42 +10,64 @@
// This is the format we request:
// Audio / Signed 16bit / 1 channel / arbitrary sample rate
use anyhow::Error;
extern crate gstreamer as gst;
use gst::gst_element_error;
use gst::prelude::*;
extern crate gstreamer_app as gst_app;
extern crate gstreamer_audio as gst_audio;
use byte_slice_cast::*;
use std::i16;
use std::i32;
use anyhow::Error;
use derive_more::{Display, Error};
use gst::{element_error, prelude::*};
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[display(fmt = "Missing element {}", _0)]
struct MissingElement(#[error(not(source))] &'static str);
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
source: glib::Error,
}
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("audiotestsrc").build()?;
let appsink = gst_app::AppSink::builder()
// Tell the appsink what format we want. It will then be the audiotestsrc's job to
// provide the format we request.
// This can be set after linking the two objects, because format negotiation between
// both elements will happen during pre-rolling of the pipeline.
.caps(
&gst_audio::AudioCapsBuilder::new_interleaved()
.format(gst_audio::AUDIO_FORMAT_S16)
.channels(1)
.build(),
)
.build();
let pipeline = gst::Pipeline::new(None);
let src = gst::ElementFactory::make("audiotestsrc", None)
.map_err(|_| MissingElement("audiotestsrc"))?;
let sink = gst::ElementFactory::make("appsink", None).map_err(|_| MissingElement("appsink"))?;
pipeline.add_many([&src, appsink.upcast_ref()])?;
src.link(&appsink)?;
pipeline.add_many(&[&src, &sink])?;
src.link(&sink)?;
let appsink = sink
.dynamic_cast::<gst_app::AppSink>()
.expect("Sink element is expected to be an appsink!");
// Tell the appsink what format we want. It will then be the audiotestsrc's job to
// provide the format we request.
// This can be set after linking the two objects, because format negotiation between
// both elements will happen during pre-rolling of the pipeline.
appsink.set_caps(Some(&gst::Caps::new_simple(
"audio/x-raw",
&[
("format", &gst_audio::AUDIO_FORMAT_S16.to_str()),
("layout", &"interleaved"),
("channels", &(1i32)),
("rate", &gst::IntRange::<i32>::new(1, i32::MAX)),
],
)));
// Getting data out of the appsink is done by setting callbacks on it.
// The appsink will then call those handlers, as soon as data is available.
@ -55,8 +77,8 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
.new_sample(|appsink| {
// Pull the sample in question out of the appsink's buffer.
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or_else(|| {
element_error!(
let buffer = sample.get_buffer().ok_or_else(|| {
gst_element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to get buffer from appsink")
@ -73,7 +95,7 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
// So mapping the buffer makes the underlying memory region accessible to us.
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
let map = buffer.map_readable().map_err(|_| {
element_error!(
gst_element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to map buffer readable")
@ -86,10 +108,10 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
// it by setting the appsink's caps. So what we do here is interpret the
// memory region we mapped as an array of signed 16 bit integers.
let samples = map.as_slice_of::<i16>().map_err(|_| {
element_error!(
gst_element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to interpret buffer as S16 PCM")
("Failed to interprete buffer as S16 PCM")
);
gst::FlowError::Error
@ -105,7 +127,7 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
})
.sum();
let rms = (sum / (samples.len() as f64)).sqrt();
println!("rms: {rms}");
println!("rms: {}", rms);
Ok(gst::FlowSuccess::Ok)
})
@ -119,10 +141,10 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -131,11 +153,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Null)?;
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
.get_src()
.map(|s| String::from(s.get_path_string()))
.unwrap_or_else(|| String::from("None")),
error: err.get_error().to_string(),
debug: err.get_debug(),
source: err.get_error(),
}
.into());
}
@ -151,12 +174,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
fn example_main() {
match create_pipeline().and_then(main_loop) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -10,20 +10,28 @@
// The application provides data of the following format:
// Video / BGRx (4 bytes) / 2 fps
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_app as gst_app;
extern crate gstreamer_video as gst_video;
use anyhow::Error;
use derive_more::{Display, Error};
use gst::prelude::*;
use gst_video::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[display(fmt = "Missing element {}", _0)]
struct MissingElement(#[error(not(source))] &'static str);
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
source: glib::Error,
}
const WIDTH: usize = 320;
@ -32,7 +40,19 @@ const HEIGHT: usize = 240;
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
let pipeline = gst::Pipeline::default();
let pipeline = gst::Pipeline::new(None);
let src = gst::ElementFactory::make("appsrc", None).map_err(|_| MissingElement("appsrc"))?;
let videoconvert = gst::ElementFactory::make("videoconvert", None)
.map_err(|_| MissingElement("videoconvert"))?;
let sink = gst::ElementFactory::make("autovideosink", None)
.map_err(|_| MissingElement("autovideosink"))?;
pipeline.add_many(&[&src, &videoconvert, &sink])?;
gst::Element::link_many(&[&src, &videoconvert, &sink])?;
let appsrc = src
.dynamic_cast::<gst_app::AppSrc>()
.expect("Source element is expected to be an appsrc!");
// Specify the format we want to provide as application into the pipeline
// by creating a video info with the given format and creating caps from it for the appsrc element.
@ -42,16 +62,8 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
.build()
.expect("Failed to create video info");
let appsrc = gst_app::AppSrc::builder()
.caps(&video_info.to_caps().unwrap())
.format(gst::Format::Time)
.build();
let videoconvert = gst::ElementFactory::make("videoconvert").build()?;
let sink = gst::ElementFactory::make("autovideosink").build()?;
pipeline.add_many([appsrc.upcast_ref(), &videoconvert, &sink])?;
gst::Element::link_many([appsrc.upcast_ref(), &videoconvert, &sink])?;
appsrc.set_caps(Some(&video_info.to_caps().unwrap()));
appsrc.set_property_format(gst::Format::Time);
// Our frame counter, that is stored in the mutable environment
// of the closure of the need-data callback
@ -76,7 +88,7 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
return;
}
println!("Producing frame {i}");
println!("Producing frame {}", i);
let r = if i % 2 == 0 { 0 } else { 255 };
let g = if i % 3 == 0 { 0 } else { 255 };
@ -89,37 +101,20 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
// For each frame we produce, we set the timestamp when it should be displayed
// (pts = presentation time stamp)
// The autovideosink will use this information to display the frame at the right time.
buffer.set_pts(i * 500 * gst::ClockTime::MSECOND);
buffer.set_pts(i * 500 * gst::MSECOND);
// At this point, buffer is only a reference to an existing memory region somewhere.
// When we want to access its content, we have to map it while requesting the required
// mode of access (read, read/write).
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
let mut vframe =
gst_video::VideoFrameRef::from_buffer_ref_writable(buffer, &video_info)
.unwrap();
let mut data = buffer.map_writable().unwrap();
// Remember some values from the frame for later usage
let width = vframe.width() as usize;
let height = vframe.height() as usize;
// Each line of the first plane has this many bytes
let stride = vframe.plane_stride()[0] as usize;
// Iterate over each of the height many lines of length stride
for line in vframe
.plane_data_mut(0)
.unwrap()
.chunks_exact_mut(stride)
.take(height)
{
// Iterate over each pixel of 4 bytes in that line
for pixel in line[..(4 * width)].chunks_exact_mut(4) {
pixel[0] = b;
pixel[1] = g;
pixel[2] = r;
pixel[3] = 0;
}
for p in data.as_mut_slice().chunks_mut(4) {
assert_eq!(p.len(), 4);
p[0] = b;
p[1] = g;
p[2] = r;
p[3] = 0;
}
}
@ -138,10 +133,10 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -150,11 +145,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Null)?;
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
.get_src()
.map(|s| String::from(s.get_path_string()))
.unwrap_or_else(|| String::from("None")),
error: err.get_error().to_string(),
debug: err.get_debug(),
source: err.get_error(),
}
.into());
}
@ -170,12 +166,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
fn example_main() {
match create_pipeline().and_then(main_loop) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -1,153 +0,0 @@
// This example demonstrates how to mix multiple audio
// streams into a single output using the audiomixer element.
// In this case, we're mixing 4 stereo streams into a single 8 channel output.
use gst::prelude::*;
use std::env;
#[path = "../examples-common.rs"]
mod examples_common;
const TRACKS: i32 = 4;
/// Creates one test-tone source (stereo, fixed frequency per track), adds it to
/// `pipeline`, and links it to a freshly requested sink pad of the audiomixer.
///
/// * `pipeline` - the pipeline that owns all elements; the new source elements are added to it.
/// * `mixer` - the audiomixer element; a new `sink_%u` pad is requested from it per call.
/// * `track_number` - zero-based track index; determines the tone frequency and which
///   output channel pair this track is routed to via the mix matrix.
fn create_source_and_link(pipeline: &gst::Pipeline, mixer: &gst::Element, track_number: i32) {
    // Give each track a distinct, audible frequency: 1 kHz, 2 kHz, 3 kHz, 4 kHz.
    let freq = ((track_number + 1) * 1000) as f64;
    // num-buffers bounds the stream so the pipeline eventually sends EOS.
    let audiosrc = gst::ElementFactory::make("audiotestsrc")
        .property("freq", freq)
        .property("num-buffers", 2000)
        .build()
        .unwrap();

    // Force the source branch to stereo before it reaches the mixer.
    let caps = gst_audio::AudioCapsBuilder::new().channels(2).build();
    let capsfilter = gst::ElementFactory::make("capsfilter")
        .property("caps", &caps)
        .build()
        .unwrap();

    pipeline.add_many([&audiosrc, &capsfilter]).unwrap();
    gst::Element::link_many([&audiosrc, &capsfilter]).unwrap();

    let src_pad = capsfilter.static_pad("src").unwrap();
    // audiomixer sink pads are request pads, created on demand per input stream.
    let mixer_pad = mixer.request_pad_simple("sink_%u").unwrap();

    // audiomixer expects a mix-matrix set on each input pad,
    // indicating which output channels our input should appear in.
    // Rows => input channels, columns => output channels.
    // Here each input channel will appear in exactly one output channel.
    let mut mix_matrix: Vec<Vec<f32>> = vec![];
    for i in 0..TRACKS {
        if i == track_number {
            // This track's L/R go straight through to its own output pair.
            mix_matrix.push(vec![1.0, 0.0]);
            mix_matrix.push(vec![0.0, 1.0]);
        } else {
            // All other output pairs receive silence from this input.
            mix_matrix.push(vec![0.0, 0.0]);
            mix_matrix.push(vec![0.0, 0.0]);
        }
    }

    // The matrix is delivered to the pad through its converter-config property.
    let mut audiomixer_config = gst_audio::AudioConverterConfig::new();
    audiomixer_config.set_mix_matrix(&mix_matrix);
    mixer_pad.set_property("converter-config", audiomixer_config);

    src_pad.link(&mixer_pad).unwrap();
}
/// Builds and runs the mixing pipeline:
/// 4x (audiotestsrc ! capsfilter) -> audiomixer ! capsfilter ! audioconvert
///   ! audioresample ! wavenc ! filesink
/// then blocks on the bus until EOS or an error, and shuts the pipeline down.
fn example_main() {
    gst::init().unwrap();

    // Single required CLI argument: the output WAV file path.
    let args: Vec<_> = env::args().collect();
    let output_file = if args.len() == 2 {
        &args[1]
    } else {
        println!("Usage: audiomixer <output file>");
        std::process::exit(-1);
    };

    let pipeline = gst::Pipeline::new();
    let audiomixer = gst::ElementFactory::make("audiomixer").build().unwrap();

    // Using an arbitrary layout of 4 stereo pairs.
    let positions = [
        gst_audio::AudioChannelPosition::FrontLeft,
        gst_audio::AudioChannelPosition::FrontRight,
        gst_audio::AudioChannelPosition::RearLeft,
        gst_audio::AudioChannelPosition::RearRight,
        gst_audio::AudioChannelPosition::SideLeft,
        gst_audio::AudioChannelPosition::SideRight,
        gst_audio::AudioChannelPosition::TopFrontLeft,
        gst_audio::AudioChannelPosition::TopFrontRight,
    ];

    // Encode the layout as a channel mask and pin the mixer's output to
    // 8 channels with exactly that layout.
    let mask = gst_audio::AudioChannelPosition::positions_to_mask(&positions, true).unwrap();
    let caps = gst_audio::AudioCapsBuilder::new()
        .channels(positions.len() as i32)
        .channel_mask(mask)
        .build();
    let capsfilter = gst::ElementFactory::make("capsfilter")
        .property("caps", &caps)
        .build()
        .unwrap();

    // Conversion/resampling so wavenc gets a format it can encode.
    let audioconvert = gst::ElementFactory::make("audioconvert").build().unwrap();
    let audioresample = gst::ElementFactory::make("audioresample").build().unwrap();
    let wavenc = gst::ElementFactory::make("wavenc").build().unwrap();
    let sink = gst::ElementFactory::make("filesink")
        .property("location", output_file)
        .build()
        .unwrap();

    pipeline
        .add_many([
            &audiomixer,
            &capsfilter,
            &audioconvert,
            &audioresample,
            &wavenc,
            &sink,
        ])
        .unwrap();

    gst::Element::link_many([
        &audiomixer,
        &capsfilter,
        &audioconvert,
        &audioresample,
        &wavenc,
        &sink,
    ])
    .unwrap();

    // Attach the four input branches, one per track.
    for i in 0..TRACKS {
        create_source_and_link(&pipeline, &audiomixer, i);
    }

    let bus = pipeline.bus().expect("Pipeline without bus");

    pipeline
        .set_state(gst::State::Playing)
        .expect("Unable to start pipeline");

    // Block until the finite sources finish (EOS) or something fails.
    for msg in bus.iter_timed(gst::ClockTime::NONE) {
        use gst::MessageView;
        match msg.view() {
            MessageView::Eos(..) => break,
            MessageView::Error(err) => {
                eprintln!(
                    "Error from {:?}: {} ({:?})",
                    msg.src().map(|s| s.path_string()),
                    err.error(),
                    err.debug()
                );
                break;
            }
            _ => (),
        }
    }

    pipeline
        .set_state(gst::State::Null)
        .expect("Unable to change pipeline state to NULL");
}
fn main() {
    // examples_common::run is only required to set up the application environment on macOS
    // (but not necessary in normal Cocoa applications where this is set up automatically)
    examples_common::run(example_main);
}

View file

@ -1,719 +0,0 @@
// This example demonstrates how to implement a custom compositor based on cairo.
#![allow(clippy::non_send_fields_in_send_ty)]
use anyhow::{Context, Error};
use gst::prelude::*;
use gst_base::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
// Our custom compositor element is defined in this module.
mod cairo_compositor {
use gst_base::subclass::prelude::*;
use gst_video::{prelude::*, subclass::prelude::*};
// In the imp submodule we include the actual implementation of the compositor.
mod imp {
use std::sync::Mutex;
use super::*;
// Settings of the compositor.
#[derive(Clone)]
struct Settings {
    // Background color of the composed frame. Only the low 24 bits (0xRRGGBB)
    // are consumed by the drawing code; the top byte is not read.
    background_color: u32,
}

impl Default for Settings {
    fn default() -> Self {
        Self {
            // Defaults to black (R = G = B = 0).
            background_color: 0xff_00_00_00,
        }
    }
}
// This is the private data of our compositor.
#[derive(Default)]
pub struct CairoCompositor {
settings: Mutex<Settings>,
}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data.
#[glib::object_subclass]
impl ObjectSubclass for CairoCompositor {
const NAME: &'static str = "CairoCompositor";
type Type = super::CairoCompositor;
type ParentType = gst_video::VideoAggregator;
type Interfaces = (gst::ChildProxy,);
}
// Implementation of glib::Object virtual methods.
impl ObjectImpl for CairoCompositor {
// Specification of the compositor properties.
// In this case a single property for configuring the background color of the
// composition.
fn properties() -> &'static [glib::ParamSpec] {
static PROPERTIES: std::sync::OnceLock<Vec<glib::ParamSpec>> =
std::sync::OnceLock::new();
PROPERTIES.get_or_init(|| {
vec![glib::ParamSpecUInt::builder("background-color")
.nick("Background Color")
.blurb("Background color as 0xRRGGBB")
.default_value(Settings::default().background_color)
.build()]
})
}
// Called by the application whenever the value of a property should be changed.
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
let mut settings = self.settings.lock().unwrap();
match pspec.name() {
"background-color" => {
settings.background_color = value.get().unwrap();
}
_ => unimplemented!(),
};
}
// Called by the application whenever the value of a property should be retrieved.
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
let settings = self.settings.lock().unwrap();
match pspec.name() {
"background-color" => settings.background_color.to_value(),
_ => unimplemented!(),
}
}
}
// Implementation of gst::Object virtual methods.
impl GstObjectImpl for CairoCompositor {}
// Implementation of gst::Element virtual methods.
impl ElementImpl for CairoCompositor {
// The element specific metadata. This information is what is visible from
// gst-inspect-1.0 and can also be programmatically retrieved from the gst::Registry
// after initial registration without having to load the plugin in memory.
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: std::sync::OnceLock<gst::subclass::ElementMetadata> =
std::sync::OnceLock::new();
Some(ELEMENT_METADATA.get_or_init(|| {
gst::subclass::ElementMetadata::new(
"Cairo Compositor",
"Compositor/Video",
"Cairo based compositor",
"Sebastian Dröge <sebastian@centricular.com>",
)
}))
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: std::sync::OnceLock<Vec<gst::PadTemplate>> =
std::sync::OnceLock::new();
PAD_TEMPLATES.get_or_init(|| {
// Create pad templates for our sink and source pad. These are later used for
// actually creating the pads and beforehand already provide information to
// GStreamer about all possible pads that could exist for this type.
// On all pads we can only handle BGRx.
let caps = gst_video::VideoCapsBuilder::new()
.format(gst_video::VideoFormat::Bgrx)
.pixel_aspect_ratio((1, 1).into())
.build();
vec![
// The src pad template must be named "src" for aggregator
// and always be there.
gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
)
.unwrap(),
// The sink pad template must be named "sink_%u" by default for aggregator
// and be requested by the application.
//
// Also declare here that it should be a pad with our custom compositor pad
// type that is defined further below.
gst::PadTemplate::with_gtype(
"sink_%u",
gst::PadDirection::Sink,
gst::PadPresence::Request,
&caps,
super::CairoCompositorPad::static_type(),
)
.unwrap(),
]
})
}
// Notify via the child proxy interface whenever a new pad is added or removed.
fn request_new_pad(
&self,
templ: &gst::PadTemplate,
name: Option<&str>,
caps: Option<&gst::Caps>,
) -> Option<gst::Pad> {
let element = self.obj();
let pad = self.parent_request_new_pad(templ, name, caps)?;
element.child_added(&pad, &pad.name());
Some(pad)
}
fn release_pad(&self, pad: &gst::Pad) {
let element = self.obj();
element.child_removed(pad, &pad.name());
self.parent_release_pad(pad);
}
}
// Implementation of gst_base::Aggregator virtual methods.
impl AggregatorImpl for CairoCompositor {
// Called whenever a query arrives at the given sink pad of the compositor.
fn sink_query(
&self,
aggregator_pad: &gst_base::AggregatorPad,
query: &mut gst::QueryRef,
) -> bool {
use gst::QueryViewMut;
// We can accept any input caps that match the pad template. By default
// videoaggregator only allows caps that have the same format as the output.
match query.view_mut() {
QueryViewMut::Caps(q) => {
let caps = aggregator_pad.pad_template_caps();
let filter = q.filter();
let caps = if let Some(filter) = filter {
filter.intersect_with_mode(&caps, gst::CapsIntersectMode::First)
} else {
caps
};
q.set_result(&caps);
true
}
QueryViewMut::AcceptCaps(q) => {
let caps = q.caps();
let template_caps = aggregator_pad.pad_template_caps();
let res = caps.is_subset(&template_caps);
q.set_result(res);
true
}
_ => self.parent_sink_query(aggregator_pad, query),
}
}
}
// Implementation of gst_video::VideoAggregator virtual methods.
impl VideoAggregatorImpl for CairoCompositor {
// Called by videoaggregator whenever the output format should be determined.
fn find_best_format(
&self,
_downstream_caps: &gst::Caps,
) -> Option<(gst_video::VideoInfo, bool)> {
// Let videoaggregator select whatever format downstream wants.
//
// By default videoaggregator doesn't allow a different format than the input
// format.
None
}
// Called whenever a new output frame should be produced. At this point, each pad has
// either no frame queued up at all or the frame that should be used for this output
// time.
fn aggregate_frames(
&self,
token: &gst_video::subclass::AggregateFramesToken,
outbuf: &mut gst::BufferRef,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let element = self.obj();
let pads = element.sink_pads();
// Map the output frame writable.
let out_info = element.video_info().unwrap();
let mut out_frame =
gst_video::VideoFrameRef::from_buffer_ref_writable(outbuf, &out_info).unwrap();
// And then create a cairo context for drawing on the output frame.
with_frame(&mut out_frame, |ctx| {
let settings = self.settings.lock().unwrap().clone();
// First of all, clear the background.
let bg = (
((settings.background_color >> 16) & 0xff) as f64 / 255.0,
((settings.background_color >> 8) & 0xff) as f64 / 255.0,
(settings.background_color & 0xff) as f64 / 255.0,
);
ctx.set_operator(cairo::Operator::Source);
ctx.set_source_rgb(bg.0, bg.1, bg.2);
ctx.paint().unwrap();
ctx.set_operator(cairo::Operator::Over);
// Then for each pad (in zorder), draw it according to the current settings.
for pad in pads {
let pad = pad.downcast_ref::<CairoCompositorPad>().unwrap();
let settings = pad.imp().settings.lock().unwrap().clone();
if settings.alpha <= 0.0 || settings.scale <= 0.0 {
continue;
}
let frame = match pad.prepared_frame(token) {
Some(frame) => frame,
None => continue,
};
ctx.save().unwrap();
ctx.translate(settings.xpos, settings.ypos);
ctx.scale(settings.scale, settings.scale);
ctx.translate(frame.width() as f64 / 2.0, frame.height() as f64 / 2.0);
ctx.rotate(settings.rotate / 360.0 * 2.0 * std::f64::consts::PI);
ctx.translate(
-(frame.width() as f64 / 2.0),
-(frame.height() as f64 / 2.0),
);
paint_frame(ctx, &frame, settings.alpha);
ctx.restore().unwrap();
}
});
Ok(gst::FlowSuccess::Ok)
}
}
// Implementation of gst::ChildProxy virtual methods.
//
// This allows accessing the pads and their properties from e.g. gst-launch.
impl ChildProxyImpl for CairoCompositor {
    // The proxy exposes one child per pad of the element.
    fn children_count(&self) -> u32 {
        self.obj().num_pads() as u32
    }

    // Resolve a child by its object name: first pad whose name matches.
    fn child_by_name(&self, name: &str) -> Option<glib::Object> {
        for pad in self.obj().pads() {
            if pad.name() == name {
                return Some(pad.upcast());
            }
        }
        None
    }

    // Resolve a child by position in the element's pad list.
    fn child_by_index(&self, index: u32) -> Option<glib::Object> {
        self.obj()
            .pads()
            .into_iter()
            .nth(index as usize)
            .map(|pad| pad.upcast())
    }
}
}
// Creates a cairo context around the given video frame and then calls the closure to operate
// on the cairo context. Ensures that no references to the video frame stay inside cairo.
// Creates a cairo context around the given video frame and then calls the closure to operate
// on the cairo context. Ensures that no references to the video frame stay inside cairo.
//
// `frame` must be a writable, mapped video frame; the closure draws directly into its
// first plane's memory. After the closure returns, the surface is finished and the
// reference count is asserted to be 1, proving cairo holds no dangling pointer into
// the frame once this function returns.
fn with_frame<F: FnOnce(&cairo::Context)>(
    frame: &mut gst_video::VideoFrameRef<&mut gst::BufferRef>,
    func: F,
) {
    // SAFETY: This is the one and only surface reference and it is dropped at the end, meaning
    // nothing from cairo is referencing the frame data anymore.
    unsafe {
        use glib::translate::*;

        // Wrap plane 0 of the frame in a cairo image surface without copying.
        // NOTE(review): Format::Rgb24 matches the BGRx pad template assuming a
        // little-endian layout — confirm if this element is ever built for big-endian.
        let surface = cairo::ImageSurface::create_for_data_unsafe(
            frame.plane_data_mut(0).unwrap().as_mut_ptr(),
            cairo::Format::Rgb24,
            frame.width() as i32,
            frame.height() as i32,
            frame.plane_stride()[0],
        )
        .unwrap();

        let ctx = cairo::Context::new(&surface).unwrap();
        func(&ctx);

        // Drop the context first, then tear down the surface so no draws can follow.
        drop(ctx);
        surface.finish();

        // Verify the invariant stated above: we hold the last reference.
        assert_eq!(
            cairo::ffi::cairo_surface_get_reference_count(surface.to_glib_none().0),
            1,
        );
    }
}
// Paints the frame with the given alpha on the cairo context at the current origin.
// Ensures that no references to the video frame stay inside cairo.
// Paints the frame with the given alpha on the cairo context at the current origin.
// Ensures that no references to the video frame stay inside cairo.
//
// `ctx` is the destination context (positioned/transformed by the caller),
// `frame` is the read-only input frame to composite, and `alpha` in [0, 1]
// controls its opacity.
fn paint_frame(
    ctx: &cairo::Context,
    frame: &gst_video::VideoFrameRef<&gst::BufferRef>,
    alpha: f64,
) {
    // SAFETY: This is the one and only surface reference and it is dropped at the end, meaning
    // nothing from cairo is referencing the frame data anymore.
    //
    // Also nothing is ever writing to the surface from here.
    unsafe {
        use glib::translate::*;

        // Wrap the (immutable) frame data in a surface; the *mut cast is sound
        // because the surface is only ever used as a paint source below.
        let surface = cairo::ImageSurface::create_for_data_unsafe(
            frame.plane_data(0).unwrap().as_ptr() as *mut u8,
            cairo::Format::Rgb24,
            frame.width() as i32,
            frame.height() as i32,
            frame.plane_stride()[0],
        )
        .unwrap();

        ctx.set_source_surface(&surface, 0.0, 0.0).unwrap();
        ctx.paint_with_alpha(alpha).unwrap();

        // Replace the source pattern so the context drops its surface reference.
        ctx.set_source_rgb(0.0, 0.0, 0.0);

        // Verify the invariant stated above: we hold the last reference.
        assert_eq!(
            cairo::ffi::cairo_surface_get_reference_count(surface.to_glib_none().0),
            1,
        );
    }
}
// This here defines the public interface of our element and implements
// the corresponding traits so that it behaves like any other gst::Element.
glib::wrapper! {
pub struct CairoCompositor(ObjectSubclass<imp::CairoCompositor>) @extends gst_video::VideoAggregator, gst_base::Aggregator, gst::Element, gst::Object, @implements gst::ChildProxy;
}
impl CairoCompositor {
    /// Builds a new compositor instance, optionally assigning it the given object name.
    pub fn new(name: Option<&str>) -> Self {
        let builder = glib::Object::builder().property("name", name);
        builder.build()
    }
}
// In the imp submodule we include the implementation of the pad subclass.
//
// This doesn't implement any additional logic but only provides properties for configuring the
// appearance of the stream corresponding to this pad and the storage of the property values.
mod imp_pad {
use std::sync::Mutex;
use super::*;
// Settings of our pad.
#[derive(Clone)]
pub(super) struct Settings {
    // Opacity of this input in [0, 1]; 0 skips drawing entirely.
    pub(super) alpha: f64,
    // Uniform scale factor applied to the input; 0 skips drawing entirely.
    pub(super) scale: f64,
    // Rotation of the input in degrees (0..360), applied around the frame center.
    pub(super) rotate: f64,
    // Horizontal position of the input on the output canvas.
    pub(super) xpos: f64,
    // Vertical position of the input on the output canvas.
    pub(super) ypos: f64,
}

impl Default for Settings {
    fn default() -> Self {
        // Identity placement: fully opaque, unscaled, unrotated, at the origin.
        Self {
            alpha: 1.0,
            scale: 1.0,
            rotate: 0.0,
            xpos: 0.0,
            ypos: 0.0,
        }
    }
}
// This is the private data of our pad.
#[derive(Default)]
pub struct CairoCompositorPad {
pub(super) settings: Mutex<Settings>,
}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data.
#[glib::object_subclass]
impl ObjectSubclass for CairoCompositorPad {
const NAME: &'static str = "CairoCompositorPad";
type Type = super::CairoCompositorPad;
type ParentType = gst_video::VideoAggregatorPad;
}
// Implementation of glib::Object virtual methods.
impl ObjectImpl for CairoCompositorPad {
    // Declares the per-pad properties that control the position and the
    // appearance of the stream arriving on this pad.
    fn properties() -> &'static [glib::ParamSpec] {
        static PROPERTIES: std::sync::OnceLock<Vec<glib::ParamSpec>> =
            std::sync::OnceLock::new();
        PROPERTIES.get_or_init(|| {
            let defaults = Settings::default();

            // Every pad property is a double with a lower bound of 0.0; this
            // helper only varies the metadata, upper bound and default.
            let double_prop = |name: &str, nick: &str, blurb: &str, max: f64, default: f64| {
                glib::ParamSpecDouble::builder(name)
                    .nick(nick)
                    .blurb(blurb)
                    .minimum(0.0)
                    .maximum(max)
                    .default_value(default)
                    .build()
            };

            vec![
                double_prop(
                    "alpha",
                    "Alpha",
                    "Alpha value of the input",
                    1.0,
                    defaults.alpha,
                ),
                double_prop(
                    "scale",
                    "Scale",
                    "Scale factor of the input",
                    f64::MAX,
                    defaults.scale,
                ),
                double_prop(
                    "rotate",
                    "Rotate",
                    "Rotation of the input",
                    360.0,
                    defaults.rotate,
                ),
                double_prop(
                    "xpos",
                    "X Position",
                    "Horizontal position of the input",
                    f64::MAX,
                    defaults.xpos,
                ),
                double_prop(
                    "ypos",
                    "Y Position",
                    "Vertical position of the input",
                    f64::MAX,
                    defaults.ypos,
                ),
            ]
        })
    }

    // Called by the application whenever the value of a property should be changed.
    fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
        let mut settings = self.settings.lock().unwrap();

        // All properties are f64, so pick the field and store the new value.
        let field = match pspec.name() {
            "alpha" => &mut settings.alpha,
            "scale" => &mut settings.scale,
            "rotate" => &mut settings.rotate,
            "xpos" => &mut settings.xpos,
            "ypos" => &mut settings.ypos,
            _ => unimplemented!(),
        };
        *field = value.get().unwrap();
    }

    // Called by the application whenever the value of a property should be retrieved.
    fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
        let settings = self.settings.lock().unwrap();

        let current = match pspec.name() {
            "alpha" => settings.alpha,
            "scale" => settings.scale,
            "rotate" => settings.rotate,
            "xpos" => settings.xpos,
            "ypos" => settings.ypos,
            _ => unimplemented!(),
        };
        current.to_value()
    }
}
// Implementation of gst::Object virtual methods.
// Nothing is overridden here or below: the parent class behavior is kept
// for all of these traits, but the impls must exist for subclassing.
impl GstObjectImpl for CairoCompositorPad {}
// Implementation of gst::Pad virtual methods.
impl PadImpl for CairoCompositorPad {}
// Implementation of gst_base::AggregatorPad virtual methods.
impl AggregatorPadImpl for CairoCompositorPad {}
// Implementation of gst_video::VideoAggregatorPad virtual methods.
impl VideoAggregatorPadImpl for CairoCompositorPad {}
}
// This here defines the public interface of our element and implements
// the corresponding traits so that it behaves like any other gst::Pad.
// The `@extends` list exposes the APIs of the listed parent classes on
// the wrapper type.
glib::wrapper! {
    pub struct CairoCompositorPad(ObjectSubclass<imp_pad::CairoCompositorPad>) @extends gst_video::VideoAggregatorPad, gst_base::AggregatorPad, gst::Pad, gst::Object;
}
}
// Builds the example pipeline: two videotestsrc inputs into our cairo
// compositor, converted and rendered by autovideosink, with a timer-driven
// animation of the per-pad properties.
fn create_pipeline() -> Result<gst::Pipeline, Error> {
    gst::init()?;

    // Create our pipeline with the compositor and two input streams.
    let pipeline = gst::Pipeline::default();
    let src1 = gst::ElementFactory::make("videotestsrc")
        .property_from_str("pattern", "ball")
        .build()?;
    let src2 = gst::ElementFactory::make("videotestsrc")
        .property_from_str("pattern", "smpte")
        .build()?;
    let comp = cairo_compositor::CairoCompositor::new(None);
    let conv = gst::ElementFactory::make("videoconvert").build()?;
    let sink = gst::ElementFactory::make("autovideosink").build()?;

    comp.set_property("background-color", 0xff_33_33_33u32);

    pipeline.add_many([&src1, &src2, comp.upcast_ref(), &conv, &sink])?;

    // Helper building the fixed-size raw video caps used on every link below.
    let video_caps = |width: i32, height: i32| {
        gst::Caps::builder("video/x-raw")
            .field("width", width)
            .field("height", height)
            .build()
    };

    // Link everything together.
    src1.link_filtered(&comp, &video_caps(320, 240))
        .context("Linking source 1")?;
    src2.link_filtered(&comp, &video_caps(320, 240))
        .context("Linking source 2")?;
    comp.link_filtered(&conv, &video_caps(1280, 720))
        .context("Linking converter")?;
    conv.link(&sink).context("Linking sink")?;

    // Change positions etc. of both inputs based on a timer. The maxima keep
    // the 320x240 inputs fully inside the 1280x720 output.
    let xmax = 1280.0 - 320.0f64;
    let ymax = 720.0 - 240.0f64;

    let sink_0 = comp.static_pad("sink_0").unwrap();
    sink_0.set_property("xpos", 0.0f64);
    sink_0.set_property("ypos", 0.0f64);

    let sink_1 = comp.static_pad("sink_1").unwrap();
    sink_1.set_property("xpos", xmax);
    sink_1.set_property("ypos", ymax);

    comp.set_emit_signals(true);
    comp.connect_samples_selected(move |_agg, _seg, pts, _dts, _dur, _info| {
        // Fraction in [0.0, 1.0) describing how far `pts` is into a
        // repeating period of `secs` seconds.
        let phase = |secs: u64| {
            let period = gst::ClockTime::from_seconds(secs).nseconds();
            (pts.unwrap().nseconds() % period) as f64 / period as f64
        };

        use std::f64::consts::PI;

        // Position and rotation period is 10s.
        let pos = phase(10);
        sink_0.set_property("xpos", (1.0 + f64::sin(2.0 * PI * pos)) * xmax / 2.0);
        sink_0.set_property("ypos", (1.0 + f64::cos(2.0 * PI * pos)) * ymax / 2.0);
        sink_1.set_property("xpos", (1.0 + f64::cos(2.0 * PI * pos)) * xmax / 2.0);
        sink_1.set_property("ypos", (1.0 + f64::sin(2.0 * PI * pos)) * ymax / 2.0);
        sink_0.set_property("rotate", pos * 360.0);
        sink_1.set_property("rotate", 360.0 - pos * 360.0);

        // Alpha period is 2s.
        let pos = phase(2);
        sink_0.set_property("alpha", (1.0 + f64::sin(2.0 * PI * pos)) / 2.0);
        sink_1.set_property("alpha", (1.0 + f64::cos(2.0 * PI * pos)) / 2.0);

        // Scale period is 20s.
        let pos = phase(20);
        sink_0.set_property("scale", pos);
        sink_1.set_property("scale", 1.0 - pos);
    });

    Ok(pipeline)
}
// Start the pipeline and collect messages from the bus until an error or EOS.
// Starts the pipeline and processes bus messages until EOS or an error.
//
// The pipeline is always shut down (set to Null) before returning, whether
// playback ended normally or failed.
fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
    pipeline.set_state(gst::State::Playing)?;

    let bus = pipeline
        .bus()
        .expect("Pipeline without bus. Shouldn't happen!");
    let mut messages = bus.stream();

    // Drive the GLib main context until the stream of bus messages yields
    // EOS or an error, capturing the outcome as a Result.
    let result = glib::MainContext::default().block_on(async {
        use futures::prelude::*;

        while let Some(msg) = messages.next().await {
            use gst::MessageView;

            match msg.view() {
                MessageView::Eos(..) => return Ok(()),
                MessageView::Error(err) => {
                    return Err(anyhow::anyhow!(
                        "Error from {:?}: {} ({:?})",
                        err.src().map(|s| s.path_string()),
                        err.error(),
                        err.debug()
                    ));
                }
                _ => (),
            }
        }

        Ok(())
    });

    if result.is_err() {
        // Best-effort shutdown; the original error is what the caller cares about.
        let _ = pipeline.set_state(gst::State::Null);
        return result;
    }

    pipeline.set_state(gst::State::Null)?;
    Ok(())
}
fn example_main() -> Result<(), Error> {
create_pipeline().and_then(main_loop)
}
fn main() -> Result<(), Error> {
    // examples_common::run is only required to set up the application environment on macOS
    // (but not necessary in normal Cocoa applications where this is set up automatically).
    examples_common::run(example_main)
}

View file

@ -5,6 +5,7 @@
// is sent on the sink pad, we expect to see it emerge on the other side when
// the data in front of it has exited.
extern crate gstreamer as gst;
use gst::prelude::*;
#[path = "../examples-common.rs"]
@ -21,7 +22,7 @@ impl ExampleCustomEvent {
#[allow(clippy::new_ret_no_self)]
pub fn new(send_eos: bool) -> gst::Event {
let s = gst::Structure::builder(Self::EVENT_NAME)
.field("send_eos", send_eos)
.field("send_eos", &send_eos)
.build();
gst::event::CustomDownstream::new(s)
}
@ -29,12 +30,12 @@ impl ExampleCustomEvent {
pub fn parse(ev: &gst::EventRef) -> Option<ExampleCustomEvent> {
match ev.view() {
gst::EventView::CustomDownstream(e) => {
let s = match e.structure() {
Some(s) if s.name() == Self::EVENT_NAME => s,
let s = match e.get_structure() {
Some(s) if s.get_name() == Self::EVENT_NAME => s,
_ => return None, // No structure in this event, or the name didn't match
};
let send_eos = s.get::<bool>("send_eos").unwrap();
let send_eos = s.get_some::<bool>("send_eos").unwrap();
Some(ExampleCustomEvent { send_eos })
}
_ => None, // Not a custom event
@ -48,19 +49,19 @@ fn example_main() {
let main_loop = glib::MainLoop::new(None, false);
// This creates a pipeline by parsing the gst-launch pipeline syntax.
let pipeline = gst::parse::launch(
let pipeline = gst::parse_launch(
"audiotestsrc name=src ! queue max-size-time=2000000000 ! fakesink name=sink sync=true",
)
.unwrap();
let bus = pipeline.bus().unwrap();
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let pipeline = pipeline.dynamic_cast::<gst::Pipeline>().unwrap();
let sink = pipeline.by_name("sink").unwrap();
let sinkpad = sink.static_pad("sink").unwrap();
let sink = pipeline.get_by_name("sink").unwrap();
let sinkpad = sink.get_static_pad("sink").unwrap();
// Need to move a new reference into the closure.
// !!ATTENTION!!:
@ -75,33 +76,31 @@ fn example_main() {
// Add a pad probe on the sink pad and catch the custom event we sent, then send
// an EOS event on the pipeline.
sinkpad.add_probe(gst::PadProbeType::EVENT_DOWNSTREAM, move |_, probe_info| {
let Some(event) = probe_info.event() else {
return gst::PadProbeReturn::Ok;
};
let Some(custom_event) = ExampleCustomEvent::parse(event) else {
return gst::PadProbeReturn::Ok;
};
let Some(pipeline) = pipeline_weak.upgrade() else {
return gst::PadProbeReturn::Ok;
};
if custom_event.send_eos {
/* Send EOS event to shut down the pipeline, but from an async callback, as we're
* in a pad probe blocking the stream thread here... */
println!("Got custom event with send_eos=true. Sending EOS");
let ev = gst::event::Eos::new();
let pipeline_weak = pipeline_weak.clone();
pipeline.call_async(move |_| {
if let Some(pipeline) = pipeline_weak.upgrade() {
pipeline.send_event(ev);
match probe_info.data {
Some(gst::PadProbeData::Event(ref ev))
if ev.get_type() == gst::EventType::CustomDownstream =>
{
if let Some(custom_event) = ExampleCustomEvent::parse(ev) {
if let Some(pipeline) = pipeline_weak.upgrade() {
if custom_event.send_eos {
/* Send EOS event to shut down the pipeline, but from an async callback, as we're
* in a pad probe blocking the stream thread here... */
println!("Got custom event with send_eos=true. Sending EOS");
let ev = gst::event::Eos::new();
let pipeline_weak = pipeline_weak.clone();
pipeline.call_async(move |_| {
if let Some(pipeline) = pipeline_weak.upgrade() {
pipeline.send_event(ev);
}
});
} else {
println!("Got custom event, with send_eos=false. Ignoring");
}
}
}
});
} else {
println!("Got custom event, with send_eos=false. Ignoring");
}
_ => (),
}
gst::PadProbeReturn::Ok
});
@ -115,53 +114,56 @@ fn example_main() {
glib::timeout_add_seconds(2 + i as u32, move || {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let Some(pipeline) = pipeline_weak.upgrade() else {
return glib::ControlFlow::Break;
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return glib::Continue(false),
};
println!("Sending custom event to the pipeline with send_eos={send_eos}");
println!(
"Sending custom event to the pipeline with send_eos={}",
send_eos
);
let ev = ExampleCustomEvent::new(*send_eos);
if !pipeline.send_event(ev) {
println!("Warning: Failed to send custom event");
}
// Remove this handler, the pipeline will shutdown once our pad probe catches the custom
// event and sends EOS
glib::ControlFlow::Break
glib::Continue(false)
});
}
let main_loop_clone = main_loop.clone();
// This sets the bus's signal handler (don't be mislead by the "add", there can only be one).
// Every message from the bus is passed through this function. Its returnvalue determines
// whether the handler wants to be called again. If glib::ControlFlow::Break is returned, the
// whether the handler wants to be called again. If glib::Continue(false) is returned, the
// handler is removed and will never be called again. The mainloop still runs though.
let _bus_watch = bus
.add_watch(move |_, msg| {
use gst::MessageView;
bus.add_watch(move |_, msg| {
use gst::MessageView;
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => {
println!("received eos");
// An EndOfStream event was sent to the pipeline, so we tell our main loop
// to stop execution here.
main_loop.quit()
}
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
main_loop.quit();
}
_ => (),
};
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => {
println!("received eos");
// An EndOfStream event was sent to the pipeline, so we tell our main loop
// to stop execution here.
main_loop.quit()
}
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
main_loop.quit();
}
_ => (),
};
// Tell the mainloop to continue executing this callback.
glib::ControlFlow::Continue
})
.expect("Failed to add bus watch");
// Tell the mainloop to continue executing this callback.
glib::Continue(true)
})
.expect("Failed to add bus watch");
// Operate GStreamer's bus, facilitating GLib's mainloop here.
// This function call will block until you tell the mainloop to quit
@ -171,10 +173,15 @@ fn example_main() {
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
// Remove the watch function from the bus.
// Again: There can always only be one watch function.
// Thus we don't have to tell him which function to remove.
bus.remove_watch().unwrap();
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}

View file

@ -2,20 +2,24 @@
//
// It simply attaches a GstMeta with a Rust String to buffers that are passed into
// an appsrc and retrieves them again from an appsink.
#![allow(clippy::non_send_fields_in_send_ty)]
use gst::{element_error, prelude::*};
#[macro_use]
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_app as gst_app;
#[path = "../examples-common.rs"]
mod examples_common;
mod custom_meta {
use std::{fmt, mem};
use gst::gst_sys;
use gst::prelude::*;
use std::fmt;
use std::ptr;
// Public Rust type for the custom meta.
#[repr(transparent)]
#[repr(C)]
pub struct CustomMeta(imp::CustomMeta);
// Metas must be Send+Sync.
@ -29,23 +33,25 @@ mod custom_meta {
label: String,
) -> gst::MetaRefMut<Self, gst::meta::Standalone> {
unsafe {
// Manually dropping because gst_buffer_add_meta() takes ownership of the
// content of the struct.
let mut params = mem::ManuallyDrop::new(imp::CustomMetaParams { label });
// The label is passed through via the params to custom_meta_init().
let meta = gst::ffi::gst_buffer_add_meta(
// First add it: this will store an empty label via custom_meta_init().
let meta = gst_sys::gst_buffer_add_meta(
buffer.as_mut_ptr(),
imp::custom_meta_get_info(),
&mut *params as *mut imp::CustomMetaParams as glib::ffi::gpointer,
ptr::null_mut(),
) as *mut imp::CustomMeta;
// Then actually set the label.
{
let meta = &mut *meta;
meta.label = label;
}
Self::from_mut_ptr(buffer, meta)
}
}
// Retrieve the stored label.
pub fn label(&self) -> &str {
pub fn get_label(&self) -> &str {
self.0.label.as_str()
}
}
@ -54,7 +60,7 @@ mod custom_meta {
unsafe impl MetaAPI for CustomMeta {
type GstType = imp::CustomMeta;
fn meta_api() -> glib::Type {
fn get_meta_api() -> glib::Type {
imp::custom_meta_api_get_type()
}
}
@ -62,68 +68,64 @@ mod custom_meta {
impl fmt::Debug for CustomMeta {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("CustomMeta")
.field("label", &self.label())
.field("label", &self.get_label())
.finish()
}
}
// Actual unsafe implementation of the meta.
mod imp {
use std::{mem, ptr};
use glib::glib_sys;
use glib::translate::*;
pub(super) struct CustomMetaParams {
pub label: String,
}
use gst::gst_sys;
use once_cell::sync::Lazy;
use std::mem;
use std::ptr;
// This is the C type that is actually stored as meta inside the buffers.
#[repr(C)]
pub struct CustomMeta {
parent: gst::ffi::GstMeta,
parent: gst_sys::GstMeta,
pub(super) label: String,
}
// Function to register the meta API and get a type back.
pub(super) fn custom_meta_api_get_type() -> glib::Type {
static TYPE: std::sync::OnceLock<glib::Type> = std::sync::OnceLock::new();
*TYPE.get_or_init(|| unsafe {
let t = glib::Type::from_glib(gst::ffi::gst_meta_api_type_register(
static TYPE: Lazy<glib::Type> = Lazy::new(|| unsafe {
let t = from_glib(gst_sys::gst_meta_api_type_register(
b"MyCustomMetaAPI\0".as_ptr() as *const _,
// We provide no tags here as our meta is just a label and does
// not refer to any specific aspect of the buffer.
// not refer to any specific aspect of the buffer
[ptr::null::<std::os::raw::c_char>()].as_ptr() as *mut *const _,
));
assert_ne!(t, glib::Type::INVALID);
assert_ne!(t, glib::Type::Invalid);
t
})
});
*TYPE
}
// Initialization function for our meta. This needs to ensure all fields are correctly
// initialized. They will contain random memory before.
unsafe extern "C" fn custom_meta_init(
meta: *mut gst::ffi::GstMeta,
params: glib::ffi::gpointer,
_buffer: *mut gst::ffi::GstBuffer,
) -> glib::ffi::gboolean {
assert!(!params.is_null());
meta: *mut gst_sys::GstMeta,
_params: glib_sys::gpointer,
_buffer: *mut gst_sys::GstBuffer,
) -> glib_sys::gboolean {
let meta = &mut *(meta as *mut CustomMeta);
let params = ptr::read(params as *const CustomMetaParams);
// Need to initialize all our fields correctly here.
ptr::write(&mut meta.label, params.label);
// Need to initialize all our fields correctly here
ptr::write(&mut meta.label, String::new());
true.into_glib()
true.to_glib()
}
// Free function for our meta. This needs to free/drop all memory we allocated.
unsafe extern "C" fn custom_meta_free(
meta: *mut gst::ffi::GstMeta,
_buffer: *mut gst::ffi::GstBuffer,
meta: *mut gst_sys::GstMeta,
_buffer: *mut gst_sys::GstBuffer,
) {
let meta = &mut *(meta as *mut CustomMeta);
@ -135,45 +137,42 @@ mod custom_meta {
// in a way that is compatible with the transformation type. In this case we just always
// copy it over.
unsafe extern "C" fn custom_meta_transform(
dest: *mut gst::ffi::GstBuffer,
meta: *mut gst::ffi::GstMeta,
_buffer: *mut gst::ffi::GstBuffer,
_type_: glib::ffi::GQuark,
_data: glib::ffi::gpointer,
) -> glib::ffi::gboolean {
let meta = &*(meta as *mut CustomMeta);
dest: *mut gst_sys::GstBuffer,
meta: *mut gst_sys::GstMeta,
_buffer: *mut gst_sys::GstBuffer,
_type_: glib_sys::GQuark,
_data: glib_sys::gpointer,
) -> glib_sys::gboolean {
let meta = &mut *(meta as *mut CustomMeta);
// We simply copy over our meta here. Other metas might have to look at the type
// and do things conditional on that, or even just drop the meta.
super::CustomMeta::add(gst::BufferRef::from_mut_ptr(dest), meta.label.clone());
true.into_glib()
true.to_glib()
}
// Register the meta itself with its functions.
pub(super) fn custom_meta_get_info() -> *const gst::ffi::GstMetaInfo {
struct MetaInfo(ptr::NonNull<gst::ffi::GstMetaInfo>);
pub(super) fn custom_meta_get_info() -> *const gst_sys::GstMetaInfo {
struct MetaInfo(ptr::NonNull<gst_sys::GstMetaInfo>);
unsafe impl Send for MetaInfo {}
unsafe impl Sync for MetaInfo {}
static META_INFO: std::sync::OnceLock<MetaInfo> = std::sync::OnceLock::new();
static META_INFO: Lazy<MetaInfo> = Lazy::new(|| unsafe {
MetaInfo(
ptr::NonNull::new(gst_sys::gst_meta_register(
custom_meta_api_get_type().to_glib(),
b"MyCustomMeta\0".as_ptr() as *const _,
mem::size_of::<CustomMeta>(),
Some(custom_meta_init),
Some(custom_meta_free),
Some(custom_meta_transform),
) as *mut gst_sys::GstMetaInfo)
.expect("Failed to register meta API"),
)
});
META_INFO
.get_or_init(|| unsafe {
MetaInfo(
ptr::NonNull::new(gst::ffi::gst_meta_register(
custom_meta_api_get_type().into_glib(),
b"MyCustomMeta\0".as_ptr() as *const _,
mem::size_of::<CustomMeta>(),
Some(custom_meta_init),
Some(custom_meta_free),
Some(custom_meta_transform),
) as *mut gst::ffi::GstMetaInfo)
.expect("Failed to register meta API"),
)
})
.0
.as_ptr()
META_INFO.0.as_ptr()
}
}
}
@ -182,9 +181,15 @@ fn example_main() {
gst::init().unwrap();
// This creates a pipeline with appsrc and appsink.
let pipeline = gst::Pipeline::default();
let appsrc = gst_app::AppSrc::builder().build();
let appsink = gst_app::AppSink::builder().build();
let pipeline = gst::Pipeline::new(None);
let appsrc = gst::ElementFactory::make("appsrc", None)
.unwrap()
.downcast::<gst_app::AppSrc>()
.unwrap();
let appsink = gst::ElementFactory::make("appsink", None)
.unwrap()
.downcast::<gst_app::AppSink>()
.unwrap();
pipeline.add(&appsrc).unwrap();
pipeline.add(&appsink).unwrap();
@ -202,13 +207,13 @@ fn example_main() {
return;
}
println!("Producing buffer {i}");
println!("Producing buffer {}", i);
// Add a custom meta with a label to this buffer.
let mut buffer = gst::Buffer::new();
{
let buffer = buffer.get_mut().unwrap();
custom_meta::CustomMeta::add(buffer, format!("This is buffer {i}"));
custom_meta::CustomMeta::add(buffer, format!("This is buffer {}", i));
}
i += 1;
@ -227,8 +232,8 @@ fn example_main() {
.new_sample(|appsink| {
// Pull the sample in question out of the appsink's buffer.
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or_else(|| {
element_error!(
let buffer = sample.get_buffer().ok_or_else(|| {
gst_element_error!(
appsink,
gst::ResourceError::Failed,
("Failed to get buffer from appsink")
@ -239,9 +244,9 @@ fn example_main() {
// Retrieve the custom meta from the buffer and print it.
let meta = buffer
.meta::<custom_meta::CustomMeta>()
.get_meta::<custom_meta::CustomMeta>()
.expect("No custom meta found");
println!("Got buffer with label: {}", meta.label());
println!("Got buffer with label: {}", meta.get_label());
Ok(gst::FlowSuccess::Ok)
})
@ -255,11 +260,11 @@ fn example_main() {
let pipeline = pipeline.dynamic_cast::<gst::Pipeline>().unwrap();
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
// And run until EOS or an error happened.
for msg in bus.iter_timed(gst::ClockTime::NONE) {
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -267,9 +272,9 @@ fn example_main() {
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
break;
}
@ -284,7 +289,7 @@ fn example_main() {
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically).
examples_common::run(example_main);
}

View file

@ -1,364 +0,0 @@
// This example demonstrates the use of the d3d11videosink's "present"
// signal and the use of Direct2D/DirectWrite APIs in Rust.
//
// Application can perform various hardware-accelerated 2D graphics operation
// (e.g., like cairo can support) and text rendering via the Windows APIs.
// In this example, 2D graphics operation and text rendering will happen
// directly to the on the DXGI swapchain's backbuffer via Windows API in
// strictly zero-copy manner
use std::{
collections::VecDeque,
sync::{Arc, Mutex},
time::SystemTime,
};
use gst::{glib, prelude::*};
use windows::{
core::*,
Win32::Graphics::{
Direct2D::{Common::*, *},
Direct3D11::*,
DirectWrite::*,
Dxgi::{Common::*, *},
},
};
// State shared between the "present" callback (which draws the overlay) and
// the pad probe (which measures the framerate). Access is serialized by the
// Mutex the context is wrapped in at creation time.
struct OverlayContext {
    // Device-independent Direct2D factory; used to create the per-frame
    // DXGI surface render target.
    d2d_factory: ID2D1Factory,
    // DirectWrite factory for creating text layouts.
    dwrite_factory: IDWriteFactory,
    // Base text format (font family, weight, style); size is adjusted later.
    text_format: IDWriteTextFormat,
    // Last seen backbuffer description; a change means the window was resized.
    texture_desc: D3D11_TEXTURE2D_DESC,
    // Cached text layout; rebuilt whenever the size or the shown fps changes.
    text_layout: Option<IDWriteTextLayout>,
    // Recent frame arrival times used to compute the average framerate.
    timestamp_queue: VecDeque<SystemTime>,
    // Most recently measured fps (written by the pad probe).
    avg_fps: f32,
    // Fps value currently baked into `text_layout`.
    display_fps: f32,
    // Font size chosen to fit the current window width.
    font_size: f32,
}
// Creates the shared overlay state with freshly constructed Direct2D and
// DirectWrite factories.
//
// The factory/format objects created here are device-independent, so they can
// be built once up front. The DirectX calls are unsafe but are not expected
// to fail unless a GPU hang or device-removal condition happens.
fn create_overlay_context() -> Arc<Mutex<OverlayContext>> {
    let d2d_factory = unsafe {
        D2D1CreateFactory::<ID2D1Factory>(D2D1_FACTORY_TYPE_MULTI_THREADED, None).unwrap()
    };
    let dwrite_factory =
        unsafe { DWriteCreateFactory::<IDWriteFactory>(DWRITE_FACTORY_TYPE_SHARED).unwrap() };

    // Start with a 12pt Consolas format; the effective font size is adjusted
    // later to fit the window.
    let initial_font_size = 12f32;
    let text_format = unsafe {
        dwrite_factory
            .CreateTextFormat(
                w!("Consolas"),
                None,
                DWRITE_FONT_WEIGHT_REGULAR,
                DWRITE_FONT_STYLE_NORMAL,
                DWRITE_FONT_STRETCH_NORMAL,
                initial_font_size,
                w!("en-us"),
            )
            .unwrap()
    };

    let context = OverlayContext {
        d2d_factory,
        dwrite_factory,
        text_format,
        texture_desc: D3D11_TEXTURE2D_DESC::default(),
        text_layout: None,
        timestamp_queue: VecDeque::with_capacity(10),
        avg_fps: 0f32,
        display_fps: 0f32,
        font_size: initial_font_size,
    };

    Arc::new(Mutex::new(context))
}
// Entry point: plays the URI given on the command line through d3d11videosink,
// drawing an fps text overlay directly onto the swapchain backbuffer from the
// sink's "present" signal. The drawing path is zero-copy: Direct2D renders
// straight into the DXGI surface the sink is about to present.
fn main() -> Result<()> {
    gst::init().unwrap();

    let args: Vec<String> = std::env::args().collect();
    if args.len() != 2 {
        println!("URI must be specified");
        return Ok(());
    }

    let main_loop = glib::MainLoop::new(None, false);

    let overlay_context = create_overlay_context();
    let overlay_context_weak = Arc::downgrade(&overlay_context);

    // Needs BGRA or RGBA swapchain for D2D interop,
    // and the "present" signal must be explicitly enabled.
    let videosink = gst::ElementFactory::make("d3d11videosink")
        .property("emit-present", true)
        .property_from_str("display-format", "DXGI_FORMAT_B8G8R8A8_UNORM")
        .build()
        .unwrap();

    // Listen to the "present" signal and draw the overlay from the callback.
    // Required operations here:
    // 1) Gets IDXGISurface and ID3D11Texture2D interface from
    //    given ID3D11RenderTargetView COM object
    //    - ID3D11Texture2D: To get texture resolution
    //    - IDXGISurface: To create Direct2D render target
    // 2) Creates or reuses IDWriteTextLayout interface
    //    - This object represents the text layout we want to draw on the render target
    // 3) Draw rectangle (overlay background) and text on render target
    //
    // NOTE: ID2D1Factory, IDWriteFactory, IDWriteTextFormat, and
    // IDWriteTextLayout objects are device-independent, and could be created
    // earlier instead of creating them in the callback.
    // But ID2D1RenderTarget is a device-dependent resource.
    // The client should not hold the d2d render target object outside of
    // this callback scope because the resource must be cleared before
    // releasing/resizing the DXGI swapchain.
    videosink.connect_closure(
        "present",
        false,
        glib::closure!(move |_sink: &gst::Element,
                             _device: &gst::Object,
                             rtv_raw: glib::Pointer| {
            let overlay_context = overlay_context_weak.upgrade().unwrap();
            let mut context = overlay_context.lock().unwrap();
            // Clone the factories so `context` can still be borrowed mutably below.
            let dwrite_factory = context.dwrite_factory.clone();
            let d2d_factory = context.d2d_factory.clone();

            // SAFETY: from_raw_borrowed() below is clearly an unsafe operation here.
            // Regarding the other part of the below block, all DirectX
            // APIs are marked as unsafe, except for cast.
            //
            // In theory, all the Direct3D/Direct2D APIs could fail for
            // some reasons (it's hardware!), but in practice, it's a very
            // unexpected situation and any failure below would mean we are
            // doing something the wrong way, or a driver bug or so.
            unsafe {
                let rtv = ID3D11RenderTargetView::from_raw_borrowed(&rtv_raw).unwrap();
                let resource = rtv.GetResource().unwrap();
                let texture = resource.cast::<ID3D11Texture2D>().unwrap();
                let desc = {
                    let mut desc = D3D11_TEXTURE2D_DESC::default();
                    texture.GetDesc(&mut desc);
                    desc
                };

                // Window size was updated, so create a new text layout.
                let calculate_font_size = if desc != context.texture_desc {
                    context.texture_desc = desc;
                    context.text_layout = None;
                    true
                } else {
                    false
                };

                // New fps value, so create a new layout.
                if context.avg_fps != context.display_fps {
                    context.display_fps = context.avg_fps;
                    context.text_layout = None;
                }

                if context.text_layout.is_none() {
                    let overlay_string = format!("TextOverlay, Fps {:.1}", context.display_fps);
                    let overlay_wstring = overlay_string.encode_utf16().collect::<Vec<_>>();
                    let layout = dwrite_factory
                        .CreateTextLayout(
                            &overlay_wstring,
                            &context.text_format,
                            desc.Width as f32,
                            desc.Height as f32 / 5f32,
                        )
                        .unwrap();

                    // Adjust alignment: center the text in the overlay box.
                    layout
                        .SetTextAlignment(DWRITE_TEXT_ALIGNMENT_CENTER)
                        .unwrap();
                    layout
                        .SetParagraphAlignment(DWRITE_PARAGRAPH_ALIGNMENT_CENTER)
                        .unwrap();

                    // XXX: This is not an efficient approach.
                    // The font size can be pre-calculated for a pre-defined
                    // window size and string length.
                    let mut range = DWRITE_TEXT_RANGE {
                        startPosition: 0u32,
                        length: overlay_wstring.len() as u32,
                    };

                    if calculate_font_size {
                        // Grow or shrink the font in 0.5pt steps until the
                        // rendered text roughly fills the window width.
                        let mut font_size = 12f32;
                        let mut was_decreased = false;

                        loop {
                            let mut metrics = DWRITE_TEXT_METRICS::default();
                            layout.GetMetrics(&mut metrics).unwrap();
                            layout
                                .GetFontSize(0, &mut font_size, Some(&mut range))
                                .unwrap();

                            if metrics.widthIncludingTrailingWhitespace >= desc.Width as f32 {
                                // Too wide: shrink until it fits (or hits 1pt).
                                if font_size > 1f32 {
                                    font_size -= 0.5f32;
                                    was_decreased = true;
                                    layout.SetFontSize(font_size, range).unwrap();
                                    continue;
                                }

                                break;
                            }

                            // Don't oscillate: once we shrank, stop growing.
                            if was_decreased {
                                break;
                            }

                            if metrics.widthIncludingTrailingWhitespace < desc.Width as f32 {
                                if metrics.widthIncludingTrailingWhitespace
                                    >= desc.Width as f32 * 0.7f32
                                {
                                    break;
                                }

                                font_size += 0.5f32;
                                layout.SetFontSize(font_size, range).unwrap();
                            }
                        }

                        context.font_size = font_size;
                    } else {
                        layout.SetFontSize(context.font_size, range).unwrap();
                    }

                    context.text_layout = Some(layout);
                };

                let dxgi_surf = resource.cast::<IDXGISurface>().unwrap();
                let render_target = d2d_factory
                    .CreateDxgiSurfaceRenderTarget(
                        &dxgi_surf,
                        &D2D1_RENDER_TARGET_PROPERTIES {
                            r#type: D2D1_RENDER_TARGET_TYPE_DEFAULT,
                            pixelFormat: D2D1_PIXEL_FORMAT {
                                format: DXGI_FORMAT_B8G8R8A8_UNORM,
                                alphaMode: D2D1_ALPHA_MODE_PREMULTIPLIED,
                            },
                            // zero means default DPI
                            dpiX: 0f32,
                            dpiY: 0f32,
                            usage: D2D1_RENDER_TARGET_USAGE_NONE,
                            minLevel: D2D1_FEATURE_LEVEL_DEFAULT,
                        },
                    )
                    .unwrap();

                // Opaque black brush for the text itself.
                let text_brush = render_target
                    .CreateSolidColorBrush(
                        &D2D1_COLOR_F {
                            r: 0f32,
                            g: 0f32,
                            b: 0f32,
                            a: 1f32,
                        },
                        None,
                    )
                    .unwrap();

                // Semi-transparent brush for the overlay background.
                let overlay_brush = render_target
                    .CreateSolidColorBrush(
                        &D2D1_COLOR_F {
                            r: 0f32,
                            g: 0.5f32,
                            b: 0.5f32,
                            a: 0.3f32,
                        },
                        None,
                    )
                    .unwrap();

                render_target.BeginDraw();

                // Draws the overlay background. It will blend the overlay's
                // background color with the already rendered video frame.
                render_target.FillRectangle(
                    &D2D_RECT_F {
                        left: 0f32,
                        top: 0f32,
                        right: desc.Width as f32,
                        bottom: desc.Height as f32 / 5f32,
                    },
                    &overlay_brush,
                );

                // Then, renders the text.
                render_target.DrawTextLayout(
                    D2D_POINT_2F { x: 0f32, y: 0f32 },
                    context.text_layout.as_ref(),
                    &text_brush,
                    D2D1_DRAW_TEXT_OPTIONS_NONE,
                );

                // EndDraw may not be successful for some reasons.
                // Ignores any error in this example.
                let _ = render_target.EndDraw(None, None);
            }
        }),
    );

    // Add a pad probe to calculate the framerate.
    let sinkpad = videosink.static_pad("sink").unwrap();
    let overlay_context_weak = Arc::downgrade(&overlay_context);
    sinkpad.add_probe(gst::PadProbeType::BUFFER, move |_pad, _probe_info| {
        let overlay_context = overlay_context_weak.upgrade().unwrap();
        let mut context = overlay_context.lock().unwrap();
        context.timestamp_queue.push_back(SystemTime::now());

        // Updates the framerate once per 10 frames.
        if context.timestamp_queue.len() >= 10 {
            let now = context.timestamp_queue.back().unwrap();
            let front = context.timestamp_queue.front().unwrap();
            let duration = now.duration_since(*front).unwrap().as_millis() as f32;
            context.avg_fps = 1000f32 * (context.timestamp_queue.len() - 1) as f32 / duration;
            context.timestamp_queue.clear();
        }

        gst::PadProbeReturn::Ok
    });

    let playbin = gst::ElementFactory::make("playbin")
        .property("uri", &args[1])
        .property("video-sink", &videosink)
        .build()
        .unwrap();

    let main_loop_clone = main_loop.clone();
    let bus = playbin.bus().unwrap();
    let _bus_watch = bus
        .add_watch(move |_, msg| {
            use gst::MessageView;

            let main_loop = &main_loop_clone;
            match msg.view() {
                MessageView::Eos(..) => {
                    println!("received eos");
                    main_loop.quit()
                }
                MessageView::Error(err) => {
                    println!(
                        "Error from {:?}: {} ({:?})",
                        err.src().map(|s| s.path_string()),
                        err.error(),
                        err.debug()
                    );
                    main_loop.quit();
                }
                _ => (),
            };

            glib::ControlFlow::Continue
        })
        .unwrap();

    playbin.set_state(gst::State::Playing).unwrap();

    main_loop.run();

    playbin.set_state(gst::State::Null).unwrap();

    Ok(())
}

View file

@ -1,85 +0,0 @@
// This example shows how to use the debug ringbuffer.
//
// It runs a simple GStreamer pipeline for a short time,
// and on EOS it dumps the last few KB of debug logs.
//
// It's possible to dump the logs at any time in an application,
// not just on exit like is done here.
use std::process;
use gst::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
// Runs a short test pipeline with the ring-buffer logger installed, then
// dumps the captured debug output once the pipeline has finished.
fn example_main() {
    gst::init().unwrap();

    /* Disable stdout debug, then configure the debug ringbuffer and enable
     * all debug */
    gst::log::remove_default_log_function();
    /* Keep 1KB of logs per thread, removing old threads after 10 seconds */
    gst::log::add_ring_buffer_logger(1024, 10);
    /* Enable all debug categories */
    gst::log::set_default_threshold(gst::DebugLevel::Log);

    let launch_line = "videotestsrc num-buffers=100 ! autovideosink";
    let mut parse_ctx = gst::ParseContext::new();
    let pipeline = match gst::parse::launch_full(
        launch_line,
        Some(&mut parse_ctx),
        gst::ParseFlags::empty(),
    ) {
        Ok(pipeline) => pipeline,
        Err(err) => {
            // A missing element is the most common failure; report which
            // elements could not be found. Anything else is a parse error.
            if matches!(
                err.kind::<gst::ParseError>(),
                Some(gst::ParseError::NoSuchElement)
            ) {
                println!("Missing element(s): {:?}", parse_ctx.missing_elements());
            } else {
                println!("Failed to parse pipeline: {err}");
            }
            process::exit(-1)
        }
    };

    let bus = pipeline.bus().unwrap();
    pipeline
        .set_state(gst::State::Playing)
        .expect("Unable to set the pipeline to the `Playing` state");

    // Block on the bus until playback finishes or fails.
    for msg in bus.iter_timed(gst::ClockTime::NONE) {
        use gst::MessageView;

        match msg.view() {
            MessageView::Eos(..) => break,
            MessageView::Error(err) => {
                println!(
                    "Error from {:?}: {} ({:?})",
                    err.src().map(|s| s.path_string()),
                    err.error(),
                    err.debug()
                );
                break;
            }
            _ => (),
        }
    }

    pipeline
        .set_state(gst::State::Null)
        .expect("Unable to set the pipeline to the `Null` state");

    /* Insert a message into the debug log */
    gst::error!(gst::CAT_DEFAULT, "Hi from the debug log ringbuffer example");

    println!("Dumping debug logs\n");
    for s in gst::log::ring_buffer_logger_get_logs().iter() {
        println!("{s}\n------------------");
    }
}
fn main() {
    // examples_common::run is only needed to prepare the application
    // environment on macOS (ordinary Cocoa applications get this set up
    // automatically); it then invokes our example entry point.
    examples_common::run(example_main);
}

View file

@ -29,28 +29,42 @@
// Especially Windows APIs tend to be quite picky about samplerate and sample-format.
// The same applies to videostreams.
use std::{
env,
sync::{Arc, Mutex},
};
extern crate gstreamer as gst;
use gst::gst_element_error;
use gst::gst_element_warning;
use gst::prelude::*;
#[cfg(feature = "v1_10")]
use glib::subclass::prelude::*;
#[cfg(feature = "v1_10")]
use glib::GBoxed;
use std::env;
#[cfg(feature = "v1_10")]
use std::sync::{Arc, Mutex};
use anyhow::Error;
use derive_more::{Display, Error};
use gst::{element_error, element_warning, prelude::*};
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[display(fmt = "Missing element {}", _0)]
struct MissingElement(#[error(not(source))] &'static str);
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
source: glib::Error,
}
#[derive(Clone, Debug, glib::Boxed)]
#[boxed_type(name = "ErrorValue")]
#[cfg(feature = "v1_10")]
#[derive(Clone, Debug, GBoxed)]
#[gboxed(type_name = "ErrorValue")]
struct ErrorValue(Arc<Mutex<Option<Error>>>);
fn example_main() -> Result<(), Error> {
@ -64,14 +78,16 @@ fn example_main() -> Result<(), Error> {
std::process::exit(-1)
};
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("filesrc")
.property("location", uri)
.build()?;
let decodebin = gst::ElementFactory::make("decodebin").build()?;
let pipeline = gst::Pipeline::new(None);
let src = gst::ElementFactory::make("filesrc", None).map_err(|_| MissingElement("filesrc"))?;
let decodebin =
gst::ElementFactory::make("decodebin", None).map_err(|_| MissingElement("decodebin"))?;
pipeline.add_many([&src, &decodebin])?;
gst::Element::link_many([&src, &decodebin])?;
// Tell the filesrc what file to load
src.set_property("location", &uri)?;
pipeline.add_many(&[&src, &decodebin])?;
gst::Element::link_many(&[&src, &decodebin])?;
// Need to move a new reference into the closure.
// !!ATTENTION!!:
@ -90,26 +106,27 @@ fn example_main() -> Result<(), Error> {
decodebin.connect_pad_added(move |dbin, src_pad| {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let Some(pipeline) = pipeline_weak.upgrade() else {
return;
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return,
};
// Try to detect whether the raw stream decodebin provided us with
// just now is either audio or video (or none of both, e.g. subtitles).
let (is_audio, is_video) = {
let media_type = src_pad.current_caps().and_then(|caps| {
caps.structure(0).map(|s| {
let name = s.name();
let media_type = src_pad.get_current_caps().and_then(|caps| {
caps.get_structure(0).map(|s| {
let name = s.get_name();
(name.starts_with("audio/"), name.starts_with("video/"))
})
});
match media_type {
None => {
element_warning!(
gst_element_warning!(
dbin,
gst::CoreError::Negotiation,
("Failed to get media type from pad {}", src_pad.name())
("Failed to get media type from pad {}", src_pad.get_name())
);
return;
@ -126,10 +143,14 @@ fn example_main() -> Result<(), Error> {
if is_audio {
// decodebin found a raw audiostream, so we build the follow-up pipeline to
// play it on the default audio playback device (using autoaudiosink).
let queue = gst::ElementFactory::make("queue").build()?;
let convert = gst::ElementFactory::make("audioconvert").build()?;
let resample = gst::ElementFactory::make("audioresample").build()?;
let sink = gst::ElementFactory::make("autoaudiosink").build()?;
let queue = gst::ElementFactory::make("queue", None)
.map_err(|_| MissingElement("queue"))?;
let convert = gst::ElementFactory::make("audioconvert", None)
.map_err(|_| MissingElement("audioconvert"))?;
let resample = gst::ElementFactory::make("audioresample", None)
.map_err(|_| MissingElement("audioresample"))?;
let sink = gst::ElementFactory::make("autoaudiosink", None)
.map_err(|_| MissingElement("autoaudiosink"))?;
let elements = &[&queue, &convert, &resample, &sink];
pipeline.add_many(elements)?;
@ -145,15 +166,19 @@ fn example_main() -> Result<(), Error> {
// Get the queue element's sink pad and link the decodebin's newly created
// src pad for the audio stream to it.
let sink_pad = queue.static_pad("sink").expect("queue has no sinkpad");
let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
src_pad.link(&sink_pad)?;
} else if is_video {
// decodebin found a raw videostream, so we build the follow-up pipeline to
// display it using the autovideosink.
let queue = gst::ElementFactory::make("queue").build()?;
let convert = gst::ElementFactory::make("videoconvert").build()?;
let scale = gst::ElementFactory::make("videoscale").build()?;
let sink = gst::ElementFactory::make("autovideosink").build()?;
let queue = gst::ElementFactory::make("queue", None)
.map_err(|_| MissingElement("queue"))?;
let convert = gst::ElementFactory::make("videoconvert", None)
.map_err(|_| MissingElement("videoconvert"))?;
let scale = gst::ElementFactory::make("videoscale", None)
.map_err(|_| MissingElement("videoscale"))?;
let sink = gst::ElementFactory::make("autovideosink", None)
.map_err(|_| MissingElement("autovideosink"))?;
let elements = &[&queue, &convert, &scale, &sink];
pipeline.add_many(elements)?;
@ -165,7 +190,7 @@ fn example_main() -> Result<(), Error> {
// Get the queue element's sink pad and link the decodebin's newly created
// src pad for the video stream to it.
let sink_pad = queue.static_pad("sink").expect("queue has no sinkpad");
let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
src_pad.link(&sink_pad)?;
}
@ -183,29 +208,38 @@ fn example_main() -> Result<(), Error> {
if let Err(err) = insert_sink(is_audio, is_video) {
// The following sends a message of type Error on the bus, containing our detailed
// error information.
element_error!(
#[cfg(feature = "v1_10")]
gst_element_error!(
dbin,
gst::LibraryError::Failed,
("Failed to insert sink"),
details: gst::Structure::builder("error-details")
.field("error",
ErrorValue(Arc::new(Mutex::new(Some(err)))))
&ErrorValue(Arc::new(Mutex::new(Some(err)))))
.build()
);
#[cfg(not(feature = "v1_10"))]
gst_element_error!(
dbin,
gst::LibraryError::Failed,
("Failed to insert sink"),
["{}", err]
);
}
});
pipeline.set_state(gst::State::Playing)?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
// This code iterates over all messages that are sent across our pipeline's bus.
// In the callback ("pad-added" on the decodebin), we sent better error information
// using a bus message. This is the position where we get those messages and log
// the contained information.
for msg in bus.iter_timed(gst::ClockTime::NONE) {
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -213,40 +247,53 @@ fn example_main() -> Result<(), Error> {
MessageView::Error(err) => {
pipeline.set_state(gst::State::Null)?;
match err.details() {
// This bus-message of type error contained our custom error-details struct
// that we sent in the pad-added callback above. So we unpack it and log
// the detailed error information here. details contains a glib::SendValue.
// The unpacked error is the converted to a Result::Err, stopping the
// application's execution.
Some(details) if details.name() == "error-details" => details
.get::<&ErrorValue>("error")
.unwrap()
.clone()
.0
.lock()
.unwrap()
.take()
.map(Result::Err)
.expect("error-details message without actual error"),
_ => Err(ErrorMessage {
#[cfg(feature = "v1_10")]
{
match err.get_details() {
// This bus-message of type error contained our custom error-details struct
// that we sent in the pad-added callback above. So we unpack it and log
// the detailed error information here. details contains a glib::SendValue.
// The unpacked error is the converted to a Result::Err, stopping the
// application's execution.
Some(details) if details.get_name() == "error-details" => details
.get::<&ErrorValue>("error")
.unwrap()
.and_then(|v| v.0.lock().unwrap().take())
.map(Result::Err)
.expect("error-details message without actual error"),
_ => Err(ErrorMessage {
src: msg
.get_src()
.map(|s| String::from(s.get_path_string()))
.unwrap_or_else(|| String::from("None")),
error: err.get_error().to_string(),
debug: err.get_debug(),
source: err.get_error(),
}
.into()),
}?;
}
#[cfg(not(feature = "v1_10"))]
{
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
.get_src()
.map(|s| String::from(s.get_path_string()))
.unwrap_or_else(|| String::from("None")),
error: err.get_error().to_string(),
debug: err.get_debug(),
source: err.get_error(),
}
.into()),
}?;
.into());
}
}
MessageView::StateChanged(s) => {
println!(
"State changed from {:?}: {:?} -> {:?} ({:?})",
s.src().map(|s| s.path_string()),
s.old(),
s.current(),
s.pending()
s.get_src().map(|s| s.get_path_string()),
s.get_old(),
s.get_current(),
s.get_pending()
);
}
_ => (),
@ -259,10 +306,10 @@ fn example_main() -> Result<(), Error> {
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}

View file

@ -8,26 +8,33 @@
// Discovered information could for example contain the stream's duration or whether it is
// seekable (filesystem) or not (some http servers).
use std::env;
extern crate gstreamer as gst;
extern crate gstreamer_pbutils as pbutils;
use crate::pbutils::prelude::*;
use crate::pbutils::DiscovererInfo;
use crate::pbutils::DiscovererStreamInfo;
use anyhow::Error;
use derive_more::{Display, Error};
use gst_pbutils::{prelude::*, DiscovererInfo, DiscovererStreamInfo};
use std::env;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Discoverer error {_0}")]
#[display(fmt = "Discoverer error {}", _0)]
struct DiscovererError(#[error(not(source))] &'static str);
fn print_tags(info: &DiscovererInfo) {
println!("Tags:");
let tags = info.tags();
let tags = info.get_tags();
match tags {
Some(taglist) => {
println!(" {taglist}"); // FIXME use an iterator
println!(" {}", taglist.to_string()); // FIXME use an iterator
}
None => {
println!(" no tags");
@ -37,29 +44,30 @@ fn print_tags(info: &DiscovererInfo) {
fn print_stream_info(stream: &DiscovererStreamInfo) {
println!("Stream: ");
if let Some(stream_id) = stream.stream_id() {
println!(" Stream id: {}", stream_id);
if let Some(id) = stream.get_stream_id() {
println!(" Stream id: {}", id);
}
let caps_str = match stream.caps() {
let caps_str = match stream.get_caps() {
Some(caps) => caps.to_string(),
None => String::from("--"),
};
println!(" Format: {caps_str}");
println!(" Format: {}", caps_str);
}
fn print_discoverer_info(info: &DiscovererInfo) -> Result<(), Error> {
println!("URI: {}", info.uri());
println!("Duration: {}", info.duration().display());
let uri = info
.get_uri()
.ok_or(DiscovererError("URI should not be null"))?;
println!("URI: {}", uri);
println!("Duration: {}", info.get_duration());
print_tags(info);
print_stream_info(
&info
.stream_info()
.get_stream_info()
.ok_or(DiscovererError("Error while obtaining stream info"))?,
);
let children = info.stream_list();
let children = info.get_stream_list();
println!("Children streams:");
for child in children {
print_stream_info(&child);
@ -80,7 +88,7 @@ fn run_discoverer() -> Result<(), Error> {
};
let timeout: gst::ClockTime = gst::ClockTime::from_seconds(15);
let discoverer = gst_pbutils::Discoverer::new(timeout)?;
let discoverer = pbutils::Discoverer::new(timeout)?;
let info = discoverer.discover_uri(uri)?;
print_discoverer_info(&info)?;
Ok(())
@ -89,12 +97,12 @@ fn run_discoverer() -> Result<(), Error> {
fn example_main() {
match run_discoverer() {
Ok(_) => (),
Err(e) => eprintln!("Error: {e}"),
Err(e) => eprintln!("Error: {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}

View file

@ -12,60 +12,79 @@
// {uridecodebin} -| {encodebin}-{filesink}
// \-{queue}-{videoconvert}-{videoscale}----/
use std::{
env,
sync::{Arc, Mutex},
};
extern crate gstreamer as gst;
use gst::gst_element_error;
use gst::gst_element_warning;
use gst::prelude::*;
extern crate gstreamer_pbutils as gst_pbutils;
use gst_pbutils::prelude::*;
#[cfg(feature = "v1_10")]
use glib::subclass::prelude::*;
#[cfg(feature = "v1_10")]
use glib::GBoxed;
use std::env;
#[cfg(feature = "v1_10")]
use std::sync::{Arc, Mutex};
use anyhow::Error;
use derive_more::{Display, Error};
use gst::{element_error, element_warning};
use gst_pbutils::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[display(fmt = "Missing element {}", _0)]
struct MissingElement(#[error(not(source))] &'static str);
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
source: glib::Error,
}
#[derive(Clone, Debug, glib::Boxed)]
#[boxed_type(name = "ErrorValue")]
#[cfg(feature = "v1_10")]
#[derive(Clone, Debug, GBoxed)]
#[gboxed(type_name = "ErrorValue")]
struct ErrorValue(Arc<Mutex<Option<Error>>>);
fn configure_encodebin(encodebin: &gst::Element) {
fn configure_encodebin(encodebin: &gst::Element) -> Result<(), Error> {
// To tell the encodebin what we want it to produce, we create an EncodingProfile
// https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/GstEncodingProfile.html
// This profile consists of information about the contained audio and video formats
// as well as the container format we want everything to be combined into.
// Every audiostream piped into the encodebin should be encoded using vorbis.
let audio_profile =
gst_pbutils::EncodingAudioProfile::builder(&gst::Caps::builder("audio/x-vorbis").build())
.presence(0)
.build();
let audio_profile = gst_pbutils::EncodingAudioProfileBuilder::new()
.format(&gst::Caps::new_simple("audio/x-vorbis", &[]))
.presence(0)
.build()?;
// Every videostream piped into the encodebin should be encoded using theora.
let video_profile =
gst_pbutils::EncodingVideoProfile::builder(&gst::Caps::builder("video/x-theora").build())
.presence(0)
.build();
let video_profile = gst_pbutils::EncodingVideoProfileBuilder::new()
.format(&gst::Caps::new_simple("video/x-theora", &[]))
.presence(0)
.build()?;
// All streams are then finally combined into a matroska container.
let container_profile = gst_pbutils::EncodingContainerProfile::builder(
&gst::Caps::builder("video/x-matroska").build(),
)
.name("container")
.add_profile(video_profile)
.add_profile(audio_profile)
.build();
let container_profile = gst_pbutils::EncodingContainerProfileBuilder::new()
.name("container")
.format(&gst::Caps::new_simple("video/x-matroska", &[]))
.add_profile(&(video_profile))
.add_profile(&(audio_profile))
.build()?;
// Finally, apply the EncodingProfile onto our encodebin element.
encodebin.set_property("profile", &container_profile);
encodebin
.set_property("profile", &container_profile)
.expect("set profile property failed");
Ok(())
}
fn example_main() -> Result<(), Error> {
@ -83,27 +102,31 @@ fn example_main() -> Result<(), Error> {
std::process::exit(-1)
};
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("uridecodebin")
.property("uri", uri)
.build()?;
let encodebin = gst::ElementFactory::make("encodebin").build()?;
let sink = gst::ElementFactory::make("filesink")
.property("location", output_file)
.build()?;
let pipeline = gst::Pipeline::new(None);
let src = gst::ElementFactory::make("uridecodebin", None)
.map_err(|_| MissingElement("uridecodebin"))?;
let encodebin =
gst::ElementFactory::make("encodebin", None).map_err(|_| MissingElement("encodebin"))?;
let sink =
gst::ElementFactory::make("filesink", None).map_err(|_| MissingElement("filesink"))?;
src.set_property("uri", &uri)
.expect("setting URI Property failed");
sink.set_property("location", &output_file)
.expect("setting location property failed");
// Configure the encodebin.
// Here we tell the bin what format we expect it to create at its output.
configure_encodebin(&encodebin);
configure_encodebin(&encodebin)?;
pipeline
.add_many([&src, &encodebin, &sink])
.add_many(&[&src, &encodebin, &sink])
.expect("failed to add elements to pipeline");
// It is clear from the start, that encodebin has only one src pad, so we can
// directly link it to our filesink without problems.
// The caps of encodebin's src-pad are set after we configured the encoding-profile.
// (But filesink doesn't really care about the caps at its input anyway)
gst::Element::link_many([&encodebin, &sink])?;
gst::Element::link_many(&[&encodebin, &sink])?;
// Need to move a new reference into the closure.
// !!ATTENTION!!:
@ -120,24 +143,28 @@ fn example_main() -> Result<(), Error> {
src.connect_pad_added(move |dbin, dbin_src_pad| {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let Some(pipeline) = pipeline_weak.upgrade() else {
return;
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return,
};
let (is_audio, is_video) = {
let media_type = dbin_src_pad.current_caps().and_then(|caps| {
caps.structure(0).map(|s| {
let name = s.name();
let media_type = dbin_src_pad.get_current_caps().and_then(|caps| {
caps.get_structure(0).map(|s| {
let name = s.get_name();
(name.starts_with("audio/"), name.starts_with("video/"))
})
});
match media_type {
None => {
element_warning!(
gst_element_warning!(
dbin,
gst::CoreError::Negotiation,
("Failed to get media type from pad {}", dbin_src_pad.name())
(
"Failed to get media type from pad {}",
dbin_src_pad.get_name()
)
);
return;
@ -148,9 +175,12 @@ fn example_main() -> Result<(), Error> {
let link_to_encodebin = |is_audio, is_video| -> Result<(), Error> {
if is_audio {
let queue = gst::ElementFactory::make("queue").build()?;
let convert = gst::ElementFactory::make("audioconvert").build()?;
let resample = gst::ElementFactory::make("audioresample").build()?;
let queue = gst::ElementFactory::make("queue", None)
.map_err(|_| MissingElement("queue"))?;
let convert = gst::ElementFactory::make("audioconvert", None)
.map_err(|_| MissingElement("audioconvert"))?;
let resample = gst::ElementFactory::make("audioresample", None)
.map_err(|_| MissingElement("audioresample"))?;
let elements = &[&queue, &convert, &resample];
pipeline
@ -162,9 +192,11 @@ fn example_main() -> Result<(), Error> {
// The encodebin will then automatically create an internal pipeline, that encodes
// the audio stream in the format we specified in the EncodingProfile.
let enc_sink_pad = encodebin
.request_pad_simple("audio_%u")
.get_request_pad("audio_%u")
.expect("Could not get audio pad from encodebin");
let src_pad = resample.static_pad("src").expect("resample has no srcpad");
let src_pad = resample
.get_static_pad("src")
.expect("resample has no srcpad");
src_pad.link(&enc_sink_pad)?;
for e in elements {
@ -173,12 +205,15 @@ fn example_main() -> Result<(), Error> {
// Get the queue element's sink pad and link the decodebin's newly created
// src pad for the audio stream to it.
let sink_pad = queue.static_pad("sink").expect("queue has no sinkpad");
let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
dbin_src_pad.link(&sink_pad)?;
} else if is_video {
let queue = gst::ElementFactory::make("queue").build()?;
let convert = gst::ElementFactory::make("videoconvert").build()?;
let scale = gst::ElementFactory::make("videoscale").build()?;
let queue = gst::ElementFactory::make("queue", None)
.map_err(|_| MissingElement("queue"))?;
let convert = gst::ElementFactory::make("videoconvert", None)
.map_err(|_| MissingElement("videoconvert"))?;
let scale = gst::ElementFactory::make("videoscale", None)
.map_err(|_| MissingElement("videoscale"))?;
let elements = &[&queue, &convert, &scale];
pipeline
@ -188,11 +223,13 @@ fn example_main() -> Result<(), Error> {
// Request a sink pad from our encodebin, that can handle a raw videostream.
// The encodebin will then automatically create an internal pipeline, that encodes
// the video stream in the format we specified in the EncodingProfile.
// the audio stream in the format we specified in the EncodingProfile.
let enc_sink_pad = encodebin
.request_pad_simple("video_%u")
.get_request_pad("video_%u")
.expect("Could not get video pad from encodebin");
let src_pad = scale.static_pad("src").expect("videoscale has no srcpad");
let src_pad = scale
.get_static_pad("src")
.expect("videoscale has no srcpad");
src_pad.link(&enc_sink_pad)?;
for e in elements {
@ -201,7 +238,7 @@ fn example_main() -> Result<(), Error> {
// Get the queue element's sink pad and link the decodebin's newly created
// src pad for the video stream to it.
let sink_pad = queue.static_pad("sink").expect("queue has no sinkpad");
let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
dbin_src_pad.link(&sink_pad)?;
}
@ -209,25 +246,34 @@ fn example_main() -> Result<(), Error> {
};
if let Err(err) = link_to_encodebin(is_audio, is_video) {
element_error!(
#[cfg(feature = "v1_10")]
gst_element_error!(
dbin,
gst::LibraryError::Failed,
("Failed to insert sink"),
details: gst::Structure::builder("error-details")
.field("error",
ErrorValue(Arc::new(Mutex::new(Some(err)))))
&ErrorValue(Arc::new(Mutex::new(Some(err)))))
.build()
);
#[cfg(not(feature = "v1_10"))]
gst_element_error!(
dbin,
gst::LibraryError::Failed,
("Failed to insert sink"),
["{}", err]
);
}
});
pipeline.set_state(gst::State::Playing)?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -235,35 +281,49 @@ fn example_main() -> Result<(), Error> {
MessageView::Error(err) => {
pipeline.set_state(gst::State::Null)?;
match err.details() {
Some(details) if details.name() == "error-details" => details
.get::<&ErrorValue>("error")
.unwrap()
.clone()
.0
.lock()
.unwrap()
.take()
.map(Result::Err)
.expect("error-details message without actual error"),
_ => Err(ErrorMessage {
#[cfg(feature = "v1_10")]
{
match err.get_details() {
Some(details) if details.get_name() == "error-details" => details
.get::<&ErrorValue>("error")
.unwrap()
.cloned()
.and_then(|v| v.0.lock().unwrap().take())
.map(Result::Err)
.expect("error-details message without actual error"),
_ => Err(ErrorMessage {
src: msg
.get_src()
.map(|s| String::from(s.get_path_string()))
.unwrap_or_else(|| String::from("None")),
error: err.get_error().to_string(),
debug: err.get_debug(),
source: err.get_error(),
}
.into()),
}?;
}
#[cfg(not(feature = "v1_10"))]
{
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
.get_src()
.map(|s| String::from(s.get_path_string()))
.unwrap_or_else(|| String::from("None")),
error: err.get_error().to_string(),
debug: err.get_debug(),
source: err.get_error(),
}
.into()),
}?;
.into());
}
}
MessageView::StateChanged(s) => {
println!(
"State changed from {:?}: {:?} -> {:?} ({:?})",
s.src().map(|s| s.path_string()),
s.old(),
s.current(),
s.pending()
s.get_src().map(|s| s.get_path_string()),
s.get_old(),
s.get_current(),
s.get_pending()
);
}
_ => (),
@ -276,10 +336,10 @@ fn example_main() -> Result<(), Error> {
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}

View file

@ -19,6 +19,7 @@
// This makes it possible, e.g., to schedule an arbitrary piece of code
// to run in the main loop thread - avoiding potential threading issues.
extern crate gstreamer as gst;
use gst::prelude::*;
#[path = "../examples-common.rs"]
@ -30,43 +31,8 @@ fn example_main() {
let main_loop = glib::MainLoop::new(None, false);
// This creates a pipeline by parsing the gst-launch pipeline syntax.
let pipeline = gst::parse::launch("audiotestsrc ! identity name=capsmut ! fakesink").unwrap();
let bus = pipeline.bus().unwrap();
// This is a contrived example to mutate events. This would normally be code inside an element,
// which might transform caps to reflect transformation in the data
let identity = pipeline
.downcast_ref::<gst::Bin>()
.unwrap()
.by_name("capsmut")
.unwrap();
let _ = identity.static_pad("sink").unwrap().add_probe(
gst::PadProbeType::EVENT_DOWNSTREAM,
move |_, probe_info| {
let Some(e) = probe_info.event() else {
return gst::PadProbeReturn::Ok;
};
if e.type_() != gst::EventType::Caps {
return gst::PadProbeReturn::Ok;
};
let mut ev = probe_info.take_event().unwrap();
let ev_ref = ev.make_mut();
let gst::EventViewMut::Caps(caps) = ev_ref.view_mut() else {
unreachable!()
};
caps.structure_mut().set("custom-field", true);
identity
.static_pad("src")
.unwrap()
.push_event(ev_ref.to_owned());
gst::PadProbeReturn::Drop
},
);
let pipeline = gst::parse_launch("audiotestsrc ! fakesink").unwrap();
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
@ -85,13 +51,14 @@ fn example_main() {
// Add a timeout to the main loop. This closure will be executed
// in an interval of 5 seconds. The return value of the handler function
// determines whether the handler still wants to be called:
// - glib::ControlFlow::Break - stop calling this handler, remove timeout
// - glib::ControlFlow::Continue- continue calling this handler
// - glib::Continue(false) - stop calling this handler, remove timeout
// - glib::Continue(true) - continue calling this handler
glib::timeout_add_seconds(5, move || {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let Some(pipeline) = pipeline_weak.upgrade() else {
return glib::ControlFlow::Break;
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return glib::Continue(false),
};
println!("sending eos");
@ -111,44 +78,43 @@ fn example_main() {
// Remove this handler, the pipeline will shutdown anyway, now that we
// sent the EOS event.
glib::ControlFlow::Break
glib::Continue(false)
});
//bus.add_signal_watch();
//bus.connect_message(None, move |_, msg| {
//bus.connect_message(move |_, msg| {
let main_loop_clone = main_loop.clone();
// This sets the bus's signal handler (don't be mislead by the "add", there can only be one).
// Every message from the bus is passed through this function. Its returnvalue determines
// whether the handler wants to be called again. If glib::ControlFlow::Break is returned, the
// whether the handler wants to be called again. If glib::Continue(false) is returned, the
// handler is removed and will never be called again. The mainloop still runs though.
let _bus_watch = bus
.add_watch(move |_, msg| {
use gst::MessageView;
bus.add_watch(move |_, msg| {
use gst::MessageView;
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => {
println!("received eos");
// An EndOfStream event was sent to the pipeline, so we tell our main loop
// to stop execution here.
main_loop.quit()
}
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
main_loop.quit();
}
_ => (),
};
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => {
println!("received eos");
// An EndOfStream event was sent to the pipeline, so we tell our main loop
// to stop execution here.
main_loop.quit()
}
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
main_loop.quit();
}
_ => (),
};
// Tell the mainloop to continue executing this callback.
glib::ControlFlow::Continue
})
.expect("Failed to add bus watch");
// Tell the mainloop to continue executing this callback.
glib::Continue(true)
})
.expect("Failed to add bus watch");
// Operate GStreamer's bus, facilliating GLib's mainloop here.
// This function call will block until you tell the mainloop to quit
@ -158,10 +124,15 @@ fn example_main() {
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
// Remove the watch function from the bus.
// Again: There can always only be one watch function.
// Thus we don't have to tell him which function to remove.
bus.remove_watch().unwrap();
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}

View file

@ -1,481 +0,0 @@
// This example demonstrates the use of the FdMemory allocator.
// It operates the following two pipelines:
// sender: {videotestsrc} - {appsink}
// receiver: {appsrc} - {FdMemoryVideoFilter} - {videoconvert} - {queue} - {autovideosink}
// The sender creates shared memory files from the appsink which are sent
// to the receiver using a unix domain socket.
// The receiver creates buffers in the appsrc using the FdMemoryAllocator from
// the received file descriptors.
// Additional to demonstrating how the FdMemoryAllocator can be used to share
// file descriptors the example implements a custom VideoFilter demonstrating
// how the file descriptor of FdMemory can be accessed in a pipeline.
// Note that instead of manually mapping the file descriptor it is also possible
// to use map_writable, which will also map the file descriptor.
use std::{
os::unix::{net::UnixStream, prelude::AsRawFd},
sync::{Arc, Mutex},
};
use anyhow::Error;
use futures::StreamExt;
use gst::{element_error, prelude::*};
use memmap2::MmapMut;
use uds::UnixStreamExt;
#[path = "../examples-common.rs"]
mod examples_common;
/// Builds the receiving pipeline:
/// {appsrc} - {FdMemoryVideoFilter} - {videoconvert} - {queue} - {autovideosink}.
///
/// `receiver` is the read end of a unix-domain socket pair. Every message read
/// from it carries file descriptors; each fd is wrapped into an `FdMemory`
/// buffer and pushed into the appsrc. A read of 0 bytes (peer shut down the
/// write side) is translated into end-of-stream.
fn create_receiver_pipeline(
    video_info: &gst_video::VideoInfo,
    receiver: UnixStream,
) -> Result<gst::Pipeline, Error> {
    let caps = video_info.to_caps()?;
    let pipeline = gst::Pipeline::default();
    // Live appsrc with do-timestamp so pushed buffers get running-time stamps.
    let src = gst_app::AppSrc::builder()
        .caps(&caps)
        .do_timestamp(true)
        .is_live(true)
        .build();
    let filter = video_filter::FdMemoryFadeInVideoFilter::default().upcast::<gst::Element>();
    let convert = gst::ElementFactory::make("videoconvert").build()?;
    let queue = gst::ElementFactory::make("queue").build()?;
    let sink = gst::ElementFactory::make("autovideosink").build()?;

    pipeline.add_many([src.upcast_ref(), &filter, &convert, &queue, &sink])?;
    gst::Element::link_many([src.upcast_ref(), &filter, &convert, &queue, &sink])?;

    let fd_allocator = gst_allocators::FdAllocator::new();
    let video_info = video_info.clone();
    // recv_fds fills this array; 253 is the fd capacity per message here
    // (NOTE(review): presumably chosen to stay under SCM_RIGHTS limits — confirm).
    let mut fd_buf = [-1; 253];

    src.set_callbacks(
        gst_app::AppSrcCallbacks::builder()
            .need_data(move |appsrc, _| {
                // Read the next fds from the socket, if 0
                // is returned the sender has closed the stream
                // which is handled as EOS here.
                let fds = match receiver.recv_fds(&mut [0u8; 1], &mut fd_buf) {
                    Ok((_, 0)) => {
                        let _ = appsrc.end_of_stream();
                        return;
                    }
                    Ok((_, fds)) => fds,
                    Err(err) => {
                        gst::error_msg!(
                            gst::StreamError::Failed,
                            ("failed to receive fds: {}", err)
                        );
                        return;
                    }
                };

                for fd in &fd_buf[0..fds] {
                    // Allocate a new FdMemory for the received file descriptor.
                    // It is important that the size matches the size of the
                    // actual backing storage. In this example we just use the
                    // same video info in both sides, sending and receiving.
                    // Pass FdMemoryFlags::NONE to make the FdMemory take
                    // ownership of the passed file descriptor. The file descriptor
                    // will be closed when the memory is released.
                    let memory = unsafe {
                        fd_allocator
                            .alloc(*fd, video_info.size(), gst_allocators::FdMemoryFlags::NONE)
                            .unwrap()
                    };
                    let mut buffer = gst::Buffer::new();
                    let buffer_mut = buffer.make_mut();
                    buffer_mut.append_memory(memory);
                    let _ = appsrc.push_buffer(buffer);
                }
            })
            .build(),
    );

    Ok(pipeline)
}
/// Builds the sending pipeline: {videotestsrc} - {appsink}.
///
/// Each sample pulled from the appsink is turned into a sealed memfd whose
/// contents are a copy of the frame, and the memfd's file descriptor is sent
/// over `sender` (a unix-domain socket). On EOS the write side of the socket
/// is shut down, which the receiver interprets as end-of-stream.
fn create_sender_pipeline(
    video_info: &gst_video::VideoInfo,
    sender: UnixStream,
) -> Result<gst::Pipeline, Error> {
    // Shared between the "eos" and "new-sample" callbacks, hence Arc<Mutex<_>>.
    let sender = Arc::new(Mutex::new(sender));
    let caps = video_info.to_caps()?;
    let pipeline = gst::Pipeline::default();
    // Emit a fixed number of buffers so the example terminates on its own.
    let src = gst::ElementFactory::make("videotestsrc")
        .property("num-buffers", 250i32)
        .build()?;
    let sink = gst::ElementFactory::make("appsink").build()?;

    sink.downcast_ref::<gst_app::AppSink>()
        .ok_or_else(|| anyhow::anyhow!("is not a appsink"))?
        .set_caps(Some(&caps));

    pipeline.add_many([&src, &sink])?;
    gst::Element::link_many([&src, &sink])?;

    let appsink = sink
        .downcast::<gst_app::AppSink>()
        .map_err(|_| anyhow::anyhow!("is not a appsink"))?;

    appsink.set_callbacks(
        gst_app::AppSinkCallbacks::builder()
            // Add a handler to the "eos" signal
            .eos({
                let sender = sender.clone();
                move |_| {
                    // Close the sender part of the UnixSocket pair, this will automatically
                    // create a eos in the receiving part.
                    let _ = sender.lock().unwrap().shutdown(std::net::Shutdown::Write);
                }
            })
            // Add a handler to the "new-sample" signal.
            .new_sample(move |appsink| {
                // Pull the sample in question out of the appsink's buffer.
                let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
                let buffer = sample.buffer().ok_or_else(|| {
                    element_error!(
                        appsink,
                        gst::ResourceError::Failed,
                        ("Failed to get buffer from appsink")
                    );

                    gst::FlowError::Error
                })?;

                // A single memory per buffer maps to exactly one fd on the wire.
                if buffer.n_memory() != 1 {
                    element_error!(
                        appsink,
                        gst::ResourceError::Failed,
                        ("Expected buffer with single memory")
                    );

                    return Err(gst::FlowError::Error);
                }

                let mem = buffer.peek_memory(0);

                // We can use downcast_memory_ref to check if the provided
                // memory is allocated by FdMemoryAllocator or a subtype of it.
                // Note: This is not used in the example, we will always copy
                // the memory to a new shared memory file.
                if let Some(fd_memory) = mem.downcast_memory_ref::<gst_allocators::FdMemory>() {
                    // As we already got a fd we can just directly send it over the socket.
                    // NOTE: Synchronization is left out of this example, in a real world
                    // application access to the memory should be synchronized.
                    // For example wayland provides a release callback to signal that
                    // the memory is no longer in use.
                    sender
                        .lock()
                        .unwrap()
                        .send_fds(&[0u8; 1], &[fd_memory.fd()])
                        .map_err(|_| {
                            element_error!(
                                appsink,
                                gst::ResourceError::Failed,
                                ("Failed to send fd over unix stream")
                            );

                            gst::FlowError::Error
                        })?;
                } else {
                    // At this point, buffer is only a reference to an existing memory region somewhere.
                    // When we want to access its content, we have to map it while requesting the required
                    // mode of access (read, read/write).
                    // This type of abstraction is necessary, because the buffer in question might not be
                    // on the machine's main memory itself, but rather in the GPU's memory.
                    // So mapping the buffer makes the underlying memory region accessible to us.
                    // See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
                    let map = buffer.map_readable().map_err(|_| {
                        element_error!(
                            appsink,
                            gst::ResourceError::Failed,
                            ("Failed to map buffer readable")
                        );

                        gst::FlowError::Error
                    })?;

                    // Note: To simplify this example we always create a new shared memory file instead
                    // of using a pool of buffers. When using a pool we need to make sure access to the
                    // file is synchronized.
                    let opts = memfd::MemfdOptions::default().allow_sealing(true);
                    let mfd = opts.create("gst-examples").map_err(|err| {
                        element_error!(
                            appsink,
                            gst::ResourceError::Failed,
                            ("Failed to allocated fd: {}", err)
                        );

                        gst::FlowError::Error
                    })?;

                    mfd.as_file().set_len(map.size() as u64).map_err(|err| {
                        element_error!(
                            appsink,
                            gst::ResourceError::Failed,
                            ("Failed to resize fd memory: {}", err)
                        );

                        gst::FlowError::Error
                    })?;

                    // Seal size changes so the receiver can rely on the mapping length.
                    let mut seals = memfd::SealsHashSet::new();
                    seals.insert(memfd::FileSeal::SealShrink);
                    seals.insert(memfd::FileSeal::SealGrow);
                    mfd.add_seals(&seals).map_err(|err| {
                        element_error!(
                            appsink,
                            gst::ResourceError::Failed,
                            ("Failed to add fd seals: {}", err)
                        );

                        gst::FlowError::Error
                    })?;

                    // SealSeal prevents any further seals from being added.
                    mfd.add_seal(memfd::FileSeal::SealSeal).map_err(|err| {
                        element_error!(
                            appsink,
                            gst::ResourceError::Failed,
                            ("Failed to add fd seals: {}", err)
                        );

                        gst::FlowError::Error
                    })?;

                    unsafe {
                        let mut mmap = MmapMut::map_mut(mfd.as_file()).map_err(|_| {
                            element_error!(
                                appsink,
                                gst::ResourceError::Failed,
                                ("Failed to mmap fd")
                            );

                            gst::FlowError::Error
                        })?;

                        mmap.copy_from_slice(map.as_slice());
                    };

                    sender
                        .lock()
                        .unwrap()
                        .send_fds(&[0u8; 1], &[mfd.as_raw_fd()])
                        .map_err(|_| {
                            element_error!(
                                appsink,
                                gst::ResourceError::Failed,
                                ("Failed to send fd over unix stream")
                            );

                            gst::FlowError::Error
                        })?;
                };

                Ok(gst::FlowSuccess::Ok)
            })
            .build(),
    );

    Ok(pipeline)
}
/// Drains bus messages asynchronously, returning once the stream
/// finished (EOS) or failed (an error was posted on the bus).
async fn message_loop(bus: gst::Bus) {
    use gst::MessageView;

    let mut stream = bus.stream();
    while let Some(message) = stream.next().await {
        // Decide whether this message terminates the loop: EOS and
        // errors do, everything else keeps us pumping.
        let finished = match message.view() {
            MessageView::Eos(..) => true,
            MessageView::Error(err) => {
                println!(
                    "Error from {:?}: {} ({:?})",
                    err.src().map(|s| s.path_string()),
                    err.error(),
                    err.debug()
                );
                true
            }
            _ => false,
        };

        if finished {
            break;
        }
    }
}
/// Wires the sender and receiver pipelines together over a unix socket pair,
/// runs both to completion, and tears them down.
///
/// Both bus message loops are awaited jointly; the function returns once both
/// pipelines reached EOS or reported an error.
fn example_main() -> Result<(), Error> {
    gst::init()?;

    // Both sides must agree on this format: the receiver allocates FdMemory of
    // exactly video_info.size() bytes per frame.
    let video_info = gst_video::VideoInfo::builder(gst_video::VideoFormat::Bgra, 1920, 1080)
        .fps(gst::Fraction::new(30, 1))
        .build()?;

    // The fd-passing channel between the two pipelines.
    let (sender, receiver) = std::os::unix::net::UnixStream::pair()?;

    let sender_pipeline = create_sender_pipeline(&video_info, sender)?;
    let receiver_pipeline = create_receiver_pipeline(&video_info, receiver)?;

    // Grab each bus before starting; start the receiver first so it is ready
    // to accept fds when the sender begins producing.
    let receiver_bus = receiver_pipeline.bus().expect("pipeline without bus");
    receiver_pipeline.set_state(gst::State::Playing)?;

    let sender_bus = sender_pipeline.bus().expect("pipeline without bus");
    sender_pipeline.set_state(gst::State::Playing)?;

    // Block until both message loops finish (EOS or error on each bus).
    futures::executor::block_on(futures::future::join(
        message_loop(sender_bus),
        message_loop(receiver_bus),
    ));

    sender_pipeline.set_state(gst::State::Null)?;
    receiver_pipeline.set_state(gst::State::Null)?;

    Ok(())
}
fn main() {
    // examples_common::run wraps the real entry point; it exists to prepare the
    // application environment on macOS (plain Cocoa apps get this automatically).
    if let Err(e) = examples_common::run(example_main) {
        eprintln!("Error! {e}");
    }
}
// The purpose of this custom video filter is to demonstrate how
// the file descriptor of a FdMemory can be accessed.
mod video_filter {
glib::wrapper! {
pub struct FdMemoryFadeInVideoFilter(ObjectSubclass<imp::FdMemoryFadeInVideoFilter>) @extends gst_video::VideoFilter, gst_base::BaseTransform, gst::Element, gst::Object;
}
impl Default for FdMemoryFadeInVideoFilter {
fn default() -> Self {
glib::Object::builder().build()
}
}
mod imp {
use std::{mem::ManuallyDrop, os::unix::prelude::FromRawFd};
use anyhow::Error;
use gst::{subclass::prelude::*, PadDirection, PadPresence, PadTemplate};
use gst_app::gst_base::subclass::BaseTransformMode;
use gst_video::{prelude::*, subclass::prelude::*, VideoFrameRef};
use memmap2::MmapMut;
use once_cell::sync::Lazy;
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
gst::DebugCategory::new(
"fdmemoryfilter",
gst::DebugColorFlags::empty(),
Some("Example FdMemory filter"),
)
});
#[derive(Debug, Default)]
pub struct FdMemoryFadeInVideoFilter;
impl FdMemoryFadeInVideoFilter {
fn transform_fd_mem_ip(
&self,
frame: &mut VideoFrameRef<&mut gst::BufferRef>,
) -> Result<(), Error> {
let buffer = frame.buffer();
if buffer.n_memory() != 1 {
return Err(anyhow::anyhow!(
"only buffers with single memory are supported"
));
}
let mem = buffer.peek_memory(0);
if !mem.is_memory_type::<gst_allocators::FdMemory>() {
return Err(anyhow::anyhow!("only fd memory is supported"));
}
let timestamp = buffer.pts().unwrap();
let factor = (timestamp.nseconds() as f64
/ (5 * gst::ClockTime::SECOND).nseconds() as f64)
.min(1.0f64);
// If the fade-in has finished return early
if factor >= 1.0f64 {
return Ok(());
}
let fd = mem
.downcast_memory_ref::<gst_allocators::FdMemory>()
.unwrap()
.fd();
unsafe {
// We wrap the Memmfd in ManuallyDrop here because from_raw_fd takes ownership of
// the file descriptor which would close it on drop
//
// see: https://github.com/lucab/memfd-rs/issues/29
let mfd = ManuallyDrop::new(memfd::Memfd::from_raw_fd(fd));
let mut mmap = MmapMut::map_mut(mfd.as_file())?;
for pixel in mmap.chunks_exact_mut(4) {
pixel[0] = (pixel[0] as f64 * factor).clamp(0.0, 255.0) as u8;
pixel[1] = (pixel[1] as f64 * factor).clamp(0.0, 255.0) as u8;
pixel[2] = (pixel[2] as f64 * factor).clamp(0.0, 255.0) as u8;
}
}
Ok(())
}
}
impl ElementImpl for FdMemoryFadeInVideoFilter {
fn pad_templates() -> &'static [PadTemplate] {
static PAD_TEMPLATES: std::sync::OnceLock<Vec<PadTemplate>> =
std::sync::OnceLock::new();
PAD_TEMPLATES.get_or_init(|| {
let caps = gst_video::VideoCapsBuilder::new()
.format(gst_video::VideoFormat::Bgra)
.build();
vec![
PadTemplate::new("sink", PadDirection::Sink, PadPresence::Always, &caps)
.unwrap(),
PadTemplate::new("src", PadDirection::Src, PadPresence::Always, &caps)
.unwrap(),
]
})
}
}
impl BaseTransformImpl for FdMemoryFadeInVideoFilter {
const MODE: BaseTransformMode = BaseTransformMode::AlwaysInPlace;
const PASSTHROUGH_ON_SAME_CAPS: bool = false;
const TRANSFORM_IP_ON_PASSTHROUGH: bool = true;
}
impl VideoFilterImpl for FdMemoryFadeInVideoFilter {
fn transform_frame_ip(
&self,
frame: &mut VideoFrameRef<&mut gst::BufferRef>,
) -> Result<gst::FlowSuccess, gst::FlowError> {
self.transform_fd_mem_ip(frame).map_err(|err| {
gst::error!(CAT, imp = self, "Failed to transform frame`: {}", err);
gst::FlowError::Error
})?;
Ok(gst::FlowSuccess::Ok)
}
}
impl ObjectImpl for FdMemoryFadeInVideoFilter {}
impl GstObjectImpl for FdMemoryFadeInVideoFilter {}
#[glib::object_subclass]
impl ObjectSubclass for FdMemoryFadeInVideoFilter {
const NAME: &'static str = "FdMemoryVideoFilter";
type Type = super::FdMemoryFadeInVideoFilter;
type ParentType = gst_video::VideoFilter;
}
}
}

View file

@ -3,11 +3,14 @@
// or for an EOS message. When a message notifying about either of both
// is received, the future is resolved.
use std::env;
use futures::{executor::LocalPool, prelude::*};
extern crate gstreamer as gst;
use gst::prelude::*;
use futures::executor::LocalPool;
use futures::prelude::*;
use std::env;
#[path = "../examples-common.rs"]
mod examples_common;
@ -24,9 +27,9 @@ async fn message_loop(bus: gst::Bus) {
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
break;
}
@ -42,8 +45,8 @@ fn example_main() {
gst::init().unwrap();
// Create a pipeline from the launch-syntax given on the cli.
let pipeline = gst::parse::launch(&pipeline_str).unwrap();
let bus = pipeline.bus().unwrap();
let pipeline = gst::parse_launch(&pipeline_str).unwrap();
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
@ -61,7 +64,7 @@ fn example_main() {
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -35,44 +35,19 @@
// those with lowers (higher number). Thus, Layers with higher priority are "in the front".
// - The timeline is the enclosing element, grouping all layers and providing a timeframe.
use std::env;
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_editing_services as ges;
use ges::prelude::*;
use std::env;
#[allow(unused_imports)]
#[path = "../examples-common.rs"]
mod examples_common;
fn configure_pipeline(pipeline: &ges::Pipeline, output_name: &str) {
// Every audiostream piped into the encodebin should be encoded using opus.
let audio_profile =
gst_pbutils::EncodingAudioProfile::builder(&gst::Caps::builder("audio/x-opus").build())
.build();
// Every videostream piped into the encodebin should be encoded using vp8.
let video_profile =
gst_pbutils::EncodingVideoProfile::builder(&gst::Caps::builder("video/x-vp8").build())
.build();
// All streams are then finally combined into a webm container.
let container_profile =
gst_pbutils::EncodingContainerProfile::builder(&gst::Caps::builder("video/webm").build())
.name("container")
.add_profile(video_profile)
.add_profile(audio_profile)
.build();
// Apply the EncodingProfile to the pipeline, and set it to render mode
let output_uri = format!("{output_name}.webm");
pipeline
.set_render_settings(&output_uri, &container_profile)
.expect("Failed to set render settings");
pipeline
.set_mode(ges::PipelineFlags::RENDER)
.expect("Failed to set pipeline to render mode");
}
fn main_loop(uri: &str, output: Option<&String>) -> Result<(), glib::BoolError> {
fn main_loop(uri: &str) -> Result<(), glib::BoolError> {
ges::init()?;
// Begin by creating a timeline with audio and video tracks
@ -82,11 +57,6 @@ fn main_loop(uri: &str, output: Option<&String>) -> Result<(), glib::BoolError>
let pipeline = ges::Pipeline::new();
pipeline.set_timeline(&timeline)?;
// If requested, configure the pipeline so it renders to a file.
if let Some(output_name) = output {
configure_pipeline(&pipeline, output_name);
}
// Load a clip from the given uri and add it to the layer.
let clip = ges::UriClip::new(uri).expect("Failed to create clip");
layer.add_clip(&clip)?;
@ -97,7 +67,7 @@ fn main_loop(uri: &str, output: Option<&String>) -> Result<(), glib::BoolError>
println!(
"Agingtv scratch-lines: {}",
clip.child_property("scratch-lines")
clip.get_child_property("scratch-lines")
.unwrap()
.serialize()
.unwrap()
@ -105,17 +75,16 @@ fn main_loop(uri: &str, output: Option<&String>) -> Result<(), glib::BoolError>
// Retrieve the asset that was automatically used behind the scenes, to
// extract the clip from.
let asset = clip.asset().unwrap();
let asset = clip.get_asset().unwrap();
let duration = asset
.downcast::<ges::UriClipAsset>()
.unwrap()
.duration()
.expect("unknown duration");
.get_duration();
println!(
"Clip duration: {} - playing file from {} for {}",
duration,
duration / 2,
duration / 4,
duration / 4
);
// The inpoint specifies where in the clip we start, the duration specifies
@ -129,8 +98,8 @@ fn main_loop(uri: &str, output: Option<&String>) -> Result<(), glib::BoolError>
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let bus = pipeline.bus().unwrap();
for msg in bus.iter_timed(gst::ClockTime::NONE) {
let bus = pipeline.get_bus().unwrap();
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -138,9 +107,9 @@ fn main_loop(uri: &str, output: Option<&String>) -> Result<(), glib::BoolError>
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
break;
}
@ -158,22 +127,21 @@ fn main_loop(uri: &str, output: Option<&String>) -> Result<(), glib::BoolError>
#[allow(unused_variables)]
fn example_main() {
let args: Vec<_> = env::args().collect();
if args.len() < 2 || args.len() > 3 {
println!("Usage: ges input [output]");
let uri: &str = if args.len() == 2 {
args[1].as_ref()
} else {
println!("Usage: ges launch");
std::process::exit(-1)
}
};
let input_uri: &str = args[1].as_ref();
let output = args.get(2);
match main_loop(input_uri, output) {
match main_loop(uri) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -1,174 +0,0 @@
#![allow(clippy::non_send_fields_in_send_ty)]
use anyhow::Result;
#[path = "../glupload.rs"]
mod glupload;
use glupload::*;
#[path = "../examples-common.rs"]
pub mod examples_common;
/// The fragment shader used for transforming GL textures travelling through the
/// pipeline. This fragment shader links against the default vertex shader
/// provided by [`GLSLStage::new_default_vertex`].
const FRAGMENT_SHADER: &str = r#"
#ifdef GL_ES
precision mediump float;
#endif
// The filter draws a fullscreen quad and provides its coordinates here:
varying vec2 v_texcoord;
// The input texture is bound on a uniform sampler named `tex`:
uniform sampler2D tex;
void main () {
// Flip texture read coordinate on the x axis to create a mirror effect:
gl_FragColor = texture2D(tex, vec2(1.0 - v_texcoord.x, v_texcoord.y));
}
"#;
/// GL filter element that mirrors the incoming texture horizontally by
/// rendering it through `FRAGMENT_SHADER` (which flips the x texture
/// coordinate).
mod mirror {
    use std::sync::Mutex;

    use gst_base::subclass::BaseTransformMode;
    use gst_gl::{
        prelude::*,
        subclass::{prelude::*, GLFilterMode},
        *,
    };
    use once_cell::sync::Lazy;

    use super::{gl, FRAGMENT_SHADER};

    pub static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
        gst::DebugCategory::new(
            "rsglmirrorfilter",
            gst::DebugColorFlags::empty(),
            Some("Rust GL Mirror Filter"),
        )
    });

    glib::wrapper! {
        pub struct GLMirrorFilter(ObjectSubclass<imp::GLMirrorFilter>) @extends gst_gl::GLFilter, gst_gl::GLBaseFilter, gst_base::BaseTransform, gst::Element, gst::Object;
    }

    impl GLMirrorFilter {
        /// Creates a new filter instance with the given element name.
        pub fn new(name: Option<&str>) -> Self {
            glib::Object::builder().property("name", name).build()
        }
    }

    mod imp {
        use super::*;

        /// Private data consists of the transformation shader which is compiled
        /// in advance to running the actual filter.
        #[derive(Default)]
        pub struct GLMirrorFilter {
            shader: Mutex<Option<GLShader>>,
        }

        impl GLMirrorFilter {
            // Compiles and links the default vertex stage together with
            // FRAGMENT_SHADER, storing the result for use in filter_texture.
            // Must run with a current GL context (called from gl_start).
            fn create_shader(&self, context: &GLContext) -> Result<(), gst::LoggableError> {
                let shader = GLShader::new(context);

                let vertex = GLSLStage::new_default_vertex(context);
                vertex.compile().unwrap();
                shader.attach_unlocked(&vertex)?;

                gst::debug!(
                    CAT,
                    imp = self,
                    "Compiling fragment shader {}",
                    FRAGMENT_SHADER
                );

                let fragment = GLSLStage::with_strings(
                    context,
                    gl::FRAGMENT_SHADER,
                    // new_default_vertex is compiled with this version and profile:
                    GLSLVersion::None,
                    GLSLProfile::ES | GLSLProfile::COMPATIBILITY,
                    &[FRAGMENT_SHADER],
                );
                fragment.compile().unwrap();
                shader.attach_unlocked(&fragment)?;
                shader.link().unwrap();

                gst::debug!(
                    CAT,
                    imp = self,
                    "Successfully compiled and linked {:?}",
                    shader
                );

                *self.shader.lock().unwrap() = Some(shader);
                Ok(())
            }
        }

        // See `subclass.rs` for general documentation on creating a subclass. Extended
        // information like element metadata have been omitted for brevity.
        #[glib::object_subclass]
        impl ObjectSubclass for GLMirrorFilter {
            const NAME: &'static str = "RsGLMirrorFilter";
            type Type = super::GLMirrorFilter;
            type ParentType = gst_gl::GLFilter;
        }

        impl ElementImpl for GLMirrorFilter {}
        impl GstObjectImpl for GLMirrorFilter {}
        impl ObjectImpl for GLMirrorFilter {}

        impl BaseTransformImpl for GLMirrorFilter {
            const MODE: BaseTransformMode = BaseTransformMode::NeverInPlace;
            const PASSTHROUGH_ON_SAME_CAPS: bool = false;
            const TRANSFORM_IP_ON_PASSTHROUGH: bool = false;
        }

        impl GLBaseFilterImpl for GLMirrorFilter {
            fn gl_start(&self) -> Result<(), gst::LoggableError> {
                let filter = self.obj();

                // Create a shader when GL is started, knowing that the OpenGL context is
                // available.
                let context = GLBaseFilterExt::context(&*filter).unwrap();
                self.create_shader(&context)?;
                self.parent_gl_start()
            }
        }

        impl GLFilterImpl for GLMirrorFilter {
            const MODE: GLFilterMode = GLFilterMode::Texture;

            fn filter_texture(
                &self,
                input: &gst_gl::GLMemory,
                output: &gst_gl::GLMemory,
            ) -> Result<(), gst::LoggableError> {
                let filter = self.obj();

                let shader = self.shader.lock().unwrap();
                // Use the underlying filter implementation to transform the input texture into
                // an output texture with the shader.
                filter.render_to_target_with_shader(
                    input,
                    output,
                    shader
                        .as_ref()
                        .expect("No shader, call `create_shader` first!"),
                );
                self.parent_filter_texture(input, output)
            }
        }
    }
}
fn example_main() -> Result<()> {
gst::init().unwrap();
let glfilter = mirror::GLMirrorFilter::new(Some("Mirror filter"));
App::new(Some(glfilter.as_ref())).and_then(main_loop)
}
fn main() -> Result<()> {
    // Run through examples_common so any platform-specific process setup
    // happens before the example body (see examples-common.rs).
    examples_common::run(example_main)
}

View file

@ -1,7 +1,9 @@
use std::env;
extern crate gstreamer as gst;
use gst::prelude::*;
use futures::prelude::*;
use gst::prelude::*;
use std::env;
#[path = "../examples-common.rs"]
mod examples_common;
@ -19,9 +21,9 @@ async fn message_handler(loop_: glib::MainLoop, bus: gst::Bus) {
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
loop_.quit();
}
@ -34,6 +36,7 @@ fn example_main() {
// Get the default main context and make it also the thread default, then create
// a main loop for it
let ctx = glib::MainContext::default();
ctx.push_thread_default();
let loop_ = glib::MainLoop::new(Some(&ctx), false);
// Read the pipeline to launch from the commandline, using the launch syntax.
@ -42,8 +45,8 @@ fn example_main() {
gst::init().unwrap();
// Create a pipeline from the launch-syntax given on the cli.
let pipeline = gst::parse::launch(&pipeline_str).unwrap();
let bus = pipeline.bus().unwrap();
let pipeline = gst::parse_launch(&pipeline_str).unwrap();
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
@ -59,10 +62,12 @@ fn example_main() {
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
ctx.pop_thread_default();
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -0,0 +1,701 @@
// This example demonstrates how to output GL textures, within an
// EGL/X11 context provided by the application, and render those
// textures in the GL application.
// {videotestsrc} - { glsinkbin }
extern crate gstreamer as gst;
use gst::gst_element_error;
use gst::prelude::*;
extern crate gstreamer_app as gst_app;
extern crate gstreamer_gl as gst_gl;
use gst_gl::prelude::*;
extern crate gstreamer_video as gst_video;
use std::ffi::CStr;
use std::mem;
use std::ptr;
use std::sync::mpsc;
use anyhow::Error;
use derive_more::{Display, Error};
#[path = "../examples-common.rs"]
mod examples_common;
/// Error type for a required GStreamer element factory that could not be found.
#[derive(Debug, Display, Error)]
#[display(fmt = "Missing element {}", _0)]
struct MissingElement(#[error(not(source))] &'static str);
/// Error carrying the details of a GStreamer error message posted on the bus.
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
struct ErrorMessage {
    // Path of the element that emitted the error.
    src: String,
    error: String,
    // Optional extra debug information attached to the message.
    debug: Option<String>,
    // Underlying glib error; used as the error source.
    source: glib::Error,
}
#[rustfmt::skip]
static VERTICES: [f32; 20] = [
1.0, 1.0, 0.0, 1.0, 0.0,
-1.0, 1.0, 0.0, 0.0, 0.0,
-1.0, -1.0, 0.0, 0.0, 1.0,
1.0, -1.0, 0.0, 1.0, 1.0,
];
static INDICES: [u16; 6] = [0, 1, 2, 0, 2, 3];
#[rustfmt::skip]
static IDENTITY: [f32; 16] = [
1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0,
];
const VS_SRC: &[u8] = b"
uniform mat4 u_transformation;
attribute vec4 a_position;
attribute vec2 a_texcoord;
varying vec2 v_texcoord;
void main() {
gl_Position = u_transformation * a_position;
v_texcoord = a_texcoord;
}
\0";
const FS_SRC: &[u8] = b"
#ifdef GL_ES
precision mediump float;
#endif
varying vec2 v_texcoord;
uniform sampler2D tex;
void main() {
gl_FragColor = texture2D(tex, v_texcoord);
}
\0";
#[allow(clippy::unreadable_literal)]
#[allow(clippy::unused_unit)]
#[allow(clippy::too_many_arguments)]
mod gl {
pub use self::Gles2 as Gl;
include!(concat!(env!("OUT_DIR"), "/test_gl_bindings.rs"));
}
/// Loaded GL function table plus the GPU objects needed to draw a textured
/// fullscreen quad (see `load` for how these are created).
struct Gl {
    gl: gl::Gl,
    // Linked shader program built from VS_SRC / FS_SRC.
    program: gl::types::GLuint,
    // Attribute locations queried from the program.
    attr_position: gl::types::GLint,
    attr_texture: gl::types::GLint,
    // Present only when the GL implementation exposes vertex array objects.
    vao: Option<gl::types::GLuint>,
    vertex_buffer: gl::types::GLuint,
    vbo_indices: gl::types::GLuint,
}
impl Gl {
    /// Draws `texture_id` over the whole framebuffer as a blended quad, then
    /// unbinds everything it bound so no GL state leaks to the caller.
    fn draw_frame(&self, texture_id: gl::types::GLuint) {
        unsafe {
            // render
            self.gl.ClearColor(0.0, 0.0, 0.0, 1.0);
            self.gl.Clear(gl::COLOR_BUFFER_BIT);

            self.gl.BlendColor(0.0, 0.0, 0.0, 1.0);
            if self.gl.BlendFuncSeparate.is_loaded() {
                self.gl.BlendFuncSeparate(
                    gl::SRC_ALPHA,
                    gl::CONSTANT_COLOR,
                    gl::ONE,
                    gl::ONE_MINUS_SRC_ALPHA,
                );
            } else {
                self.gl.BlendFunc(gl::SRC_ALPHA, gl::CONSTANT_COLOR);
            }
            self.gl.BlendEquation(gl::FUNC_ADD);
            self.gl.Enable(gl::BLEND);

            self.gl.UseProgram(self.program);

            if self.gl.BindVertexArray.is_loaded() {
                self.gl.BindVertexArray(self.vao.unwrap());
            }

            // Re-specify the vertex layout each frame: 5 floats per vertex,
            // 3 for position followed by 2 for texture coordinates
            // (matches the VERTICES array).
            {
                self.gl
                    .BindBuffer(gl::ELEMENT_ARRAY_BUFFER, self.vbo_indices);
                self.gl.BindBuffer(gl::ARRAY_BUFFER, self.vertex_buffer);

                // Load the vertex position
                self.gl.VertexAttribPointer(
                    self.attr_position as gl::types::GLuint,
                    3,
                    gl::FLOAT,
                    gl::FALSE,
                    (5 * mem::size_of::<f32>()) as gl::types::GLsizei,
                    ptr::null(),
                );

                // Load the texture coordinate
                self.gl.VertexAttribPointer(
                    self.attr_texture as gl::types::GLuint,
                    2,
                    gl::FLOAT,
                    gl::FALSE,
                    (5 * mem::size_of::<f32>()) as gl::types::GLsizei,
                    (3 * mem::size_of::<f32>()) as *const () as *const _,
                );

                self.gl.EnableVertexAttribArray(self.attr_position as _);
                self.gl.EnableVertexAttribArray(self.attr_texture as _);
            }

            self.gl.ActiveTexture(gl::TEXTURE0);
            self.gl.BindTexture(gl::TEXTURE_2D, texture_id);

            // The sampler uniform reads from texture unit 0.
            let location = self
                .gl
                .GetUniformLocation(self.program, b"tex\0".as_ptr() as *const _);
            self.gl.Uniform1i(location, 0);

            // Identity transformation: the quad already covers clip space.
            let location = self
                .gl
                .GetUniformLocation(self.program, b"u_transformation\0".as_ptr() as *const _);
            self.gl
                .UniformMatrix4fv(location, 1, gl::FALSE, IDENTITY.as_ptr() as *const _);

            self.gl
                .DrawElements(gl::TRIANGLES, 6, gl::UNSIGNED_SHORT, ptr::null());

            // Restore default state.
            self.gl.BindTexture(gl::TEXTURE_2D, 0);
            self.gl.UseProgram(0);

            if self.gl.BindVertexArray.is_loaded() {
                self.gl.BindVertexArray(0);
            }

            {
                self.gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, 0);
                self.gl.BindBuffer(gl::ARRAY_BUFFER, 0);

                self.gl.DisableVertexAttribArray(self.attr_position as _);
                self.gl.DisableVertexAttribArray(self.attr_texture as _);
            }
        }
    }

    /// Matches the GL viewport to the window's new physical size.
    fn resize(&self, size: glutin::dpi::PhysicalSize) {
        unsafe {
            self.gl
                .Viewport(0, 0, size.width as i32, size.height as i32);
        }
    }
}
/// Loads GL function pointers through the glutin context and builds the
/// shader program and vertex/index buffers used by [`Gl::draw_frame`].
///
/// Panics (via `assert!`) if the shader program fails to link.
fn load(gl_context: &glutin::WindowedContext<glutin::PossiblyCurrent>) -> Gl {
    let gl = gl::Gl::load_with(|ptr| gl_context.get_proc_address(ptr) as *const _);

    let version = unsafe {
        let data = CStr::from_ptr(gl.GetString(gl::VERSION) as *const _)
            .to_bytes()
            .to_vec();
        String::from_utf8(data).unwrap()
    };

    println!("OpenGL version {}", version);

    let (program, attr_position, attr_texture, vao, vertex_buffer, vbo_indices) = unsafe {
        let vs = gl.CreateShader(gl::VERTEX_SHADER);
        gl.ShaderSource(vs, 1, [VS_SRC.as_ptr() as *const _].as_ptr(), ptr::null());
        gl.CompileShader(vs);

        let fs = gl.CreateShader(gl::FRAGMENT_SHADER);
        gl.ShaderSource(fs, 1, [FS_SRC.as_ptr() as *const _].as_ptr(), ptr::null());
        gl.CompileShader(fs);

        let program = gl.CreateProgram();
        gl.AttachShader(program, vs);
        gl.AttachShader(program, fs);
        gl.LinkProgram(program);

        {
            let mut success: gl::types::GLint = 1;
            // Bug fix: LINK_STATUS is a property of the *program* object.
            // The original queried it on the fragment-shader handle `fs`,
            // which is not a valid target for glGetProgramiv.
            gl.GetProgramiv(program, gl::LINK_STATUS, &mut success);
            assert!(success != 0);
        }

        let attr_position = gl.GetAttribLocation(program, b"a_position\0".as_ptr() as *const _);
        let attr_texture = gl.GetAttribLocation(program, b"a_texcoord\0".as_ptr() as *const _);

        // Use a VAO when available so the vertex layout only has to be set up once.
        let vao = if gl.BindVertexArray.is_loaded() {
            let mut vao = mem::MaybeUninit::uninit();
            gl.GenVertexArrays(1, vao.as_mut_ptr());
            let vao = vao.assume_init();
            gl.BindVertexArray(vao);
            Some(vao)
        } else {
            None
        };

        let mut vertex_buffer = mem::MaybeUninit::uninit();
        gl.GenBuffers(1, vertex_buffer.as_mut_ptr());
        let vertex_buffer = vertex_buffer.assume_init();
        gl.BindBuffer(gl::ARRAY_BUFFER, vertex_buffer);
        gl.BufferData(
            gl::ARRAY_BUFFER,
            (VERTICES.len() * mem::size_of::<f32>()) as gl::types::GLsizeiptr,
            VERTICES.as_ptr() as *const _,
            gl::STATIC_DRAW,
        );

        let mut vbo_indices = mem::MaybeUninit::uninit();
        gl.GenBuffers(1, vbo_indices.as_mut_ptr());
        let vbo_indices = vbo_indices.assume_init();
        gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, vbo_indices);
        gl.BufferData(
            gl::ELEMENT_ARRAY_BUFFER,
            (INDICES.len() * mem::size_of::<u16>()) as gl::types::GLsizeiptr,
            INDICES.as_ptr() as *const _,
            gl::STATIC_DRAW,
        );

        if gl.BindVertexArray.is_loaded() {
            gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, vbo_indices);
            gl.BindBuffer(gl::ARRAY_BUFFER, vertex_buffer);

            // Load the vertex position
            gl.VertexAttribPointer(
                attr_position as gl::types::GLuint,
                3,
                gl::FLOAT,
                gl::FALSE,
                (5 * mem::size_of::<f32>()) as gl::types::GLsizei,
                ptr::null(),
            );

            // Load the texture coordinate
            gl.VertexAttribPointer(
                attr_texture as gl::types::GLuint,
                2,
                gl::FLOAT,
                gl::FALSE,
                (5 * mem::size_of::<f32>()) as gl::types::GLsizei,
                (3 * mem::size_of::<f32>()) as *const () as *const _,
            );

            gl.EnableVertexAttribArray(attr_position as _);
            gl.EnableVertexAttribArray(attr_texture as _);

            gl.BindVertexArray(0);
        }

        gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, 0);
        gl.BindBuffer(gl::ARRAY_BUFFER, 0);

        (
            program,
            attr_position,
            attr_texture,
            vao,
            vertex_buffer,
            vbo_indices,
        )
    };

    Gl {
        gl,
        program,
        attr_position,
        attr_texture,
        vao,
        vertex_buffer,
        vbo_indices,
    }
}
/// Bundles the GStreamer pipeline together with the glutin window/GL
/// state that the render loop needs.
struct App {
    /// The running pipeline (videotestsrc ! glsinkbin, see `create_pipeline`).
    pipeline: gst::Pipeline,
    /// The appsink placed inside glsinkbin; delivers GL-memory samples.
    appsink: gst_app::AppSink,
    /// The glupload element found inside glsinkbin; queried later for the
    /// GL context GStreamer created ("context" property).
    glupload: gst::Element,
    /// The pipeline's bus, drained for messages on every loop iteration.
    bus: gst::Bus,
    /// glutin event loop driving window events and wakeups.
    events_loop: glutin::EventsLoop,
    /// The window plus its current GL context.
    windowed_context: glutin::WindowedContext<glutin::PossiblyCurrent>,
    /// GStreamer GL context wrapped around glutin's native GL context,
    /// shared with the pipeline via context messages.
    shared_context: gst_gl::GLContext,
}
impl App {
    /// Initializes GStreamer, builds the pipeline and the glutin window,
    /// and wraps the window's native GL context into a `gst_gl::GLContext`
    /// shared with the pipeline through bus sync messages.
    ///
    /// Only Linux is supported (EGL/GLX over Wayland/X11, depending on the
    /// enabled cargo features); other platforms panic.
    fn new() -> Result<App, Error> {
        gst::init()?;
        let (pipeline, appsink, glupload) = App::create_pipeline()?;
        let bus = pipeline
            .get_bus()
            .expect("Pipeline without bus. Shouldn't happen!");
        // Create the glutin window and make its GL context current so it
        // can be inspected and wrapped below.
        let events_loop = glutin::EventsLoop::new();
        let window = glutin::WindowBuilder::new().with_title("GL rendering");
        let windowed_context = glutin::ContextBuilder::new()
            .with_vsync(true)
            .build_windowed(window, &events_loop)?;
        let windowed_context = unsafe { windowed_context.make_current().map_err(|(_, err)| err)? };
        #[cfg(any(feature = "gl-x11", feature = "gl-wayland"))]
        let inner_window = windowed_context.window();
        let shared_context: gst_gl::GLContext;
        if cfg!(target_os = "linux") {
            use glutin::os::unix::RawHandle;
            #[cfg(any(feature = "gl-x11", feature = "gl-wayland"))]
            use glutin::os::unix::WindowExt;
            use glutin::os::ContextTraitExt;
            let api = App::map_gl_api(windowed_context.get_api());
            // Extract the raw native GL context handle, the matching
            // GStreamer display wrapper and the platform enum for it.
            let (gl_context, gl_display, platform) = match unsafe { windowed_context.raw_handle() }
            {
                #[cfg(any(feature = "gl-egl", feature = "gl-wayland"))]
                RawHandle::Egl(egl_context) => {
                    #[cfg(feature = "gl-egl")]
                    let gl_display = if let Some(display) =
                        unsafe { windowed_context.get_egl_display() }
                    {
                        unsafe { gst_gl::GLDisplayEGL::with_egl_display(display as usize) }.unwrap()
                    } else {
                        panic!("EGL context without EGL display");
                    };
                    #[cfg(not(feature = "gl-egl"))]
                    let gl_display = if let Some(display) = inner_window.get_wayland_display() {
                        unsafe { gst_gl::GLDisplayWayland::with_display(display as usize) }.unwrap()
                    } else {
                        panic!("Wayland window without Wayland display");
                    };
                    (
                        egl_context as usize,
                        gl_display.upcast::<gst_gl::GLDisplay>(),
                        gst_gl::GLPlatform::EGL,
                    )
                }
                #[cfg(feature = "gl-x11")]
                RawHandle::Glx(glx_context) => {
                    let gl_display = if let Some(display) = inner_window.get_xlib_display() {
                        unsafe { gst_gl::GLDisplayX11::with_display(display as usize) }.unwrap()
                    } else {
                        panic!("X11 window without X Display");
                    };
                    (
                        glx_context as usize,
                        gl_display.upcast::<gst_gl::GLDisplay>(),
                        gst_gl::GLPlatform::GLX,
                    )
                }
                #[allow(unreachable_patterns)]
                handler => panic!("Unsupported platform: {:?}.", handler),
            };
            // Wrap glutin's native context so GStreamer can use it.
            shared_context =
                unsafe { gst_gl::GLContext::new_wrapped(&gl_display, gl_context, platform, api) }
                    .unwrap();
            shared_context
                .activate(true)
                .expect("Couldn't activate wrapped GL context");
            shared_context.fill_info()?;
            let gl_context = shared_context.clone();
            let events_proxy = events_loop.create_proxy();
            // Answer the pipeline's context queries synchronously on the bus:
            // hand out our GL display and our wrapped app GL context so the
            // GL elements share the window's context.
            #[allow(clippy::single_match)]
            bus.set_sync_handler(move |_, msg| {
                match msg.view() {
                    gst::MessageView::NeedContext(ctxt) => {
                        let context_type = ctxt.get_context_type();
                        if context_type == *gst_gl::GL_DISPLAY_CONTEXT_TYPE {
                            if let Some(el) =
                                msg.get_src().map(|s| s.downcast::<gst::Element>().unwrap())
                            {
                                let context = gst::Context::new(context_type, true);
                                context.set_gl_display(&gl_display);
                                el.set_context(&context);
                            }
                        }
                        if context_type == "gst.gl.app_context" {
                            if let Some(el) =
                                msg.get_src().map(|s| s.downcast::<gst::Element>().unwrap())
                            {
                                let mut context = gst::Context::new(context_type, true);
                                {
                                    let context = context.get_mut().unwrap();
                                    let s = context.get_mut_structure();
                                    s.set_value("context", gl_context.to_send_value());
                                }
                                el.set_context(&context);
                            }
                        }
                    }
                    _ => (),
                }
                // Wake the glutin event loop so pending bus messages get
                // handled by the render loop.
                let _ = events_proxy.wakeup();
                gst::BusSyncReply::Pass
            });
        } else {
            panic!("This example only has Linux support");
        }
        Ok(App {
            pipeline,
            appsink,
            glupload,
            bus,
            events_loop,
            windowed_context,
            shared_context,
        })
    }
    /// Installs the appsink callbacks that forward samples through an mpsc
    /// channel to the render loop, then starts the pipeline.
    ///
    /// Returns the receiving end of the sample channel.
    fn setup(
        &self,
        events_loop: &glutin::EventsLoop,
    ) -> Result<mpsc::Receiver<gst::Sample>, Error> {
        let events_proxy = events_loop.create_proxy();
        let (sender, receiver) = mpsc::channel();
        self.appsink.set_callbacks(
            gst_app::AppSinkCallbacks::builder()
                .new_sample(move |appsink| {
                    let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
                    {
                        // Validate the sample early: it must carry a buffer
                        // and caps convertible to VideoInfo, otherwise an
                        // element error is posted and the stream errors out.
                        let _buffer = sample.get_buffer().ok_or_else(|| {
                            gst_element_error!(
                                appsink,
                                gst::ResourceError::Failed,
                                ("Failed to get buffer from appsink")
                            );
                            gst::FlowError::Error
                        })?;
                        let _info = sample
                            .get_caps()
                            .and_then(|caps| gst_video::VideoInfo::from_caps(caps).ok())
                            .ok_or_else(|| {
                                gst_element_error!(
                                    appsink,
                                    gst::ResourceError::Failed,
                                    ("Failed to get video info from sample")
                                );
                                gst::FlowError::Error
                            })?;
                    }
                    sender
                        .send(sample)
                        .map(|_| gst::FlowSuccess::Ok)
                        .map_err(|_| gst::FlowError::Error)?;
                    // Wake the glutin event loop so the new sample is drawn.
                    let _ = events_proxy.wakeup();
                    Ok(gst::FlowSuccess::Ok)
                })
                .build(),
        );
        self.pipeline.set_state(gst::State::Playing)?;
        Ok(receiver)
    }
    /// Maps glutin's GL API enum onto GStreamer's `GLAPI` flags.
    fn map_gl_api(api: glutin::Api) -> gst_gl::GLAPI {
        match api {
            glutin::Api::OpenGl => gst_gl::GLAPI::OPENGL3,
            glutin::Api::OpenGlEs => gst_gl::GLAPI::GLES2,
            _ => gst_gl::GLAPI::empty(),
        }
    }
    /// Builds `videotestsrc ! glsinkbin{ sink=appsink }` and configures the
    /// appsink for RGBA 2D GL-memory output with at most one queued buffer.
    ///
    /// Returns the pipeline, the appsink and the glupload element found
    /// inside glsinkbin.
    fn create_pipeline() -> Result<(gst::Pipeline, gst_app::AppSink, gst::Element), Error> {
        let pipeline = gst::Pipeline::new(None);
        let src = gst::ElementFactory::make("videotestsrc", None)
            .map_err(|_| MissingElement("videotestsrc"))?;
        let sink = gst::ElementFactory::make("glsinkbin", None)
            .map_err(|_| MissingElement("glsinkbin"))?;
        pipeline.add_many(&[&src, &sink])?;
        src.link(&sink)?;
        let appsink = gst::ElementFactory::make("appsink", None)
            .map_err(|_| MissingElement("appsink"))?
            .dynamic_cast::<gst_app::AppSink>()
            .expect("Sink element is expected to be an appsink!");
        sink.set_property("sink", &appsink)?;
        appsink.set_property("enable-last-sample", &false.to_value())?;
        appsink.set_property("emit-signals", &false.to_value())?;
        appsink.set_property("max-buffers", &1u32.to_value())?;
        let caps = gst::Caps::builder("video/x-raw")
            .features(&[&gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY])
            .field("format", &gst_video::VideoFormat::Rgba.to_str())
            .field("texture-target", &"2D")
            .build();
        appsink.set_caps(Some(&caps));
        // get the glupload element to extract later the used context in it
        let mut iter = sink.dynamic_cast::<gst::Bin>().unwrap().iterate_elements();
        let glupload = loop {
            match iter.next() {
                Ok(Some(element)) => {
                    if "glupload" == element.get_factory().unwrap().get_name() {
                        break Some(element);
                    }
                }
                Err(gst::IteratorError::Resync) => iter.resync(),
                _ => break None,
            }
        };
        Ok((pipeline, appsink, glupload.unwrap()))
    }
    /// Drains all currently pending bus messages.
    ///
    /// Stops at EOS; converts an error message into an `ErrorMessage` and
    /// returns it as `Err`.
    fn handle_messages(bus: &gst::Bus) -> Result<(), Error> {
        use gst::MessageView;
        for msg in bus.iter() {
            match msg.view() {
                MessageView::Eos(..) => break,
                MessageView::Error(err) => {
                    return Err(ErrorMessage {
                        src: msg
                            .get_src()
                            .map(|s| String::from(s.get_path_string()))
                            .unwrap_or_else(|| String::from("None")),
                        error: err.get_error().to_string(),
                        debug: err.get_debug(),
                        source: err.get_error(),
                    }
                    .into());
                }
                _ => (),
            }
        }
        Ok(())
    }
    /// Consumes the app and hands back only the windowed GL context, so the
    /// caller can keep it alive after everything else is dropped.
    fn into_context(self: App) -> glutin::WindowedContext<glutin::PossiblyCurrent> {
        self.windowed_context
    }
}
/// Render loop: polls window events, drains bus messages, pulls the latest
/// sample from the appsink channel and draws its GL texture.
///
/// Returns the windowed GL context on clean shutdown so the caller can keep
/// it alive past pipeline teardown; propagates pipeline/GL errors.
fn main_loop(mut app: App) -> Result<glutin::WindowedContext<glutin::PossiblyCurrent>, Error> {
    println!(
        "Pixel format of the window's GL context {:?}",
        app.windowed_context.get_pixel_format()
    );
    let gl = load(&app.windowed_context);
    let receiver = app.setup(&app.events_loop)?;
    // Last frame received; kept so it can be re-drawn on every iteration
    // even when no new sample arrived.
    let mut curr_frame: Option<gst_video::VideoFrame<gst_video::video_frame::Readable>> = None;
    let mut running = true;
    // GL context used by glupload; fetched lazily on first sample.
    let mut gst_gl_context: Option<gst_gl::GLContext> = None;
    let events_loop = &mut app.events_loop;
    let windowed_context = &mut app.windowed_context;
    let bus = &app.bus;
    while running {
        #[allow(clippy::single_match)]
        events_loop.poll_events(|event| match event {
            glutin::Event::WindowEvent { event, .. } => match event {
                glutin::WindowEvent::CloseRequested => running = false,
                glutin::WindowEvent::Resized(logical_size) => {
                    let dpi_factor = windowed_context.window().get_hidpi_factor();
                    windowed_context.resize(logical_size.to_physical(dpi_factor));
                    gl.resize(logical_size.to_physical(dpi_factor));
                }
                _ => (),
            },
            _ => (),
        });
        // Handle all pending messages. Whenever there is a message we will
        // wake up the events loop above
        App::handle_messages(&bus)?;
        // get the last frame in channel (drop any older queued samples)
        if let Some(sample) = receiver.try_iter().last() {
            let buffer = sample.get_buffer_owned().unwrap();
            let info = sample
                .get_caps()
                .and_then(|caps| gst_video::VideoInfo::from_caps(caps).ok())
                .unwrap();
            {
                if gst_gl_context.is_none() {
                    gst_gl_context = app
                        .glupload
                        .get_property("context")
                        .unwrap()
                        .get::<gst_gl::GLContext>()
                        .unwrap();
                }
                // Insert a sync point in the producer's GL context so we can
                // later wait for the texture to be ready in our context.
                let sync_meta = buffer.get_meta::<gst_gl::GLSyncMeta>().unwrap();
                sync_meta.set_sync_point(gst_gl_context.as_ref().unwrap());
            }
            if let Ok(frame) = gst_video::VideoFrame::from_buffer_readable_gl(buffer, &info) {
                curr_frame = Some(frame);
            }
        }
        if let Some(frame) = curr_frame.as_ref() {
            // Wait on the sync point before sampling the texture in our
            // shared context, then draw it.
            let sync_meta = frame.buffer().get_meta::<gst_gl::GLSyncMeta>().unwrap();
            sync_meta.wait(&app.shared_context);
            if let Some(texture) = frame.get_texture_id(0) {
                gl.draw_frame(texture as gl::types::GLuint);
            }
        }
        windowed_context.swap_buffers()?;
    }
    // Clean shutdown: signal EOS, stop the pipeline, return the GL context.
    app.pipeline.send_event(gst::event::Eos::new());
    app.pipeline.set_state(gst::State::Null)?;
    Ok(app.into_context())
}
/// Final teardown step: takes ownership of the windowed GL context so it is
/// dropped only here — after the pipeline and every GStreamer reference to
/// the GL context or its display are already gone. See
/// https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/issues/196
///
/// Any window- or GL-specific cleanup could be added here as needed.
fn cleanup(
    _context: glutin::WindowedContext<glutin::PossiblyCurrent>,
) -> Result<(), Error> {
    Ok(())
}
/// Runs the example: construct the app, drive the render loop, clean up.
/// Any error from the chain is reported on stderr.
fn example_main() {
    let outcome = App::new().and_then(main_loop).and_then(cleanup);
    if let Err(e) = outcome {
        eprintln!("Error! {}", e);
    }
}
fn main() {
    // examples_common::run is only required to set up the application
    // environment on macOS (not necessary in normal Cocoa applications
    // where this is set up automatically).
    examples_common::run(example_main);
}

View file

@ -1,18 +0,0 @@
#![allow(clippy::non_send_fields_in_send_ty)]
use anyhow::Result;
#[path = "../glupload.rs"]
mod glupload;
use glupload::*;
#[path = "../examples-common.rs"]
pub mod examples_common;
fn example_main() -> Result<()> {
App::new(None).and_then(main_loop)
}
fn main() -> Result<()> {
examples_common::run(example_main)
}

163
examples/src/bin/gtksink.rs Normal file
View file

@ -0,0 +1,163 @@
// This example demonstrates how to use gstreamer in conjunction with the gtk widget toolkit.
// This example shows the video produced by a videotestsrc within a small gtk gui.
// For this, the gtkglsink is used, which creates a gtk widget one can embed into the gtk gui.
// For this, there are multiple types of widgets: gtkglsink uses OpenGL to render frames, and
// gtksink uses the CPU to render the frames (which is way slower).
// So the example application first tries to use OpenGL, and when that fails, falls back.
// The pipeline looks like the following:
// gtk-gui: {gtkglsink}-widget
// (|)
// {videotestsrc} - {glsinkbin}
extern crate gstreamer as gst;
use gst::prelude::*;
use gio::prelude::*;
use gtk::prelude::*;
use std::cell::RefCell;
use std::env;
/// Builds the whole UI: a videotestsrc pipeline rendering into a gtk widget
/// (OpenGL if gtkglsink is available, CPU-rendered gtksink otherwise),
/// plus a position label updated every 500 ms, and wires teardown to the
/// application's shutdown signal.
fn create_ui(app: &gtk::Application) {
    let pipeline = gst::Pipeline::new(None);
    let src = gst::ElementFactory::make("videotestsrc", None).unwrap();
    // Create the gtk sink and retrieve the widget from it. The sink element will be used
    // in the pipeline, and the widget will be embedded in our gui.
    // Gstreamer then displays frames in the gtk widget.
    // First, we try to use the OpenGL version - and if that fails, we fall back to non-OpenGL.
    let (sink, widget) = if let Ok(gtkglsink) = gst::ElementFactory::make("gtkglsink", None) {
        // Using the OpenGL widget succeeded, so we are in for a nice playback experience with
        // low cpu usage. :)
        // The gtkglsink essentially allocates an OpenGL texture on the GPU, that it will display.
        // Now we create the glsinkbin element, which is responsible for conversions and for uploading
        // video frames to our texture (if they are not already in the GPU). Now we tell the OpenGL-sink
        // about our gtkglsink element, from where it will retrieve the OpenGL texture to fill.
        let glsinkbin = gst::ElementFactory::make("glsinkbin", None).unwrap();
        glsinkbin
            .set_property("sink", &gtkglsink.to_value())
            .unwrap();
        // The gtkglsink creates the gtk widget for us. This is accessible through a property.
        // So we get it and use it later to add it to our gui.
        let widget = gtkglsink.get_property("widget").unwrap();
        (glsinkbin, widget.get::<gtk::Widget>().unwrap().unwrap())
    } else {
        // Unfortunately, using the OpenGL widget didn't work out, so we will have to render
        // our frames manually, using the CPU. An example why this may fail is, when
        // the PC doesn't have proper graphics drivers installed.
        let sink = gst::ElementFactory::make("gtksink", None).unwrap();
        // The gtksink creates the gtk widget for us. This is accessible through a property.
        // So we get it and use it later to add it to our gui.
        let widget = sink.get_property("widget").unwrap();
        (sink, widget.get::<gtk::Widget>().unwrap().unwrap())
    };
    pipeline.add_many(&[&src, &sink]).unwrap();
    src.link(&sink).unwrap();
    // Create a simple gtk gui window to place our widget into.
    let window = gtk::Window::new(gtk::WindowType::Toplevel);
    window.set_default_size(320, 240);
    let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
    // Add our widget to the gui
    vbox.pack_start(&widget, true, true, 0);
    let label = gtk::Label::new(Some("Position: 00:00:00"));
    vbox.pack_start(&label, true, true, 5);
    window.add(&vbox);
    window.show_all();
    app.add_window(&window);
    // Need to move a new reference into the closure.
    // !!ATTENTION!!:
    // It might seem appealing to use pipeline.clone() here, because that greatly
    // simplifies the code within the callback. What this actually does, however, is creating
    // a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
    // Storing this strong reference of the pipeline within the callback (we are moving it in!),
    // which is in turn stored in another strong reference on the pipeline is creating a
    // reference cycle.
    // DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
    let pipeline_weak = pipeline.downgrade();
    // Add a timeout to the main loop that will periodically (every 500ms) be
    // executed. This will query the current position within the stream from
    // the underlying pipeline, and display it in our gui.
    // Since this closure is called by the mainloop thread, we are allowed
    // to modify the gui widgets here.
    let timeout_id = gtk::timeout_add(500, move || {
        // Here we temporarily retrieve a strong reference on the pipeline from the weak one
        // we moved into this callback.
        let pipeline = match pipeline_weak.upgrade() {
            Some(pipeline) => pipeline,
            // NOTE(review): the sibling gtkvideooverlay example returns
            // Continue(false) here; returning true keeps an inert timeout
            // ticking until shutdown removes it — confirm which is intended.
            None => return glib::Continue(true),
        };
        // Query the current playing position from the underlying pipeline.
        let position = pipeline
            .query_position::<gst::ClockTime>()
            .unwrap_or_else(|| 0.into());
        // Display the playing position in the gui.
        label.set_text(&format!("Position: {:.0}", position));
        // Tell the callback to continue calling this closure.
        glib::Continue(true)
    });
    let bus = pipeline.get_bus().unwrap();
    pipeline
        .set_state(gst::State::Playing)
        .expect("Unable to set the pipeline to the `Playing` state");
    // Watch the bus on the main loop: quit on EOS, report errors and quit
    // the application on error. Only a weak app reference is moved in.
    let app_weak = app.downgrade();
    bus.add_watch_local(move |_, msg| {
        use gst::MessageView;
        let app = match app_weak.upgrade() {
            Some(app) => app,
            None => return glib::Continue(false),
        };
        match msg.view() {
            MessageView::Eos(..) => gtk::main_quit(),
            MessageView::Error(err) => {
                println!(
                    "Error from {:?}: {} ({:?})",
                    err.get_src().map(|s| s.get_path_string()),
                    err.get_error(),
                    err.get_debug()
                );
                app.quit();
            }
            _ => (),
        };
        glib::Continue(true)
    })
    .expect("Failed to add bus watch");
    // Pipeline reference is owned by the closure below, so will be
    // destroyed once the app is destroyed
    let timeout_id = RefCell::new(Some(timeout_id));
    app.connect_shutdown(move |_| {
        pipeline
            .set_state(gst::State::Null)
            .expect("Unable to set the pipeline to the `Null` state");
        bus.remove_watch().unwrap();
        if let Some(timeout_id) = timeout_id.borrow_mut().take() {
            glib::source_remove(timeout_id);
        }
    });
}
/// Entry point: initialize both toolkits, build the GTK application and
/// hand the process arguments over to GTK's own argument handling.
fn main() {
    // Both libraries must be initialized before any of their APIs are used.
    gst::init().unwrap();
    gtk::init().unwrap();
    let application = gtk::Application::new(None, gio::ApplicationFlags::FLAGS_NONE).unwrap();
    application.connect_activate(create_ui);
    let cli_args: Vec<String> = env::args().collect();
    application.run(&cli_args);
}

View file

@ -0,0 +1,280 @@
// This example demonstrates another type of combination of gtk and gstreamer,
// in comparison to the gtksink example.
// This example uses regions that are managed by the window system, and uses
// the window system's api to insert a videostream into these regions.
// So essentially, the window system of the system overlays our gui with
// the video frames - within the region that we tell it to use.
// Disadvantage of this method is, that it's highly platform specific, since
// the big platforms all have their own window system. Thus, this example
// has special code to handle differences between platforms.
// Windows could theoretically be supported by this example, but is not yet implemented.
// One of the very few platforms (if not the only one) that cannot provide
// the API needed for this are Linux desktops using Wayland.
// TODO: Add Windows support
// In this case, a testvideo is displayed within our gui, using the
// following pipeline:
// {videotestsrc} - {xvimagesink(on linux)}
// {videotestsrc} - {glimagesink(on mac)}
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_video as gst_video;
use gst_video::prelude::*;
use glib::object::ObjectType;
use gio::prelude::*;
use gtk::prelude::*;
use gdk::prelude::*;
use std::env;
use std::os::raw::c_void;
use std::cell::RefCell;
use std::process;
/// Creates the platform video sink for Linux/X11.
#[cfg(all(target_os = "linux", feature = "gtkvideooverlay-x11"))]
fn create_video_sink() -> gst::Element {
    // On Linux under the Xorg display server we rely on the X11 protocol's
    // XV extension, which can overlay screen regions with video streams.
    // The xvimagesink element implements exactly that.
    let sink = gst::ElementFactory::make("xvimagesink", None);
    sink.unwrap()
}
/// Tells the sink's `VideoOverlay` which native X11 window to draw into.
///
/// Queries gdk for the XID backing `gdk_window` and passes it to the
/// overlay; exits the process if the display is not an X11 display.
#[cfg(all(target_os = "linux", feature = "gtkvideooverlay-x11"))]
fn set_window_handle(video_overlay: &gst_video::VideoOverlay, gdk_window: &gdk::Window) {
    let display_type_name = gdk_window.get_display().get_type().name();
    // Check if we're using X11 or ...
    if display_type_name == "GdkX11Display" {
        // Declared here instead of linking gdk-x11 bindings: resolved from
        // the already-loaded gdk library at link time.
        extern "C" {
            pub fn gdk_x11_window_get_xid(window: *mut glib::object::GObject) -> *mut c_void;
        }
        // This is unsafe because the "window handle" we pass here is basically like a raw pointer.
        // If a wrong value were to be passed here (and you can pass any integer), then the window
        // system will most likely cause the application to crash.
        #[allow(clippy::cast_ptr_alignment)]
        unsafe {
            // Here we ask gdk what native window handle we got assigned for
            // our video region from the window system, and then we will
            // pass this unique identifier to the overlay provided by our
            // sink - so the sink can then arrange the overlay.
            let xid = gdk_x11_window_get_xid(gdk_window.as_ptr() as *mut _);
            video_overlay.set_window_handle(xid as usize);
        }
    } else {
        println!("Add support for display type '{}'", display_type_name);
        process::exit(-1);
    }
}
/// Creates the platform video sink for macOS.
#[cfg(all(target_os = "macos", feature = "gtkvideooverlay-quartz"))]
fn create_video_sink() -> gst::Element {
    // On Mac the overlay is realized by drawing an OpenGL texture over the
    // window region; the glimagesink element provides that.
    let sink = gst::ElementFactory::make("glimagesink", None);
    sink.unwrap()
}
/// Tells the sink's `VideoOverlay` which native macOS view to draw into.
///
/// Queries gdk for the NSView backing `gdk_window` and passes it to the
/// overlay; exits the process if the display is not a Quartz display.
#[cfg(all(target_os = "macos", feature = "gtkvideooverlay-quartz"))]
fn set_window_handle(video_overlay: &gst_video::VideoOverlay, gdk_window: &gdk::Window) {
    let display_type_name = gdk_window.get_display().get_type().name();
    if display_type_name == "GdkQuartzDisplay" {
        extern "C" {
            pub fn gdk_quartz_window_get_nsview(window: *mut glib::object::GObject) -> *mut c_void;
        }
        // This is unsafe because the "window handle" we pass here is basically like a raw pointer.
        // If a wrong value were to be passed here (and you can pass any integer), then the window
        // system will most likely cause the application to crash.
        #[allow(clippy::cast_ptr_alignment)]
        unsafe {
            // Here we ask gdk what native window handle we got assigned for
            // our video region from the windowing system, and then we will
            // pass this unique identifier to the overlay provided by our
            // sink - so the sink can then arrange the overlay.
            let window = gdk_quartz_window_get_nsview(gdk_window.as_ptr() as *mut _);
            video_overlay.set_window_handle(window as usize);
        }
    } else {
        // Fixed: the original message had an unbalanced quote ("'{}").
        println!("Unsupported display type '{}'", display_type_name);
        process::exit(-1);
    }
}
/// Builds the UI: a videotestsrc pipeline whose video is overlaid by the
/// window system onto a DrawingArea (via the platform-specific sink from
/// `create_video_sink`), plus a position label updated every 500 ms, and
/// wires teardown to the application's shutdown signal.
fn create_ui(app: &gtk::Application) {
    let pipeline = gst::Pipeline::new(None);
    let src = gst::ElementFactory::make("videotestsrc", None).unwrap();
    // Since using the window system to overlay our gui window is making
    // direct contact with the windowing system, this is highly platform-
    // specific. This example supports Linux and Mac (using X11 and Quartz).
    let sink = create_video_sink();
    pipeline.add_many(&[&src, &sink]).unwrap();
    src.link(&sink).unwrap();
    // First, we create our gtk window - which will contain a region where
    // our overlayed video will be displayed in.
    let window = gtk::Window::new(gtk::WindowType::Toplevel);
    window.set_default_size(320, 240);
    let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
    // This creates the widget we will display our overlay in.
    // Later, we will try to tell our window system about this region, so
    // it can overlay it with our video stream.
    let video_window = gtk::DrawingArea::new();
    video_window.set_size_request(320, 240);
    // Use the platform-specific sink to create our overlay.
    // Since we only use the video_overlay in the closure below, we need a weak reference.
    // !!ATTENTION!!:
    // It might seem appealing to use .clone() here, because that greatly
    // simplifies the code within the callback. What this actually does, however, is creating
    // a memory leak.
    let video_overlay = sink
        .dynamic_cast::<gst_video::VideoOverlay>()
        .unwrap()
        .downgrade();
    // Connect to this widget's realize signal, which will be emitted
    // after its display has been initialized. This is necessary, because
    // the window system doesn't know about our region until it was initialized.
    video_window.connect_realize(move |video_window| {
        // Here we temporarily retrieve a strong reference on the video-overlay from the
        // weak reference that we moved into the closure.
        let video_overlay = match video_overlay.upgrade() {
            Some(video_overlay) => video_overlay,
            None => return,
        };
        // Gtk uses gdk under the hood, to handle its drawing. Drawing regions are
        // called gdk windows. We request this underlying drawing region from the
        // widget we will overlay with our video.
        let gdk_window = video_window.get_window().unwrap();
        // This is where we tell our window system about the drawing-region we
        // want it to overlay. Most often, the window system would only know
        // about our most outer region (or: our window).
        if !gdk_window.ensure_native() {
            println!("Can't create native window for widget");
            process::exit(-1);
        }
        set_window_handle(&video_overlay, &gdk_window);
    });
    vbox.pack_start(&video_window, true, true, 0);
    let label = gtk::Label::new(Some("Position: 00:00:00"));
    vbox.pack_start(&label, true, true, 5);
    window.add(&vbox);
    window.show_all();
    app.add_window(&window);
    // Need to move a new reference into the closure.
    // !!ATTENTION!!:
    // It might seem appealing to use pipeline.clone() here, because that greatly
    // simplifies the code within the callback. What this actually does, however, is creating
    // a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
    // Storing this strong reference of the pipeline within the callback (we are moving it in!),
    // which is in turn stored in another strong reference on the pipeline is creating a
    // reference cycle.
    // DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
    let pipeline_weak = pipeline.downgrade();
    // Add a timeout to the main loop that will periodically (every 500ms) be
    // executed. This will query the current position within the stream from
    // the underlying pipeline, and display it in our gui.
    // Since this closure is called by the mainloop thread, we are allowed
    // to modify the gui widgets here.
    let timeout_id = gtk::timeout_add(500, move || {
        // Here we temporarily retrieve a strong reference on the pipeline from the weak one
        // we moved into this callback.
        let pipeline = match pipeline_weak.upgrade() {
            Some(pipeline) => pipeline,
            None => return glib::Continue(false),
        };
        // Query the current playing position from the underlying pipeline.
        let position = pipeline
            .query_position::<gst::ClockTime>()
            .unwrap_or_else(|| 0.into());
        // Display the playing position in the gui.
        label.set_text(&format!("Position: {:.0}", position));
        // Tell the timeout to continue calling this callback.
        glib::Continue(true)
    });
    let bus = pipeline.get_bus().unwrap();
    pipeline
        .set_state(gst::State::Playing)
        .expect("Unable to set the pipeline to the `Playing` state");
    // Watch the bus on the main loop: quit on EOS, report errors and quit
    // the application on error. Only a weak app reference is moved in.
    let app_weak = app.downgrade();
    bus.add_watch_local(move |_, msg| {
        use gst::MessageView;
        let app = match app_weak.upgrade() {
            Some(app) => app,
            None => return glib::Continue(false),
        };
        match msg.view() {
            MessageView::Eos(..) => gtk::main_quit(),
            MessageView::Error(err) => {
                println!(
                    "Error from {:?}: {} ({:?})",
                    err.get_src().map(|s| s.get_path_string()),
                    err.get_error(),
                    err.get_debug()
                );
                app.quit();
            }
            _ => (),
        };
        glib::Continue(true)
    })
    .expect("Failed to add bus watch");
    // Pipeline reference is owned by the closure below, so will be
    // destroyed once the app is destroyed
    let timeout_id = RefCell::new(Some(timeout_id));
    app.connect_shutdown(move |_| {
        pipeline
            .set_state(gst::State::Null)
            .expect("Unable to set the pipeline to the `Null` state");
        bus.remove_watch().unwrap();
        if let Some(timeout_id) = timeout_id.borrow_mut().take() {
            glib::source_remove(timeout_id);
        }
    });
}
/// Entry point: bail out on unsupported (non-unix) platforms, initialize
/// both toolkits, build the GTK application and run it with the process
/// arguments.
fn main() {
    #[cfg(not(unix))]
    {
        println!("Add support for target platform");
        process::exit(-1);
    }
    // Both libraries must be initialized before any of their APIs are used.
    gst::init().unwrap();
    gtk::init().unwrap();
    let application = gtk::Application::new(None, gio::ApplicationFlags::FLAGS_NONE).unwrap();
    application.connect_activate(create_ui);
    let cli_args: Vec<String> = env::args().collect();
    application.run(&cli_args);
}

View file

@ -1,7 +1,8 @@
// This example demonstrates how to use GStreamer's iteration APIs.
// This is used at multiple occasions - for example to iterate an
// This is used at multiple occassions - for example to iterate an
// element's pads.
extern crate gstreamer as gst;
use gst::prelude::*;
#[path = "../examples-common.rs"]
@ -13,14 +14,14 @@ fn example_main() {
// Create and use an identity element here.
// This element does nothing, really. We also never add it to a pipeline.
// We just want to iterate the identity element's pads.
let identity = gst::ElementFactory::make("identity").build().unwrap();
let identity = gst::ElementFactory::make("identity", None).unwrap();
// Get an iterator over all pads of the identity-element.
let mut iter = identity.iterate_pads();
loop {
// In an endless-loop, we use the iterator until we either reach the end
// or we hit an error.
match iter.next() {
Ok(Some(pad)) => println!("Pad: {}", pad.name()),
Ok(Some(pad)) => println!("Pad: {}", pad.get_name()),
Ok(None) => {
// We reached the end of the iterator, there are no more pads
println!("Done");
@ -45,7 +46,7 @@ fn example_main() {
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -3,10 +3,12 @@
// as launch syntax.
// When the parsing succeeded, the pipeline is run until the stream ends or an error happens.
use std::{env, process};
extern crate gstreamer as gst;
use gst::prelude::*;
use std::env;
use std::process;
#[path = "../examples-common.rs"]
mod examples_common;
@ -17,7 +19,7 @@ fn example_main() {
gst::init().unwrap();
// Let GStreamer create a pipeline from the parsed launch syntax on the cli.
// In comparison to the launch_glib_main example, this is using the advanced launch syntax
// In comparision to the launch_glib_main example, this is using the advanced launch syntax
// parsing API of GStreamer. The function returns a Result, handing us the pipeline if
// parsing and creating succeeded, and hands us detailed error information if something
// went wrong. The error is passed as gst::ParseError. In this example, we separately
@ -26,29 +28,26 @@ fn example_main() {
// Especially GUIs should probably handle this case, to tell users that they need to
// install the corresponding gstreamer plugins.
let mut context = gst::ParseContext::new();
let pipeline = match gst::parse::launch_full(
&pipeline_str,
Some(&mut context),
gst::ParseFlags::empty(),
) {
Ok(pipeline) => pipeline,
Err(err) => {
if let Some(gst::ParseError::NoSuchElement) = err.kind::<gst::ParseError>() {
println!("Missing element(s): {:?}", context.missing_elements());
} else {
println!("Failed to parse pipeline: {err}");
}
let pipeline =
match gst::parse_launch_full(&pipeline_str, Some(&mut context), gst::ParseFlags::empty()) {
Ok(pipeline) => pipeline,
Err(err) => {
if let Some(gst::ParseError::NoSuchElement) = err.kind::<gst::ParseError>() {
println!("Missing element(s): {:?}", context.get_missing_elements());
} else {
println!("Failed to parse pipeline: {}", err);
}
process::exit(-1)
}
};
let bus = pipeline.bus().unwrap();
process::exit(-1)
}
};
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -56,9 +55,9 @@ fn example_main() {
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
break;
}
@ -72,7 +71,7 @@ fn example_main() {
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -7,10 +7,11 @@
// things from the main loop (timeouts, UI events, socket events, ...) instead
// of just handling messages from GStreamer's bus.
use std::env;
extern crate gstreamer as gst;
use gst::prelude::*;
use std::env;
#[path = "../examples-common.rs"]
mod examples_common;
@ -24,8 +25,8 @@ fn example_main() {
let main_loop = glib::MainLoop::new(None, false);
// Let GStreamer create a pipeline from the parsed launch syntax on the cli.
let pipeline = gst::parse::launch(&pipeline_str).unwrap();
let bus = pipeline.bus().unwrap();
let pipeline = gst::parse_launch(&pipeline_str).unwrap();
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
@ -34,39 +35,43 @@ fn example_main() {
let main_loop_clone = main_loop.clone();
//bus.add_signal_watch();
//bus.connect_message(None, move |_, msg| {
let _bus_watch = bus
.add_watch(move |_, msg| {
use gst::MessageView;
//bus.connect_message(move |_, msg| {
bus.add_watch(move |_, msg| {
use gst::MessageView;
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => main_loop.quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
main_loop.quit();
}
_ => (),
};
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => main_loop.quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
main_loop.quit();
}
_ => (),
};
glib::ControlFlow::Continue
})
.expect("Failed to add bus watch");
glib::Continue(true)
})
.expect("Failed to add bus watch");
main_loop.run();
pipeline
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
// Here we remove the bus watch we added above. This avoids a memory leak, that might
// otherwise happen because we moved a strong reference (clone of main_loop) into the
// callback closure above.
bus.remove_watch().unwrap();
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -1,368 +0,0 @@
// This example demonstrates how to draw an overlay on a video stream using
// Direct2D/DirectWrite/WIC and the overlay composition element.
// {videotestsrc} - {overlaycomposition} - {capsfilter} - {videoconvert} - {autovideosink}
// The capsfilter element allows us to dictate the video resolution we want for the
// videotestsrc and the overlaycomposition element.
use std::sync::{Arc, Mutex};
use byte_slice_cast::*;
use anyhow::Error;
use derive_more::{Display, Error};
use gst::prelude::*;
use windows::{
Foundation::Numerics::*,
Win32::{
Graphics::{
Direct2D::{Common::*, *},
DirectWrite::*,
Dxgi::Common::*,
Imaging::*,
},
System::Com::*,
},
};
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
}
struct DrawingContext {
// Factory for creating render target
d2d_factory: ID2D1Factory,
// Used to create WIC bitmap surface
wic_factory: IWICImagingFactory,
// text layout holding text information (string, font, size, etc)
text_layout: IDWriteTextLayout,
// Holding rendred image
bitmap: Option<IWICBitmap>,
// Bound to bitmap and used to actual Direct2D rendering
render_target: Option<ID2D1RenderTarget>,
info: Option<gst_video::VideoInfo>,
}
// Required for IWICBitmap
unsafe impl Send for DrawingContext {}
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
let pipeline = gst::Pipeline::default();
// The videotestsrc supports multiple test patterns. In this example, we will use the
// pattern with a white ball moving around the video's center point.
let src = gst::ElementFactory::make("videotestsrc")
.property_from_str("pattern", "ball")
.build()?;
let overlay = gst::ElementFactory::make("overlaycomposition").build()?;
let caps = gst_video::VideoCapsBuilder::new()
.width(800)
.height(800)
.framerate((30, 1).into())
.build();
let capsfilter = gst::ElementFactory::make("capsfilter")
.property("caps", &caps)
.build()?;
let videoconvert = gst::ElementFactory::make("videoconvert").build()?;
let sink = gst::ElementFactory::make("autovideosink").build()?;
pipeline.add_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
gst::Element::link_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
// Most Direct2D/DirectWrite APIs (including factory methods) are marked as
// "unsafe", but they shouldn't fail in practice
let drawer = unsafe {
let d2d_factory =
D2D1CreateFactory::<ID2D1Factory>(D2D1_FACTORY_TYPE_MULTI_THREADED, None).unwrap();
let dwrite_factory =
DWriteCreateFactory::<IDWriteFactory>(DWRITE_FACTORY_TYPE_SHARED).unwrap();
let text_format = dwrite_factory
.CreateTextFormat(
windows::core::w!("Arial"),
None,
DWRITE_FONT_WEIGHT_BOLD,
DWRITE_FONT_STYLE_NORMAL,
DWRITE_FONT_STRETCH_NORMAL,
32f32,
windows::core::w!("en-us"),
)
.unwrap();
let text_layout = dwrite_factory
.CreateTextLayout(
windows::core::w!("GStreamer").as_wide(),
&text_format,
// Size will be updated later on "caps-changed" signal
800f32,
800f32,
)
.unwrap();
// Top (default) and center alignment
text_layout
.SetTextAlignment(DWRITE_TEXT_ALIGNMENT_CENTER)
.unwrap();
let wic_factory: IWICImagingFactory =
CoCreateInstance(&CLSID_WICImagingFactory, None, CLSCTX_ALL).unwrap();
Arc::new(Mutex::new(DrawingContext {
d2d_factory,
wic_factory,
text_layout,
bitmap: None,
render_target: None,
info: None,
}))
};
overlay.connect_closure(
"draw",
false,
glib::closure!(
#[strong]
drawer,
move |_overlay: &gst::Element, sample: &gst::Sample| {
use std::f64::consts::PI;
let drawer = drawer.lock().unwrap();
let buffer = sample.buffer().unwrap();
let timestamp = buffer.pts().unwrap();
let info = drawer.info.as_ref().unwrap();
let text_layout = &drawer.text_layout;
let bitmap = drawer.bitmap.as_ref().unwrap();
let render_target = drawer.render_target.as_ref().unwrap();
let global_angle = 360.
* (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
/ (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
let center_x = (info.width() / 2) as f32;
let center_y = (info.height() / 2) as f32;
let top_margin = (info.height() / 20) as f32;
unsafe {
// Begin drawing
render_target.BeginDraw();
// Clear background
render_target.Clear(Some(&D2D1_COLOR_F {
r: 0f32,
g: 0f32,
b: 0f32,
a: 0f32,
}));
// This loop will render 10 times the string "GStreamer" in a circle
for i in 0..10 {
let angle = (360. * f64::from(i)) / 10.0;
let red = ((1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0) as f32;
let text_brush = render_target
.CreateSolidColorBrush(
&D2D1_COLOR_F {
r: red,
g: 0f32,
b: 1f32 - red,
a: 1f32,
},
None,
)
.unwrap();
let angle = (angle + global_angle) as f32;
let matrix = Matrix3x2::rotation(angle, center_x, center_y);
render_target.SetTransform(&matrix);
render_target.DrawTextLayout(
D2D_POINT_2F {
x: 0f32,
y: top_margin,
},
text_layout,
&text_brush,
D2D1_DRAW_TEXT_OPTIONS_NONE,
);
}
// EndDraw may not be successful for some reasons.
// Ignores any error in this example
let _ = render_target.EndDraw(None, None);
// Make sure all operations is completed before copying
// bitmap to buffer
let _ = render_target.Flush(None::<*mut u64>, None::<*mut u64>);
}
let mut buffer =
gst::Buffer::with_size((info.width() * info.height() * 4) as usize).unwrap();
{
let buffer_mut = buffer.get_mut().unwrap();
let mut map = buffer_mut.map_writable().unwrap();
let dst = map.as_mut_slice_of::<u8>().unwrap();
unsafe {
// Bitmap size is equal to the background image size.
// Copy entire memory
bitmap
.CopyPixels(std::ptr::null(), info.width() * 4, dst)
.unwrap();
}
}
gst_video::VideoMeta::add_full(
buffer.get_mut().unwrap(),
gst_video::VideoFrameFlags::empty(),
gst_video::VideoFormat::Bgra,
info.width(),
info.height(),
&[0],
&[(info.width() * 4) as i32],
)
.unwrap();
// Turn the buffer into a VideoOverlayRectangle, then place
// that into a VideoOverlayComposition and return it.
//
// A VideoOverlayComposition can take a Vec of such rectangles
// spaced around the video frame, but we're just outputting 1
// here
let rect = gst_video::VideoOverlayRectangle::new_raw(
&buffer,
0,
0,
info.width(),
info.height(),
gst_video::VideoOverlayFormatFlags::PREMULTIPLIED_ALPHA,
);
gst_video::VideoOverlayComposition::new(Some(&rect)).unwrap()
}
),
);
// Add a signal handler to the overlay's "caps-changed" signal. This could e.g.
// be called when the sink that we render to does not support resizing the image
// itself - but the user just changed the window-size. The element after the overlay
// will then change its caps and we use the notification about this change to
// resize our canvas's size.
// Another possibility for when this might happen is, when our video is a network
// stream that dynamically changes resolution when enough bandwidth is available.
overlay.connect_closure(
"caps-changed",
false,
glib::closure!(move |_overlay: &gst::Element,
caps: &gst::Caps,
_width: u32,
_height: u32| {
let mut drawer = drawer.lock().unwrap();
let info = gst_video::VideoInfo::from_caps(caps).unwrap();
unsafe {
// Update text layout to be identical to new video resolution
drawer.text_layout.SetMaxWidth(info.width() as f32).unwrap();
drawer
.text_layout
.SetMaxHeight(info.height() as f32)
.unwrap();
// Create new WIC bitmap with PBGRA format (pre-multiplied BGRA)
let bitmap = drawer
.wic_factory
.CreateBitmap(
info.width(),
info.height(),
&GUID_WICPixelFormat32bppPBGRA,
WICBitmapCacheOnDemand,
)
.unwrap();
let render_target = drawer
.d2d_factory
.CreateWicBitmapRenderTarget(
&bitmap,
&D2D1_RENDER_TARGET_PROPERTIES {
r#type: D2D1_RENDER_TARGET_TYPE_DEFAULT,
pixelFormat: D2D1_PIXEL_FORMAT {
format: DXGI_FORMAT_B8G8R8A8_UNORM,
alphaMode: D2D1_ALPHA_MODE_PREMULTIPLIED,
},
// zero means default DPI
dpiX: 0f32,
dpiY: 0f32,
usage: D2D1_RENDER_TARGET_USAGE_NONE,
minLevel: D2D1_FEATURE_LEVEL_DEFAULT,
},
)
.unwrap();
drawer.render_target = Some(render_target);
drawer.bitmap = Some(bitmap);
}
drawer.info = Some(info);
}),
);
Ok(pipeline)
}
fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
let bus = pipeline
.bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
use gst::MessageView;
match msg.view() {
MessageView::Eos(..) => break,
MessageView::Error(err) => {
pipeline.set_state(gst::State::Null)?;
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
}
.into());
}
_ => (),
}
}
pipeline.set_state(gst::State::Null)?;
Ok(())
}
fn main() {
// WIC requires COM initialization
unsafe {
CoInitializeEx(None, COINIT_MULTITHREADED).unwrap();
}
match create_pipeline().and_then(main_loop) {
Ok(r) => r,
Err(e) => eprintln!("Error! {}", e),
}
unsafe {
CoUninitialize();
}
}

View file

@ -7,30 +7,40 @@
// {videotestsrc} - {overlaycomposition} - {capsfilter} - {videoconvert} - {autovideosink}
// The capsfilter element allows us to dictate the video resolution we want for the
// videotestsrc and the overlaycomposition element.
//
// There is a small amount of unsafe code that demonstrates how to work around
// Cairo's internal refcounting of the target buffer surface
use std::{
ops,
sync::{Arc, Mutex},
};
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_video as gst_video;
use pango::prelude::*;
use std::ops;
use std::sync::{Arc, Mutex};
use anyhow::Error;
use derive_more::{Display, Error};
use gst::prelude::*;
use pango::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[display(fmt = "Missing element {}", _0)]
struct MissingElement(#[error(not(source))] &'static str);
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
source: glib::Error,
}
struct DrawingContext {
layout: LayoutWrapper,
layout: glib::SendUniqueCell<LayoutWrapper>,
info: Option<gst_video::VideoInfo>,
}
@ -41,49 +51,52 @@ impl ops::Deref for LayoutWrapper {
type Target = pango::Layout;
fn deref(&self) -> &pango::Layout {
assert_eq!(self.0.ref_count(), 1);
&self.0
}
}
// SAFETY: We ensure that there are never multiple references to the layout.
unsafe impl Send for LayoutWrapper {}
unsafe impl glib::SendUnique for LayoutWrapper {
fn is_unique(&self) -> bool {
self.0.ref_count() == 1
}
}
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
let pipeline = gst::Pipeline::default();
let pipeline = gst::Pipeline::new(None);
let src = gst::ElementFactory::make("videotestsrc", None)
.map_err(|_| MissingElement("videotestsrc"))?;
let overlay = gst::ElementFactory::make("overlaycomposition", None)
.map_err(|_| MissingElement("overlaycomposition"))?;
let capsfilter =
gst::ElementFactory::make("capsfilter", None).map_err(|_| MissingElement("capsfilter"))?;
let videoconvert = gst::ElementFactory::make("videoconvert", None)
.map_err(|_| MissingElement("videoconvert"))?;
let sink = gst::ElementFactory::make("autovideosink", None)
.map_err(|_| MissingElement("autovideosink"))?;
// The videotestsrc supports multiple test patterns. In this example, we will use the
// pattern with a white ball moving around the video's center point.
let src = gst::ElementFactory::make("videotestsrc")
.property_from_str("pattern", "ball")
.build()?;
let overlay = gst::ElementFactory::make("overlaycomposition").build()?;
pipeline.add_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
gst::Element::link_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
// Plug in a capsfilter element that will force the videotestsrc and the overlay to work
// with images of the size 800x800, and framerate of 15 fps, since my laptop struggles
// rendering it at the default 30 fps
let caps = gst_video::VideoCapsBuilder::new()
.width(800)
.height(800)
.framerate((15, 1).into())
let caps = gst::Caps::builder("video/x-raw")
.field("width", &800i32)
.field("height", &800i32)
.field("framerate", &gst::Fraction::new(15, 1))
.build();
let capsfilter = gst::ElementFactory::make("capsfilter")
.property("caps", &caps)
.build()?;
capsfilter.set_property("caps", &caps).unwrap();
let videoconvert = gst::ElementFactory::make("videoconvert").build()?;
let sink = gst::ElementFactory::make("autovideosink").build()?;
pipeline.add_many([&src, &overlay, &capsfilter, &videoconvert, &sink])?;
gst::Element::link_many([&src, &overlay, &capsfilter, &videoconvert, &sink])?;
// The videotestsrc supports multiple test patterns. In this example, we will use the
// pattern with a white ball moving around the video's center point.
src.set_property_from_str("pattern", "ball");
// The PangoFontMap represents the set of fonts available for a particular rendering system.
let fontmap = pangocairo::FontMap::new();
let fontmap = pangocairo::FontMap::new().unwrap();
// Create a new pango layouting context for the fontmap.
let context = fontmap.create_context();
let context = fontmap.create_context().unwrap();
// Create a pango layout object. This object is a string of text we want to layout.
// It is wrapped in a LayoutWrapper (defined above) to be able to send it across threads.
let layout = LayoutWrapper(pango::Layout::new(&context));
@ -103,8 +116,12 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
// interior mutability (see Rust docs). Via this we can get a mutable reference to the contained
// data which is checked at runtime for uniqueness (blocking in case of mutex, panic in case
// of refcell) instead of compile-time (like with normal references).
let drawer = Arc::new(Mutex::new(DrawingContext { layout, info: None }));
let drawer = Arc::new(Mutex::new(DrawingContext {
layout: glib::SendUniqueCell::new(layout).unwrap(),
info: None,
}));
let drawer_clone = drawer.clone();
// Connect to the overlaycomposition element's "draw" signal, which is emitted for
// each videoframe piped through the element. The signal handler needs to
// return a gst_video::VideoOverlayComposition to be drawn on the frame
@ -116,105 +133,125 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
//
// In this case, the signal passes the gst::Element and a gst::Sample with
// the current buffer
overlay.connect_closure(
"draw",
false,
glib::closure!(
#[strong] drawer,
move |_overlay: &gst::Element, sample: &gst::Sample| {
overlay
.connect("draw", false, move |args| {
use std::f64::consts::PI;
let drawer = &drawer_clone;
let drawer = drawer.lock().unwrap();
let buffer = sample.buffer().unwrap();
let timestamp = buffer.pts().unwrap();
// Get the signal's arguments
let _overlay = args[0].get::<gst::Element>().unwrap().unwrap();
let sample = args[1].get::<gst::Sample>().unwrap().unwrap();
let buffer = sample.get_buffer().unwrap();
let timestamp = buffer.get_pts();
let info = drawer.info.as_ref().unwrap();
let layout = &drawer.layout;
let layout = drawer.layout.borrow();
let angle = 2.0 * PI * (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
/ (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
let angle = 2.0
* PI
* ((timestamp % (10 * gst::SECOND)).unwrap() as f64
/ (10.0 * gst::SECOND_VAL as f64));
/* Create a Cairo image surface to draw into and the context around it. */
let surface = cairo::ImageSurface::create(
cairo::Format::ARgb32,
info.width() as i32,
info.height() as i32,
)
.unwrap();
let cr = cairo::Context::new(&surface).expect("Failed to create cairo context");
cr.save().expect("Failed to save state");
cr.set_operator(cairo::Operator::Clear);
cr.paint().expect("Failed to clear background");
cr.restore().expect("Failed to restore state");
// The image we draw (the text) will be static, but we will change the
// transformation on the drawing context, which rotates and shifts everything
// that we draw afterwards. Like this, we have no complicated calculations
// in the actual drawing below.
// Calling multiple transformation methods after each other will apply the
// new transformation on top. If you repeat the cr.rotate(angle) line below
// this a second time, everything in the canvas will rotate twice as fast.
cr.translate(
f64::from(info.width()) / 2.0,
f64::from(info.height()) / 2.0,
);
cr.rotate(angle);
// This loop will render 10 times the string "GStreamer" in a circle
for i in 0..10 {
// Cairo, like most rendering frameworks, is using a stack for transformations
// with this, we push our current transformation onto this stack - allowing us
// to make temporary changes / render something / and then returning to the
// previous transformations.
cr.save().expect("Failed to save state");
let angle = (360. * f64::from(i)) / 10.0;
let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
cr.set_source_rgb(red, 0.0, 1.0 - red);
cr.rotate(angle * PI / 180.0);
// Update the text layout. This function is only updating pango's internal state.
// So e.g. that after a 90 degree rotation it knows that what was previously going
// to end up as a 200x100 rectangle would now be 100x200.
pangocairo::functions::update_layout(&cr, layout);
let (width, _height) = layout.size();
// Using width and height of the text, we can properly position it within
// our canvas.
cr.move_to(
-(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
-(f64::from(info.height())) / 2.0,
);
// After telling the layout object where to draw itself, we actually tell
// it to draw itself into our cairo context.
pangocairo::functions::show_layout(&cr, layout);
// Here we go one step up in our stack of transformations, removing any
// changes we did to them since the last call to cr.save();
cr.restore().expect("Failed to restore state");
}
/* Drop the Cairo context to release the additional reference to the data and
* then take ownership of the data. This only works if we have the one and only
* reference to the image surface */
drop(cr);
let stride = surface.stride();
let data = surface.take_data().unwrap();
/* Create a gst::Buffer for Cairo to draw into */
let frame_width = info.width() as usize;
let frame_height = info.height() as usize;
let stride = 4 * frame_width;
let frame_size = stride * frame_height;
/* Create an RGBA buffer, and add a video meta that the videooverlaycomposition expects */
let mut buffer = gst::Buffer::from_mut_slice(data);
let mut buffer = gst::Buffer::with_size(frame_size).unwrap();
gst_video::VideoMeta::add_full(
gst_video::VideoMeta::add(
buffer.get_mut().unwrap(),
gst_video::VideoFrameFlags::empty(),
gst_video::VideoFormat::Bgra,
info.width(),
info.height(),
&[0],
&[stride],
)
.unwrap();
frame_width as u32,
frame_height as u32,
).unwrap();
let buffer = buffer.into_mapped_buffer_writable().unwrap();
let buffer = {
let buffer_ptr = unsafe { buffer.get_buffer().as_ptr() };
let surface = cairo::ImageSurface::create_for_data(
buffer,
cairo::Format::ARgb32,
frame_width as i32,
frame_height as i32,
stride as i32,
)
.unwrap();
let cr = cairo::Context::new(&surface);
cr.save();
cr.set_operator(cairo::Operator::Clear);
cr.paint();
cr.restore();
// The image we draw (the text) will be static, but we will change the
// transformation on the drawing context, which rotates and shifts everything
// that we draw afterwards. Like this, we have no complicated calulations
// in the actual drawing below.
// Calling multiple transformation methods after each other will apply the
// new transformation on top. If you repeat the cr.rotate(angle) line below
// this a second time, everything in the canvas will rotate twice as fast.
cr.translate(
f64::from(info.width()) / 2.0,
f64::from(info.height()) / 2.0,
);
cr.rotate(angle);
// This loop will render 10 times the string "GStreamer" in a circle
for i in 0..10 {
// Cairo, like most rendering frameworks, is using a stack for transformations
// with this, we push our current transformation onto this stack - allowing us
// to make temporary changes / render something / and then returning to the
// previous transformations.
cr.save();
let angle = (360. * f64::from(i)) / 10.0;
let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
cr.set_source_rgb(red, 0.0, 1.0 - red);
cr.rotate(angle * PI / 180.0);
// Update the text layout. This function is only updating pango's internal state.
// So e.g. that after a 90 degree rotation it knows that what was previously going
// to end up as a 200x100 rectangle would now be 100x200.
pangocairo::functions::update_layout(&cr, &**layout);
let (width, _height) = layout.get_size();
// Using width and height of the text, we can properly possition it within
// our canvas.
cr.move_to(
-(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
-(f64::from(info.height())) / 2.0,
);
// After telling the layout object where to draw itself, we actually tell
// it to draw itself into our cairo context.
pangocairo::functions::show_layout(&cr, &**layout);
// Here we go one step up in our stack of transformations, removing any
// changes we did to them since the last call to cr.save();
cr.restore();
}
// Safety: The surface still owns a mutable reference to the buffer but our reference
// to the surface here is the last one. After dropping the surface the buffer would be
// freed, so we keep an additional strong reference here before dropping the surface,
// which is then returned. As such it's guaranteed that nothing is using the buffer
// anymore mutably.
drop(cr);
unsafe {
assert_eq!(
cairo_sys::cairo_surface_get_reference_count(surface.to_raw_none()),
1
);
let buffer = glib::translate::from_glib_none(buffer_ptr);
drop(surface);
buffer
}
};
/* Turn the buffer into a VideoOverlayRectangle, then place
* that into a VideoOverlayComposition and return it.
@ -223,18 +260,14 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
* spaced around the video frame, but we're just outputting 1
* here */
let rect = gst_video::VideoOverlayRectangle::new_raw(
&buffer,
0,
0,
info.width(),
info.height(),
gst_video::VideoOverlayFormatFlags::PREMULTIPLIED_ALPHA,
&buffer,
0, 0, frame_width as u32, frame_height as u32,
gst_video::VideoOverlayFormatFlags::PREMULTIPLIED_ALPHA,
);
gst_video::VideoOverlayComposition::new(Some(&rect))
.unwrap()
}),
);
Some(gst_video::VideoOverlayComposition::new(Some(&rect)).unwrap().to_value())
})
.unwrap();
// Add a signal handler to the overlay's "caps-changed" signal. This could e.g.
// be called when the sink that we render to does not support resizing the image
@ -242,18 +275,18 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
// will then change its caps and we use the notification about this change to
// resize our canvas's size.
// Another possibility for when this might happen is, when our video is a network
// stream that dynamically changes resolution when enough bandwidth is available.
overlay.connect_closure(
"caps-changed",
false,
glib::closure!(move |_overlay: &gst::Element,
caps: &gst::Caps,
_width: u32,
_height: u32| {
// stream that dynamically changes resolution when enough bandwith is available.
overlay
.connect("caps-changed", false, move |args| {
let _overlay = args[0].get::<gst::Element>().unwrap().unwrap();
let caps = args[1].get::<gst::Caps>().unwrap().unwrap();
let mut drawer = drawer.lock().unwrap();
drawer.info = Some(gst_video::VideoInfo::from_caps(caps).unwrap());
}),
);
drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
None
})
.unwrap();
Ok(pipeline)
}
@ -262,10 +295,10 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -274,11 +307,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Null)?;
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
.get_src()
.map(|s| String::from(s.get_path_string()))
.unwrap_or_else(|| String::from("None")),
error: err.get_error().to_string(),
debug: err.get_debug(),
source: err.get_error(),
}
.into());
}
@ -294,12 +328,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
fn example_main() {
match create_pipeline().and_then(main_loop) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -6,10 +6,14 @@
// |-[probe]
// /
// {audiotestsrc} - {fakesink}
#![allow(clippy::question_mark)]
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_audio as gst_audio;
use byte_slice_cast::*;
use gst::prelude::*;
use std::i16;
#[path = "../examples-common.rs"]
mod examples_common;
@ -20,54 +24,52 @@ fn example_main() {
// Parse the pipeline we want to probe from a static in-line string.
// Here we give our audiotestsrc a name, so we can retrieve that element
// from the resulting pipeline.
let pipeline = gst::parse::launch(&format!(
let pipeline = gst::parse_launch(&format!(
"audiotestsrc name=src ! audio/x-raw,format={},channels=1 ! fakesink",
gst_audio::AUDIO_FORMAT_S16
gst_audio::AUDIO_FORMAT_S16.to_str()
))
.unwrap();
let pipeline = pipeline.dynamic_cast::<gst::Pipeline>().unwrap();
// Get the audiotestsrc element from the pipeline that GStreamer
// created for us while parsing the launch syntax above.
let src = pipeline.by_name("src").unwrap();
let src = pipeline.get_by_name("src").unwrap();
// Get the audiotestsrc's src-pad.
let src_pad = src.static_pad("src").unwrap();
let src_pad = src.get_static_pad("src").unwrap();
// Add a probe handler on the audiotestsrc's src-pad.
// This handler gets called for every buffer that passes the pad we probe.
src_pad.add_probe(gst::PadProbeType::BUFFER, |_, probe_info| {
// Interpret the data sent over the pad as one buffer
let Some(buffer) = probe_info.buffer() else {
return gst::PadProbeReturn::Ok;
};
if let Some(gst::PadProbeData::Buffer(ref buffer)) = probe_info.data {
// At this point, buffer is only a reference to an existing memory region somewhere.
// When we want to access its content, we have to map it while requesting the required
// mode of access (read, read/write).
// This type of abstraction is necessary, because the buffer in question might not be
// on the machine's main memory itself, but rather in the GPU's memory.
// So mapping the buffer makes the underlying memory region accessible to us.
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
let map = buffer.map_readable().unwrap();
// At this point, buffer is only a reference to an existing memory region somewhere.
// When we want to access its content, we have to map it while requesting the required
// mode of access (read, read/write).
// This type of abstraction is necessary, because the buffer in question might not be
// on the machine's main memory itself, but rather in the GPU's memory.
// So mapping the buffer makes the underlying memory region accessible to us.
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
let map = buffer.map_readable().unwrap();
// We know what format the data in the memory region has, since we requested
// it by setting the appsink's caps. So what we do here is interpret the
// memory region we mapped as an array of signed 16 bit integers.
let samples = if let Ok(samples) = map.as_slice_of::<i16>() {
samples
} else {
return gst::PadProbeReturn::Ok;
};
// We know what format the data in the memory region has, since we requested
// it by setting the appsink's caps. So what we do here is interpret the
// memory region we mapped as an array of signed 16 bit integers.
let samples = if let Ok(samples) = map.as_slice_of::<i16>() {
samples
} else {
return gst::PadProbeReturn::Ok;
};
// For buffer (= chunk of samples), we calculate the root mean square:
let sum: f64 = samples
.iter()
.map(|sample| {
let f = f64::from(*sample) / f64::from(i16::MAX);
f * f
})
.sum();
let rms = (sum / (samples.len() as f64)).sqrt();
println!("rms: {rms}");
// For buffer (= chunk of samples), we calculate the root mean square:
let sum: f64 = samples
.iter()
.map(|sample| {
let f = f64::from(*sample) / f64::from(i16::MAX);
f * f
})
.sum();
let rms = (sum / (samples.len() as f64)).sqrt();
println!("rms: {}", rms);
}
gst::PadProbeReturn::Ok
});
@ -76,8 +78,8 @@ fn example_main() {
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
let bus = pipeline.bus().unwrap();
for msg in bus.iter_timed(gst::ClockTime::NONE) {
let bus = pipeline.get_bus().unwrap();
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -85,9 +87,9 @@ fn example_main() {
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
break;
}
@ -101,7 +103,7 @@ fn example_main() {
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -11,29 +11,36 @@
// The capsfilter element allows us to dictate the video resolution we want for the
// videotestsrc and the cairooverlay element.
use std::{
ops,
sync::{Arc, Mutex},
};
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_video as gst_video;
use pango::prelude::*;
use std::ops;
use std::sync::{Arc, Mutex};
use anyhow::Error;
use derive_more::{Display, Error};
use gst::prelude::*;
use pango::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[display(fmt = "Missing element {}", _0)]
struct MissingElement(#[error(not(source))] &'static str);
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
source: glib::Error,
}
struct DrawingContext {
layout: LayoutWrapper,
layout: glib::SendUniqueCell<LayoutWrapper>,
info: Option<gst_video::VideoInfo>,
}
@ -44,45 +51,50 @@ impl ops::Deref for LayoutWrapper {
type Target = pango::Layout;
fn deref(&self) -> &pango::Layout {
assert_eq!(self.0.ref_count(), 1);
&self.0
}
}
// SAFETY: We ensure that there are never multiple references to the layout.
unsafe impl Send for LayoutWrapper {}
unsafe impl glib::SendUnique for LayoutWrapper {
fn is_unique(&self) -> bool {
self.0.ref_count() == 1
}
}
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("videotestsrc")
// The videotestsrc supports multiple test patterns. In this example, we will use the
// pattern with a white ball moving around the video's center point.
.property_from_str("pattern", "ball")
.build()?;
let overlay = gst::ElementFactory::make("cairooverlay").build()?;
let pipeline = gst::Pipeline::new(None);
let src = gst::ElementFactory::make("videotestsrc", None)
.map_err(|_| MissingElement("videotestsrc"))?;
let overlay = gst::ElementFactory::make("cairooverlay", None)
.map_err(|_| MissingElement("cairooverlay"))?;
let capsfilter =
gst::ElementFactory::make("capsfilter", None).map_err(|_| MissingElement("capsfilter"))?;
let videoconvert = gst::ElementFactory::make("videoconvert", None)
.map_err(|_| MissingElement("videoconvert"))?;
let sink = gst::ElementFactory::make("autovideosink", None)
.map_err(|_| MissingElement("autovideosink"))?;
pipeline.add_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
gst::Element::link_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
// Plug in a capsfilter element that will force the videotestsrc and the cairooverlay to work
// with images of the size 800x800.
let caps = gst_video::VideoCapsBuilder::new()
.width(800)
.height(800)
let caps = gst::Caps::builder("video/x-raw")
.field("width", &800i32)
.field("height", &800i32)
.build();
let capsfilter = gst::ElementFactory::make("capsfilter")
.property("caps", &caps)
.build()?;
capsfilter.set_property("caps", &caps).unwrap();
let videoconvert = gst::ElementFactory::make("videoconvert").build()?;
let sink = gst::ElementFactory::make("autovideosink").build()?;
pipeline.add_many([&src, &overlay, &capsfilter, &videoconvert, &sink])?;
gst::Element::link_many([&src, &overlay, &capsfilter, &videoconvert, &sink])?;
// The videotestsrc supports multiple test patterns. In this example, we will use the
// pattern with a white ball moving around the video's center point.
src.set_property_from_str("pattern", "ball");
// The PangoFontMap represents the set of fonts available for a particular rendering system.
let fontmap = pangocairo::FontMap::new();
let fontmap = pangocairo::FontMap::new().unwrap();
// Create a new pango layouting context for the fontmap.
let context = fontmap.create_context();
let context = fontmap.create_context().unwrap();
// Create a pango layout object. This object is a string of text we want to layout.
// It is wrapped in a LayoutWrapper (defined above) to be able to send it across threads.
let layout = LayoutWrapper(pango::Layout::new(&context));
@ -102,7 +114,10 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
// interior mutability (see Rust docs). Via this we can get a mutable reference to the contained
// data which is checked at runtime for uniqueness (blocking in case of mutex, panic in case
// of refcell) instead of compile-time (like with normal references).
let drawer = Arc::new(Mutex::new(DrawingContext { layout, info: None }));
let drawer = Arc::new(Mutex::new(DrawingContext {
layout: glib::SendUniqueCell::new(layout).unwrap(),
info: None,
}));
let drawer_clone = drawer.clone();
// Connect to the cairooverlay element's "draw" signal, which is emitted for
@ -112,74 +127,78 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
// passed as array of glib::Value. For a documentation about the actual arguments
// it is always a good idea to either check the element's signals using either
// gst-inspect, or the online documentation.
overlay.connect("draw", false, move |args| {
use std::f64::consts::PI;
overlay
.connect("draw", false, move |args| {
use std::f64::consts::PI;
let drawer = &drawer_clone;
let drawer = drawer.lock().unwrap();
let drawer = &drawer_clone;
let drawer = drawer.lock().unwrap();
// Get the signal's arguments
let _overlay = args[0].get::<gst::Element>().unwrap();
// This is the cairo context. This is the root of all of cairo's
// drawing functionality.
let cr = args[1].get::<cairo::Context>().unwrap();
let timestamp = args[2].get::<gst::ClockTime>().unwrap();
let _duration = args[3].get::<gst::ClockTime>().unwrap();
// Get the signal's arguments
let _overlay = args[0].get::<gst::Element>().unwrap().unwrap();
// This is the cairo context. This is the root of all of cairo's
// drawing functionality.
let cr = args[1].get::<cairo::Context>().unwrap().unwrap();
let timestamp = args[2].get_some::<gst::ClockTime>().unwrap();
let _duration = args[3].get_some::<gst::ClockTime>().unwrap();
let info = drawer.info.as_ref().unwrap();
let layout = &drawer.layout;
let info = drawer.info.as_ref().unwrap();
let layout = drawer.layout.borrow();
let angle = 2.0 * PI * (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
/ (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
let angle = 2.0
* PI
* ((timestamp % (10 * gst::SECOND)).unwrap() as f64
/ (10.0 * gst::SECOND_VAL as f64));
// The image we draw (the text) will be static, but we will change the
// transformation on the drawing context, which rotates and shifts everything
// that we draw afterwards. Like this, we have no complicated calculations
// in the actual drawing below.
// Calling multiple transformation methods after each other will apply the
// new transformation on top. If you repeat the cr.rotate(angle) line below
// this a second time, everything in the canvas will rotate twice as fast.
cr.translate(
f64::from(info.width()) / 2.0,
f64::from(info.height()) / 2.0,
);
cr.rotate(angle);
// This loop will render 10 times the string "GStreamer" in a circle
for i in 0..10 {
// Cairo, like most rendering frameworks, is using a stack for transformations
// with this, we push our current transformation onto this stack - allowing us
// to make temporary changes / render something / and then returning to the
// previous transformations.
cr.save().expect("Failed to save state");
let angle = (360. * f64::from(i)) / 10.0;
let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
cr.set_source_rgb(red, 0.0, 1.0 - red);
cr.rotate(angle * PI / 180.0);
// Update the text layout. This function is only updating pango's internal state.
// So e.g. that after a 90 degree rotation it knows that what was previously going
// to end up as a 200x100 rectangle would now be 100x200.
pangocairo::functions::update_layout(&cr, layout);
let (width, _height) = layout.size();
// Using width and height of the text, we can properly position it within
// our canvas.
cr.move_to(
-(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
-(f64::from(info.height())) / 2.0,
// The image we draw (the text) will be static, but we will change the
// transformation on the drawing context, which rotates and shifts everything
// that we draw afterwards. Like this, we have no complicated calulations
// in the actual drawing below.
// Calling multiple transformation methods after each other will apply the
// new transformation on top. If you repeat the cr.rotate(angle) line below
// this a second time, everything in the canvas will rotate twice as fast.
cr.translate(
f64::from(info.width()) / 2.0,
f64::from(info.height()) / 2.0,
);
// After telling the layout object where to draw itself, we actually tell
// it to draw itself into our cairo context.
pangocairo::functions::show_layout(&cr, layout);
cr.rotate(angle);
// Here we go one step up in our stack of transformations, removing any
// changes we did to them since the last call to cr.save();
cr.restore().expect("Failed to restore state");
}
// This loop will render 10 times the string "GStreamer" in a circle
for i in 0..10 {
// Cairo, like most rendering frameworks, is using a stack for transformations
// with this, we push our current transformation onto this stack - allowing us
// to make temporary changes / render something / and then returning to the
// previous transformations.
cr.save();
None
});
let angle = (360. * f64::from(i)) / 10.0;
let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
cr.set_source_rgb(red, 0.0, 1.0 - red);
cr.rotate(angle * PI / 180.0);
// Update the text layout. This function is only updating pango's internal state.
// So e.g. that after a 90 degree rotation it knows that what was previously going
// to end up as a 200x100 rectangle would now be 100x200.
pangocairo::functions::update_layout(&cr, &**layout);
let (width, _height) = layout.get_size();
// Using width and height of the text, we can properly possition it within
// our canvas.
cr.move_to(
-(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
-(f64::from(info.height())) / 2.0,
);
// After telling the layout object where to draw itself, we actually tell
// it to draw itself into our cairo context.
pangocairo::functions::show_layout(&cr, &**layout);
// Here we go one step up in our stack of transformations, removing any
// changes we did to them since the last call to cr.save();
cr.restore();
}
None
})
.unwrap();
// Add a signal handler to the overlay's "caps-changed" signal. This could e.g.
// be called when the sink that we render to does not support resizing the image
@ -187,16 +206,18 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
// will then change its caps and we use the notification about this change to
// resize our canvas's size.
// Another possibility for when this might happen is, when our video is a network
// stream that dynamically changes resolution when enough bandwidth is available.
overlay.connect("caps-changed", false, move |args| {
let _overlay = args[0].get::<gst::Element>().unwrap();
let caps = args[1].get::<gst::Caps>().unwrap();
// stream that dynamically changes resolution when enough bandwith is available.
overlay
.connect("caps-changed", false, move |args| {
let _overlay = args[0].get::<gst::Element>().unwrap().unwrap();
let caps = args[1].get::<gst::Caps>().unwrap().unwrap();
let mut drawer = drawer.lock().unwrap();
drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
let mut drawer = drawer.lock().unwrap();
drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
None
});
None
})
.unwrap();
Ok(pipeline)
}
@ -205,10 +226,10 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -217,11 +238,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Null)?;
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
.get_src()
.map(|s| String::from(s.get_path_string()))
.unwrap_or_else(|| String::from("None")),
error: err.get_error().to_string(),
debug: err.get_debug(),
source: err.get_error(),
}
.into());
}
@ -237,12 +259,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
fn example_main() {
match create_pipeline().and_then(main_loop) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -1,67 +0,0 @@
// This example shows how to use the GstPlay API.
// The GstPlay API is a convenience API to allow implement playback applications
// without having to write too much code.
// Most of the tasks a play needs to support (such as seeking and switching
// audio / subtitle streams or changing the volume) are all supported by simple
// one-line function calls on the GstPlay.
use std::env;
use anyhow::Error;
#[path = "../examples-common.rs"]
mod examples_common;
use gst_play::{Play, PlayMessage, PlayVideoRenderer};
fn main_loop(uri: &str) -> Result<(), Error> {
gst::init()?;
let play = Play::new(None::<PlayVideoRenderer>);
play.set_uri(Some(uri));
play.play();
let mut result = Ok(());
for msg in play.message_bus().iter_timed(gst::ClockTime::NONE) {
match PlayMessage::parse(&msg) {
Ok(PlayMessage::EndOfStream) => {
play.stop();
break;
}
Ok(PlayMessage::Error { error, details: _ }) => {
result = Err(error);
play.stop();
break;
}
Ok(_) => (),
Err(_) => unreachable!(),
}
}
// Set the message bus to flushing to ensure that all pending messages are dropped and there
// are no further references to the play instance.
play.message_bus().set_flushing(true);
result.map_err(|e| e.into())
}
fn example_main() {
let args: Vec<_> = env::args().collect();
let uri: &str = if args.len() == 2 {
args[1].as_ref()
} else {
println!("Usage: play uri");
std::process::exit(-1)
};
match main_loop(uri) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
examples_common::run(example_main);
}

View file

@ -9,10 +9,11 @@
// Much of the playbin's behavior can be controlled by so-called flags, as well
// as the playbin's properties and signals.
use std::env;
extern crate gstreamer as gst;
use gst::prelude::*;
use std::env;
#[path = "../examples-common.rs"]
mod examples_common;
@ -28,22 +29,22 @@ fn example_main() {
};
// Create a new playbin element, and tell it what uri to play back.
let playbin = gst::ElementFactory::make("playbin")
.property("uri", uri)
.build()
let playbin = gst::ElementFactory::make("playbin", None).unwrap();
playbin
.set_property("uri", &glib::Value::from(uri))
.unwrap();
// For flags handling
// With flags, one can configure playbin's behavior such as whether it
// should play back contained video streams, or if it should render subtitles.
// let flags = playbin.property_value("flags");
// let flags_class = FlagsClass::with_type(flags.type_()).unwrap();
// let flags = playbin.get_property("flags").unwrap();
// let flags_class = FlagsClass::new(flags.type_()).unwrap();
// let flags = flags_class.builder_with_value(flags).unwrap()
// .unset_by_nick("text")
// .unset_by_nick("video")
// .build()
// .unwrap();
// playbin.set_property_from_value("flags", &flags);
// playbin.set_property("flags", &flags).unwrap();
// The playbin also provides any kind of metadata that it found in the played stream.
// For this, the playbin provides signals notifying about changes in the metadata.
@ -52,59 +53,64 @@ fn example_main() {
// - Live streams (such as internet radios) update this metadata during the stream
// Note that this signal will be emitted from the streaming threads usually,
// not the application's threads!
playbin.connect("audio-tags-changed", false, |values| {
// The metadata of any of the contained audio streams changed
// In the case of a live-stream from an internet radio, this could for example
// mark the beginning of a new track, or a new DJ.
let playbin = values[0]
.get::<glib::Object>()
.expect("playbin \"audio-tags-changed\" signal values[1]");
// This gets the index of the stream that changed. This is necessary, since
// there could e.g. be multiple audio streams (english, spanish, ...).
let idx = values[1]
.get::<i32>()
.expect("playbin \"audio-tags-changed\" signal values[1]");
playbin
.connect("audio-tags-changed", false, |values| {
// The metadata of any of the contained audio streams changed
// In the case of a live-stream from an internet radio, this could for example
// mark the beginning of a new track, or a new DJ.
let playbin = values[0]
.get::<glib::Object>()
.expect("playbin \"audio-tags-changed\" signal values[1]")
.unwrap();
// This gets the index of the stream that changed. This is neccessary, since
// there could e.g. be multiple audio streams (english, spanish, ...).
let idx = values[1]
.get_some::<i32>()
.expect("playbin \"audio-tags-changed\" signal values[1]");
println!("audio tags of audio stream {idx} changed:");
println!("audio tags of audio stream {} changed:", idx);
// HELP: is this correct?
// We were only notified about the change of metadata. If we want to do
// something with it, we first need to actually query the metadata from the playbin.
// We do this by facilliating the get-audio-tags action-signal on playbin.
// Sending an action-signal to an element essentially is a function call on the element.
// It is done that way, because elements do not have their own function API, they are
// relying on GStreamer and GLib's API. The only way an element can communicate with an
// application is via properties, signals or action signals (or custom messages, events, queries).
// So what the following code does, is essentially asking playbin to tell us its already
// internally stored tag list for this stream index.
let tags = playbin.emit_by_name::<Option<gst::TagList>>("get-audio-tags", &[&idx]);
// HELP: is this correct?
// We were only notified about the change of metadata. If we want to do
// something with it, we first need to actually query the metadata from the playbin.
// We do this by facilliating the get-audio-tags action-signal on playbin.
// Sending an action-signal to an element essentially is a function call on the element.
// It is done that way, because elements do not have their own function API, they are
// relying on GStreamer and GLib's API. The only way an element can communicate with an
// application is via properties, signals or action signals (or custom messages, events, queries).
// So what the following code does, is essentially asking playbin to tell us its already
// internally stored tag list for this stream index.
let tags = playbin
.emit("get-audio-tags", &[&idx.to_value()])
.unwrap()
.unwrap();
let tags = tags.get::<gst::TagList>().expect("tags").unwrap();
if let Some(tags) = tags {
if let Some(artist) = tags.get::<gst::tags::Artist>() {
println!(" Artist: {}", artist.get());
println!(" Artist: {}", artist.get().unwrap());
}
if let Some(title) = tags.get::<gst::tags::Title>() {
println!(" Title: {}", title.get());
println!(" Title: {}", title.get().unwrap());
}
if let Some(album) = tags.get::<gst::tags::Album>() {
println!(" Album: {}", album.get());
println!(" Album: {}", album.get().unwrap());
}
}
None
});
None
})
.unwrap();
// The playbin element itself is a playbin, so it can be used as one, despite being
// created from an element factory.
let bus = playbin.bus().unwrap();
let bus = playbin.get_bus().unwrap();
playbin
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -112,17 +118,20 @@ fn example_main() {
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
break;
}
MessageView::StateChanged(state_changed) =>
// We are only interested in state-changed messages from playbin
{
if state_changed.src().map(|s| s == &playbin).unwrap_or(false)
&& state_changed.current() == gst::State::Playing
if state_changed
.get_src()
.map(|s| s == playbin)
.unwrap_or(false)
&& state_changed.get_current() == gst::State::Playing
{
// Generate a dot graph of the pipeline to GST_DEBUG_DUMP_DOT_DIR if defined
let bin_ref = playbin.downcast_ref::<gst::Bin>().unwrap();
@ -140,7 +149,7 @@ fn example_main() {
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -5,13 +5,15 @@
// audio / subtitle streams or changing the volume) are all supported by simple
// one-line function calls on the GstPlayer.
use std::{
env,
sync::{Arc, Mutex},
};
extern crate gstreamer as gst;
use gst::prelude::*;
extern crate gstreamer_player as gst_player;
use std::env;
use std::sync::{Arc, Mutex};
use anyhow::Error;
use gst::prelude::*;
#[allow(unused_imports)]
#[path = "../examples-common.rs"]
@ -24,12 +26,12 @@ fn main_loop(uri: &str) -> Result<(), Error> {
let dispatcher = gst_player::PlayerGMainContextSignalDispatcher::new(None);
let player = gst_player::Player::new(
None::<gst_player::PlayerVideoRenderer>,
Some(dispatcher.upcast::<gst_player::PlayerSignalDispatcher>()),
None,
Some(&dispatcher.upcast::<gst_player::PlayerSignalDispatcher>()),
);
// Tell the player what uri to play.
player.set_uri(Some(uri));
player.set_uri(uri);
let error = Arc::new(Mutex::new(Ok(())));
@ -76,12 +78,12 @@ fn example_main() {
match main_loop(uri) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -12,10 +12,12 @@
// For convenience, the API has a set of pre-defined queries, but also
// allows custom queries (which can be defined and used by your own elements).
use std::env;
extern crate gstreamer as gst;
use gst::prelude::*;
use std::convert::TryInto;
use std::env;
#[path = "../examples-common.rs"]
mod examples_common;
@ -28,8 +30,8 @@ fn example_main() {
let main_loop = glib::MainLoop::new(None, false);
// Let GStreamer create a pipeline from the parsed launch syntax on the cli.
let pipeline = gst::parse::launch(&pipeline_str).unwrap();
let bus = pipeline.bus().unwrap();
let pipeline = gst::parse_launch(&pipeline_str).unwrap();
let bus = pipeline.get_bus().unwrap();
pipeline
.set_state(gst::State::Playing)
@ -50,19 +52,20 @@ fn example_main() {
let timeout_id = glib::timeout_add_seconds(1, move || {
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
// we moved into this callback.
let Some(pipeline) = pipeline_weak.upgrade() else {
return glib::ControlFlow::Break;
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return glib::Continue(true),
};
//let pos = pipeline.query_position(gst::Format::Time).unwrap_or(-1);
//let dur = pipeline.query_duration(gst::Format::Time).unwrap_or(-1);
let pos: Option<gst::ClockTime> = {
let pos: gst::ClockTime = {
// Create a new position query and send it to the pipeline.
// This will traverse all elements in the pipeline, until one feels
// capable of answering the query.
let mut q = gst::query::Position::new(gst::Format::Time);
if pipeline.query(&mut q) {
Some(q.result())
Some(q.get_result())
} else {
None
}
@ -70,13 +73,13 @@ fn example_main() {
.and_then(|pos| pos.try_into().ok())
.unwrap();
let dur: Option<gst::ClockTime> = {
let dur: gst::ClockTime = {
// Create a new duration query and send it to the pipeline.
// This will traverse all elements in the pipeline, until one feels
// capable of answering the query.
let mut q = gst::query::Duration::new(gst::Format::Time);
if pipeline.query(&mut q) {
Some(q.result())
Some(q.get_result())
} else {
None
}
@ -84,37 +87,36 @@ fn example_main() {
.and_then(|dur| dur.try_into().ok())
.unwrap();
println!("{} / {}", pos.display(), dur.display());
println!("{} / {}", pos, dur);
glib::ControlFlow::Continue
glib::Continue(true)
});
// Need to move a new reference into the closure.
let main_loop_clone = main_loop.clone();
//bus.add_signal_watch();
//bus.connect_message(None, move |_, msg| {
let _bus_watch = bus
.add_watch(move |_, msg| {
use gst::MessageView;
//bus.connect_message(move |_, msg| {
bus.add_watch(move |_, msg| {
use gst::MessageView;
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => main_loop.quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.src().map(|s| s.path_string()),
err.error(),
err.debug()
);
main_loop.quit();
}
_ => (),
};
let main_loop = &main_loop_clone;
match msg.view() {
MessageView::Eos(..) => main_loop.quit(),
MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
main_loop.quit();
}
_ => (),
};
glib::ControlFlow::Continue
})
.expect("Failed to add bus watch");
glib::Continue(true)
})
.expect("Failed to add bus watch");
main_loop.run();
@ -122,11 +124,12 @@ fn example_main() {
.set_state(gst::State::Null)
.expect("Unable to set the pipeline to the `Null` state");
timeout_id.remove();
bus.remove_watch().unwrap();
glib::source_remove(timeout_id);
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

View file

@ -1,6 +1,8 @@
use std::env;
extern crate gstreamer as gst;
use gst::gst_element_error;
use gst::prelude::*;
use gst::{element_error, prelude::*};
use std::env;
#[path = "../examples-common.rs"]
mod examples_common;
@ -9,54 +11,69 @@ use anyhow::Error;
use derive_more::{Display, Error};
#[derive(Debug, Display, Error)]
#[display(fmt = "No such pad {_0} in {_1}")]
#[display(fmt = "Missing element {}", _0)]
struct MissingElement(#[error(not(source))] &'static str);
#[derive(Debug, Display, Error)]
#[display(fmt = "No such pad {} in {}", _0, _1)]
struct NoSuchPad(#[error(not(source))] &'static str, String);
#[derive(Debug, Display, Error)]
#[display(fmt = "Unknown payload type {_0}")]
#[display(fmt = "Unknown payload type {}", _0)]
struct UnknownPT(#[error(not(source))] u32);
#[derive(Debug, Display, Error)]
#[display(fmt = "Usage: {_0} (play | record) DROP_PROBABILITY")]
#[display(fmt = "Usage: {} (play | record) DROP_PROBABILITY", _0)]
struct UsageError(#[error(not(source))] String);
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
source: glib::Error,
}
fn static_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.static_pad(pad_name) {
fn make_element(
factory_name: &'static str,
element_name: Option<&str>,
) -> Result<gst::Element, Error> {
match gst::ElementFactory::make(factory_name, element_name) {
Ok(elem) => Ok(elem),
Err(_) => Err(Error::from(MissingElement(factory_name))),
}
}
fn get_static_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.get_static_pad(pad_name) {
Some(pad) => Ok(pad),
None => {
let element_name = element.name();
let element_name = element.get_name();
Err(Error::from(NoSuchPad(pad_name, element_name.to_string())))
}
}
}
fn request_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.request_pad_simple(pad_name) {
fn get_request_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.get_request_pad(pad_name) {
Some(pad) => Ok(pad),
None => {
let element_name = element.name();
let element_name = element.get_name();
Err(Error::from(NoSuchPad(pad_name, element_name.to_string())))
}
}
}
fn connect_rtpbin_srcpad(src_pad: &gst::Pad, sink: &gst::Element) -> Result<(), Error> {
let name = src_pad.name();
let name = src_pad.get_name();
let split_name = name.split('_');
let split_name = split_name.collect::<Vec<&str>>();
let pt = split_name[5].parse::<u32>()?;
match pt {
96 => {
let sinkpad = static_pad(sink, "sink")?;
let sinkpad = get_static_pad(sink, "sink")?;
src_pad.link(&sinkpad)?;
Ok(())
}
@ -65,11 +82,14 @@ fn connect_rtpbin_srcpad(src_pad: &gst::Pad, sink: &gst::Element) -> Result<(),
}
fn make_fec_decoder(rtpbin: &gst::Element, sess_id: u32) -> Result<gst::Element, Error> {
let internal_storage = rtpbin.emit_by_name::<glib::Object>("get-internal-storage", &[&sess_id]);
let fecdec = gst::ElementFactory::make("rtpulpfecdec")
.property("storage", &internal_storage)
.property("pt", 100u32)
.build()?;
let fecdec = make_element("rtpulpfecdec", None)?;
let internal_storage = rtpbin
.emit("get-internal-storage", &[&sess_id.to_value()])
.unwrap()
.unwrap();
fecdec.set_property("storage", &internal_storage.to_value())?;
fecdec.set_property("pt", &100u32.to_value())?;
Ok(fecdec)
}
@ -85,55 +105,34 @@ fn example_main() -> Result<(), Error> {
let drop_probability = args[2].parse::<f32>()?;
let pipeline = gst::Pipeline::default();
let pipeline = gst::Pipeline::new(None);
let src = make_element("udpsrc", None)?;
let netsim = make_element("netsim", None)?;
let rtpbin = make_element("rtpbin", None)?;
let depay = make_element("rtpvp8depay", None)?;
let dec = make_element("vp8dec", None)?;
let conv = make_element("videoconvert", None)?;
let scale = make_element("videoscale", None)?;
let filter = make_element("capsfilter", None)?;
let rtp_caps = gst::Caps::builder("application/x-rtp")
.field("clock-rate", 90000i32)
.build();
let video_caps = gst_video::VideoCapsBuilder::new()
.width(1920)
.height(1080)
.build();
let src = gst::ElementFactory::make("udpsrc")
.property("address", "127.0.0.1")
.property("caps", &rtp_caps)
.build()?;
let netsim = gst::ElementFactory::make("netsim")
.property("drop-probability", drop_probability)
.build()?;
let rtpbin = gst::ElementFactory::make("rtpbin")
.property("do-lost", true)
.build()?;
let depay = gst::ElementFactory::make("rtpvp8depay").build()?;
let dec = gst::ElementFactory::make("vp8dec").build()?;
let conv = gst::ElementFactory::make("videoconvert").build()?;
let scale = gst::ElementFactory::make("videoscale").build()?;
let filter = gst::ElementFactory::make("capsfilter")
.property("caps", &video_caps)
.build()?;
pipeline.add_many([&src, &netsim, &rtpbin, &depay, &dec, &conv, &scale, &filter])?;
gst::Element::link_many([&depay, &dec, &conv, &scale, &filter])?;
pipeline.add_many(&[&src, &netsim, &rtpbin, &depay, &dec, &conv, &scale, &filter])?;
gst::Element::link_many(&[&depay, &dec, &conv, &scale, &filter])?;
match args[1].as_str() {
"play" => {
let sink = gst::ElementFactory::make("autovideosink").build()?;
let sink = make_element("autovideosink", None)?;
pipeline.add(&sink)?;
filter.link(&sink)?;
}
"record" => {
let enc = gst::ElementFactory::make("x264enc")
.property_from_str("tune", "zerolatency")
.build()?;
let mux = gst::ElementFactory::make("matroskamux").build()?;
let sink = gst::ElementFactory::make("filesink")
.property("location", "out.mkv")
.build()?;
let enc = make_element("x264enc", None)?;
let mux = make_element("matroskamux", None)?;
let sink = make_element("filesink", None)?;
pipeline.add_many([&enc, &mux, &sink])?;
gst::Element::link_many([&filter, &enc, &mux, &sink])?;
pipeline.add_many(&[&enc, &mux, &sink])?;
gst::Element::link_many(&[&filter, &enc, &mux, &sink])?;
sink.set_property("location", &"out.mkv".to_value())?;
enc.set_property_from_str("tune", "zerolatency");
eprintln!("Recording to out.mkv");
}
_ => return Err(Error::from(UsageError(args[0].clone()))),
@ -144,49 +143,59 @@ fn example_main() -> Result<(), Error> {
rtpbin.connect("new-storage", false, |values| {
let storage = values[1]
.get::<gst::Element>()
.expect("rtpbin \"new-storage\" signal values[1]");
storage.set_property("size-time", 250_000_000u64);
.expect("rtpbin \"new-storage\" signal values[1]")
.expect("rtpbin \"new-storage\" signal values[1]: no `Element`");
storage
.set_property("size-time", &250_000_000u64.to_value())
.unwrap();
None
});
})?;
rtpbin.connect("request-pt-map", false, |values| {
let pt = values[2]
.get::<u32>()
.get_some::<u32>()
.expect("rtpbin \"new-storage\" signal values[2]");
match pt {
100 => Some(
gst::Caps::builder("application/x-rtp")
.field("media", "video")
.field("clock-rate", 90000i32)
.field("is-fec", true)
.build()
.to_value(),
gst::Caps::new_simple(
"application/x-rtp",
&[
("media", &"video"),
("clock-rate", &90000i32),
("is-fec", &true),
],
)
.to_value(),
),
96 => Some(
gst::Caps::builder("application/x-rtp")
.field("media", "video")
.field("clock-rate", 90000i32)
.field("encoding-name", "VP8")
.build()
.to_value(),
gst::Caps::new_simple(
"application/x-rtp",
&[
("media", &"video"),
("clock-rate", &90000i32),
("encoding-name", &"VP8"),
],
)
.to_value(),
),
_ => None,
}
});
})?;
rtpbin.connect("request-fec-decoder", false, |values| {
let rtpbin = values[0]
.get::<gst::Element>()
.expect("rtpbin \"request-fec-decoder\" signal values[0]");
.expect("rtpbin \"request-fec-decoder\" signal values[0]")
.expect("rtpbin \"request-fec-decoder\" signal values[0]: no `Element`");
let sess_id = values[1]
.get::<u32>()
.get_some::<u32>()
.expect("rtpbin \"request-fec-decoder\" signal values[1]");
match make_fec_decoder(&rtpbin, sess_id) {
Ok(elem) => Some(elem.to_value()),
Err(err) => {
element_error!(
gst_element_error!(
rtpbin,
gst::LibraryError::Failed,
("Failed to make FEC decoder"),
@ -195,22 +204,23 @@ fn example_main() -> Result<(), Error> {
None
}
}
});
})?;
let srcpad = static_pad(&netsim, "src")?;
let sinkpad = request_pad(&rtpbin, "recv_rtp_sink_0")?;
let srcpad = get_static_pad(&netsim, "src")?;
let sinkpad = get_request_pad(&rtpbin, "recv_rtp_sink_0")?;
srcpad.link(&sinkpad)?;
let depay_weak = depay.downgrade();
rtpbin.connect_pad_added(move |rtpbin, src_pad| {
let Some(depay) = depay_weak.upgrade() else {
return;
let depay = match depay_weak.upgrade() {
Some(depay) => depay,
None => return,
};
match connect_rtpbin_srcpad(src_pad, &depay) {
match connect_rtpbin_srcpad(&src_pad, &depay) {
Ok(_) => (),
Err(err) => {
element_error!(
gst_element_error!(
rtpbin,
gst::LibraryError::Failed,
("Failed to link srcpad"),
@ -220,15 +230,26 @@ fn example_main() -> Result<(), Error> {
}
});
let rtp_caps = gst::Caps::new_simple("application/x-rtp", &[("clock-rate", &90000i32)]);
let video_caps =
gst::Caps::new_simple("video/x-raw", &[("width", &1920i32), ("height", &1080i32)]);
src.set_property("address", &"127.0.0.1".to_value())?;
src.set_property("caps", &rtp_caps.to_value())?;
netsim.set_property("drop-probability", &drop_probability.to_value())?;
rtpbin.set_property("do-lost", &true.to_value())?;
filter.set_property("caps", &video_caps.to_value())?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -240,19 +261,24 @@ fn example_main() -> Result<(), Error> {
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
.get_src()
.map(|s| String::from(s.get_path_string()))
.unwrap_or_else(|| String::from("None")),
error: err.get_error().to_string(),
debug: err.get_debug(),
source: err.get_error(),
}
.into());
}
MessageView::StateChanged(s) => {
if let Some(element) = msg.src() {
if element == &pipeline && s.current() == gst::State::Playing {
if let Some(element) = msg.get_src() {
if element == pipeline && s.get_current() == gst::State::Playing {
eprintln!("PLAYING");
pipeline.debug_to_dot_file(gst::DebugGraphDetails::all(), "client-playing");
gst::debug_bin_to_dot_file(
&pipeline,
gst::DebugGraphDetails::all(),
"client-playing",
);
}
}
}
@ -270,6 +296,6 @@ fn example_main() -> Result<(), Error> {
fn main() {
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}

View file

@ -1,4 +1,6 @@
use gst::{element_error, prelude::*};
extern crate gstreamer as gst;
use gst::gst_element_error;
use gst::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
@ -9,54 +11,69 @@ use anyhow::Error;
use derive_more::{Display, Error};
#[derive(Debug, Display, Error)]
#[display(fmt = "No such pad {_0} in {_1}")]
#[display(fmt = "Missing element {}", _0)]
struct MissingElement(#[error(not(source))] &'static str);
#[derive(Debug, Display, Error)]
#[display(fmt = "No such pad {} in {}", _0, _1)]
struct NoSuchPad(&'static str, String);
#[derive(Debug, Display, Error)]
#[display(fmt = "Usage: {_0} URI FEC_PERCENTAGE")]
#[display(fmt = "Usage: {} URI FEC_PERCENTAGE", _0)]
struct UsageError(#[error(not(source))] String);
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
source: glib::Error,
}
fn static_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.static_pad(pad_name) {
fn make_element(
factory_name: &'static str,
element_name: Option<&str>,
) -> Result<gst::Element, Error> {
match gst::ElementFactory::make(factory_name, element_name) {
Ok(elem) => Ok(elem),
Err(_) => Err(Error::from(MissingElement(factory_name))),
}
}
fn get_static_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.get_static_pad(pad_name) {
Some(pad) => Ok(pad),
None => {
let element_name = element.name();
let element_name = element.get_name();
Err(Error::from(NoSuchPad(pad_name, element_name.to_string())))
}
}
}
fn request_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.request_pad_simple(pad_name) {
fn get_request_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
match element.get_request_pad(pad_name) {
Some(pad) => Ok(pad),
None => {
let element_name = element.name();
let element_name = element.get_name();
Err(Error::from(NoSuchPad(pad_name, element_name.to_string())))
}
}
}
fn connect_decodebin_pad(src_pad: &gst::Pad, sink: &gst::Element) -> Result<(), Error> {
let sinkpad = static_pad(sink, "sink")?;
let sinkpad = get_static_pad(&sink, "sink")?;
src_pad.link(&sinkpad)?;
Ok(())
}
fn make_fec_encoder(fec_percentage: u32) -> Result<gst::Element, Error> {
let fecenc = gst::ElementFactory::make("rtpulpfecenc")
.property("pt", 100u32)
.property("multipacket", true)
.property("percentage", fec_percentage)
.build()?;
let fecenc = make_element("rtpulpfecenc", None)?;
fecenc.set_property("pt", &100u32.to_value())?;
fecenc.set_property("multipacket", &true.to_value())?;
fecenc.set_property("percentage", &fec_percentage.to_value())?;
Ok(fecenc)
}
@ -73,33 +90,17 @@ fn example_main() -> Result<(), Error> {
let uri = &args[1];
let fec_percentage = args[2].parse::<u32>()?;
let video_caps = gst::Caps::builder("video/x-raw").build();
let pipeline = gst::Pipeline::new(None);
let src = make_element("uridecodebin", None)?;
let conv = make_element("videoconvert", None)?;
let q1 = make_element("queue", None)?;
let enc = make_element("vp8enc", None)?;
let q2 = make_element("queue", None)?;
let pay = make_element("rtpvp8pay", None)?;
let rtpbin = make_element("rtpbin", None)?;
let sink = make_element("udpsink", None)?;
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("uridecodebin")
.property_from_str("pattern", "ball")
.property("expose-all-streams", false)
.property("caps", video_caps)
.property("uri", uri)
.build()?;
let conv = gst::ElementFactory::make("videoconvert").build()?;
let q1 = gst::ElementFactory::make("queue").build()?;
let enc = gst::ElementFactory::make("vp8enc")
.property("keyframe-max-dist", 30i32)
.property("threads", 12i32)
.property("cpu-used", -16i32)
.property("deadline", 1i64)
.property_from_str("error-resilient", "default")
.build()?;
let q2 = gst::ElementFactory::make("queue").build()?;
let pay = gst::ElementFactory::make("rtpvp8pay").build()?;
let rtpbin = gst::ElementFactory::make("rtpbin").build()?;
let sink = gst::ElementFactory::make("udpsink")
.property("host", "127.0.0.1")
.property("sync", true)
.build()?;
pipeline.add_many([&src, &conv, &q1, &enc, &q2, &pay, &rtpbin, &sink])?;
pipeline.add_many(&[&src, &conv, &q1, &enc, &q2, &pay, &rtpbin, &sink])?;
conv.link(&q1)?;
q1.link(&enc)?;
@ -109,12 +110,13 @@ fn example_main() -> Result<(), Error> {
rtpbin.connect("request-fec-encoder", false, move |values| {
let rtpbin = values[0]
.get::<gst::Element>()
.expect("rtpbin \"request-fec-encoder\" signal values[0]");
.expect("rtpbin \"request-fec-encoder\" signal values[0]")
.expect("rtpbin \"request-fec-encoder\" signal values[0]: no `Element`");
match make_fec_encoder(fec_percentage) {
Ok(elem) => Some(elem.to_value()),
Err(err) => {
element_error!(
gst_element_error!(
rtpbin,
gst::LibraryError::Failed,
("Failed to make FEC encoder"),
@ -123,21 +125,21 @@ fn example_main() -> Result<(), Error> {
None
}
}
});
})?;
let srcpad = static_pad(&q2, "src")?;
let sinkpad = request_pad(&rtpbin, "send_rtp_sink_0")?;
let srcpad = get_static_pad(&q2, "src")?;
let sinkpad = get_request_pad(&rtpbin, "send_rtp_sink_0")?;
srcpad.link(&sinkpad)?;
let srcpad = static_pad(&rtpbin, "send_rtp_src_0")?;
let sinkpad = static_pad(&sink, "sink")?;
let srcpad = get_static_pad(&rtpbin, "send_rtp_src_0")?;
let sinkpad = get_static_pad(&sink, "sink")?;
srcpad.link(&sinkpad)?;
src.connect_pad_added(
move |decodebin, src_pad| match connect_decodebin_pad(src_pad, &conv) {
move |decodebin, src_pad| match connect_decodebin_pad(&src_pad, &conv) {
Ok(_) => (),
Err(err) => {
element_error!(
gst_element_error!(
decodebin,
gst::LibraryError::Failed,
("Failed to link decodebin srcpad"),
@ -147,15 +149,29 @@ fn example_main() -> Result<(), Error> {
},
);
let video_caps = gst::Caps::new_simple("video/x-raw", &[]);
src.set_property_from_str("pattern", "ball");
sink.set_property("host", &"127.0.0.1".to_value())?;
sink.set_property("sync", &true.to_value())?;
enc.set_property("keyframe-max-dist", &30i32.to_value())?;
enc.set_property("threads", &12i32.to_value())?;
enc.set_property("cpu-used", &(-16i32).to_value())?;
enc.set_property("deadline", &1i64.to_value())?;
enc.set_property_from_str("error-resilient", "default");
src.set_property("expose-all-streams", &false.to_value())?;
src.set_property("caps", &video_caps.to_value())?;
src.set_property("uri", &uri.to_value())?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -167,19 +183,24 @@ fn example_main() -> Result<(), Error> {
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
.get_src()
.map(|s| String::from(s.get_path_string()))
.unwrap_or_else(|| String::from("None")),
error: err.get_error().to_string(),
debug: err.get_debug(),
source: err.get_error(),
}
.into());
}
MessageView::StateChanged(s) => {
if let Some(element) = msg.src() {
if element == &pipeline && s.current() == gst::State::Playing {
if let Some(element) = msg.get_src() {
if element == pipeline && s.get_current() == gst::State::Playing {
eprintln!("PLAYING");
pipeline.debug_to_dot_file(gst::DebugGraphDetails::all(), "server-playing");
gst::debug_bin_to_dot_file(
&pipeline,
gst::DebugGraphDetails::all(),
"server-playing",
);
}
}
}
@ -197,6 +218,6 @@ fn example_main() -> Result<(), Error> {
fn main() {
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}

View file

@ -1,223 +0,0 @@
// This example demonstrates how to set up a rtsp server using GStreamer
// and extending the default auth module behaviour by subclassing RTSPAuth
// For this, the example creates a videotestsrc pipeline manually to be used
// by the RTSP server for providing data
#![allow(clippy::non_send_fields_in_send_ty)]
use anyhow::Error;
use derive_more::{Display, Error};
use gst_rtsp_server::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
#[derive(Debug, Display, Error)]
#[display(fmt = "Could not get mount points")]
struct NoMountPoints;
fn main_loop() -> Result<(), Error> {
let main_loop = glib::MainLoop::new(None, false);
let server = gst_rtsp_server::RTSPServer::new();
// We create our custom auth module.
// The job of the auth module is to authenticate users and authorize
// factories access/construction.
let auth = auth::Auth::default();
server.set_auth(Some(&auth));
// Much like HTTP servers, RTSP servers have multiple endpoints that
// provide different streams. Here, we ask our server to give
// us a reference to his list of endpoints, so we can add our
// test endpoint, providing the pipeline from the cli.
let mounts = server.mount_points().ok_or(NoMountPoints)?;
// Next, we create a factory for the endpoint we want to create.
// The job of the factory is to create a new pipeline for each client that
// connects, or (if configured to do so) to reuse an existing pipeline.
let factory = gst_rtsp_server::RTSPMediaFactory::new();
// Here we tell the media factory the media we want to serve.
// This is done in the launch syntax. When the first client connects,
// the factory will use this syntax to create a new pipeline instance.
factory.set_launch("( videotestsrc ! vp8enc ! rtpvp8pay name=pay0 )");
// This setting specifies whether each connecting client gets the output
// of a new instance of the pipeline, or whether all connected clients share
// the output of the same pipeline.
// If you want to stream a fixed video you have stored on the server to any
// client, you would not set this to shared here (since every client wants
// to start at the beginning of the video). But if you want to distribute
// a live source, you will probably want to set this to shared, to save
// computing and memory capacity on the server.
factory.set_shared(true);
// Now we add a new mount-point and tell the RTSP server to serve the content
// provided by the factory we configured above, when a client connects to
// this specific path.
mounts.add_factory("/test", factory);
// Attach the server to our main context.
// A main context is the thing where other stuff is registering itself for its
// events (e.g. sockets, GStreamer bus, ...) and the main loop is something that
// polls the main context for its events and dispatches them to whoever is
// interested in them. In this example, we only do have one, so we can
// leave the context parameter empty, it will automatically select
// the default one.
let id = server.attach(None)?;
println!(
"Stream ready at rtsp://127.0.0.1:{}/test",
server.bound_port()
);
println!("user admin/password can access stream");
println!("user demo/demo passes authentication but receives 404");
println!("other users do not pass pass authentication and receive 401");
// Start the mainloop. From this point on, the server will start to serve
// our quality content to connecting clients.
main_loop.run();
id.remove();
Ok(())
}
// Our custom auth module
mod auth {
// In the imp submodule we include the actual implementation
mod imp {
use gst_rtsp::{RTSPHeaderField, RTSPStatusCode};
use gst_rtsp_server::{prelude::*, subclass::prelude::*, RTSPContext};
// This is the private data of our auth
#[derive(Default)]
pub struct Auth;
impl Auth {
// Simulate external auth validation and user extraction
// authorized users are admin/password and demo/demo
fn external_auth(&self, auth: &str) -> Option<String> {
if let Ok(decoded) = data_encoding::BASE64.decode(auth.as_bytes()) {
if let Ok(decoded) = std::str::from_utf8(&decoded) {
let tokens = decoded.split(':').collect::<Vec<_>>();
if tokens == vec!["admin", "password"] || tokens == vec!["demo", "demo"] {
return Some(tokens[0].into());
}
}
}
None
}
// Simulate external role check
// admin user can construct and access media factory
fn external_access_check(&self, user: &str) -> bool {
user == "admin"
}
}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for Auth {
const NAME: &'static str = "RsRTSPAuth";
type Type = super::Auth;
type ParentType = gst_rtsp_server::RTSPAuth;
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for Auth {}
// Implementation of gst_rtsp_server::RTSPAuth virtual methods
impl RTSPAuthImpl for Auth {
fn authenticate(&self, ctx: &RTSPContext) -> bool {
// authenticate should always be called with a valid context request
let req = ctx
.request()
.expect("Context without request. Should not happen !");
if let Some(auth_credentials) = req.parse_auth_credentials().first() {
if let Some(authorization) = auth_credentials.authorization() {
if let Some(user) = self.external_auth(authorization) {
// Update context token with authenticated username
ctx.set_token(
gst_rtsp_server::RTSPToken::builder()
.field("user", user)
.build(),
);
return true;
}
}
}
false
}
fn check(&self, ctx: &RTSPContext, role: &glib::GString) -> bool {
// We only check media factory access
if !role.starts_with("auth.check.media.factory") {
return true;
}
if ctx.token().is_none() {
// If we do not have a context token yet, check if there are any auth credentials in request
if !self.authenticate(ctx) {
// If there were no credentials, send a "401 Unauthorized" response
if let Some(resp) = ctx.response() {
resp.init_response(RTSPStatusCode::Unauthorized, ctx.request());
resp.add_header(
RTSPHeaderField::WwwAuthenticate,
"Basic realm=\"CustomRealm\"",
);
if let Some(client) = ctx.client() {
client.send_message(resp, ctx.session());
}
}
return false;
}
}
if let Some(token) = ctx.token() {
// If we already have a user token...
if self.external_access_check(&token.string("user").unwrap_or_default()) {
// grant access if user may access factory
return true;
} else {
// send a "404 Not Found" response if user may not access factory
if let Some(resp) = ctx.response() {
resp.init_response(RTSPStatusCode::NotFound, ctx.request());
if let Some(client) = ctx.client() {
client.send_message(resp, ctx.session());
}
}
}
}
false
}
}
}
// This here defines the public interface of our auth and implements
// the corresponding traits so that it behaves like any other RTSPAuth
glib::wrapper! {
pub struct Auth(ObjectSubclass<imp::Auth>) @extends gst_rtsp_server::RTSPAuth;
}
impl Default for Auth {
// Creates a new instance of our auth
fn default() -> Self {
glib::Object::new()
}
}
}
fn example_main() -> Result<(), Error> {
gst::init()?;
main_loop()
}
fn main() {
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
}
}

View file

@ -4,11 +4,21 @@
// send to the server. For this, the launch syntax pipeline, that is passed
// to this example's cli is spawned and the client's media is streamed into it.
extern crate gstreamer as gst;
extern crate gstreamer_rtsp as gst_rtsp;
extern crate gstreamer_rtsp_server as gst_rtsp_server;
extern crate gstreamer_rtsp_server_sys as gst_rtsp_server_sys;
use std::env;
use std::ptr;
use glib::translate::*;
use gst_rtsp::*;
use gst_rtsp_server::prelude::*;
use gst_rtsp_server::*;
use anyhow::Error;
use derive_more::{Display, Error};
use gst_rtsp_server::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
@ -18,7 +28,7 @@ mod examples_common;
struct NoMountPoints;
#[derive(Debug, Display, Error)]
#[display(fmt = "Usage: {_0} LAUNCH_LINE")]
#[display(fmt = "Usage: {} LAUNCH_LINE", _0)]
struct UsageError(#[error(not(source))] String);
fn main_loop() -> Result<(), Error> {
@ -31,24 +41,22 @@ fn main_loop() -> Result<(), Error> {
// Mostly analog to the rtsp-server example, the server is created
// and the factory for our test mount is configured.
let main_loop = glib::MainLoop::new(None, false);
let server = gst_rtsp_server::RTSPServer::new();
let server = RTSPServer::new();
// Much like HTTP servers, RTSP servers have multiple endpoints that
// provide or take different streams. Here, we ask our server to give
// us a reference to its list of endpoints, so we can add our
// test endpoint.
let mounts = server.mount_points().ok_or(NoMountPoints)?;
let mounts = server.get_mount_points().ok_or(NoMountPoints)?;
// Next, we create a factory for the endpoint we want to create.
// The job of the factory is to create a new pipeline for each client that
// connects, or (if configured to do so) to reuse an existing pipeline.
let factory = gst_rtsp_server::RTSPMediaFactory::new();
let factory = RTSPMediaFactory::new();
// Here we configure a method of authentication that we want the
// server to require from clients.
let auth = gst_rtsp_server::RTSPAuth::new();
let token = gst_rtsp_server::RTSPToken::builder()
.field(gst_rtsp_server::RTSP_TOKEN_MEDIA_FACTORY_ROLE, "user")
.build();
let basic = gst_rtsp_server::RTSPAuth::make_basic("user", "password");
// For proper authentication, we want to use encryption. And there's no
let auth = RTSPAuth::new();
let token = RTSPToken::new(&[(*RTSP_TOKEN_MEDIA_FACTORY_ROLE, &"user")]);
let basic = RTSPAuth::make_basic("user", "password");
// For propery authentication, we want to use encryption. And there's no
// encryption without a certificate!
let cert = gio::TlsCertificate::from_pem(
"-----BEGIN CERTIFICATE-----\
@ -76,14 +84,22 @@ fn main_loop() -> Result<(), Error> {
W535W8UBbEg=-----END PRIVATE KEY-----",
)?;
// Bindable versions were added in b1f515178a363df0322d7adbd5754e1f6e2083c9
// This declares that the user "user" (once authenticated) has a role that
// allows them to access and construct media factories.
factory.add_role_from_structure(
&gst::Structure::builder("user")
.field(gst_rtsp_server::RTSP_PERM_MEDIA_FACTORY_ACCESS, true)
.field(gst_rtsp_server::RTSP_PERM_MEDIA_FACTORY_CONSTRUCT, true)
.build(),
);
unsafe {
gst_rtsp_server_sys::gst_rtsp_media_factory_add_role(
factory.to_glib_none().0,
"user".to_glib_none().0,
RTSP_PERM_MEDIA_FACTORY_ACCESS.to_glib_none().0,
<bool as StaticType>::static_type().to_glib() as *const u8,
true.to_glib() as *const u8,
RTSP_PERM_MEDIA_FACTORY_CONSTRUCT.as_ptr() as *const u8,
<bool as StaticType>::static_type().to_glib() as *const u8,
true.to_glib() as *const u8,
ptr::null_mut::<u8>(),
);
}
auth.set_tls_certificate(Some(&cert));
auth.add_basic(basic.as_str(), &token);
@ -94,7 +110,7 @@ fn main_loop() -> Result<(), Error> {
factory.set_launch(args[1].as_str());
// Tell the RTSP server that we want to work in RECORD mode (clients send)
// data to us.
factory.set_transport_mode(gst_rtsp_server::RTSPTransportMode::RECORD);
factory.set_transport_mode(RTSPTransportMode::RECORD);
// The RTSP protocol allows a couple of different profiles for the actually
// used protocol of data-transmission. With this, we can limit the selection
// from which connecting clients have to choose.
@ -102,12 +118,12 @@ fn main_loop() -> Result<(), Error> {
// The F in the end is for feedback (an extension that allows more bidirectional
// feedback between sender and receiver). AV is just Audio/Video, P is Profile :)
// The default, old RTP profile is AVP
factory.set_profiles(gst_rtsp::RTSPProfile::SAVP | gst_rtsp::RTSPProfile::SAVPF);
factory.set_profiles(RTSPProfile::SAVP | RTSPProfile::SAVPF);
// Now we add a new mount-point and tell the RTSP server to use the factory
// we configured beforehand. This factory will take on the job of creating
// a pipeline, which will take on the incoming data of connected clients.
mounts.add_factory("/test", factory);
mounts.add_factory("/test", &factory);
// Attach the server to our main context.
// A main context is the thing where other stuff is registering itself for its
@ -116,18 +132,18 @@ fn main_loop() -> Result<(), Error> {
// interested in them. In this example, we only do have one, so we can
// leave the context parameter empty, it will automatically select
// the default one.
let id = server.attach(None)?;
let id = server.attach(None);
println!(
"Stream ready at rtsps://127.0.0.1:{}/test",
server.bound_port()
server.get_bound_port()
);
// Start the mainloop. From this point on, the server will start to take
// incoming connections from clients.
main_loop.run();
id.remove();
glib::source_remove(id);
Ok(())
}
@ -140,6 +156,6 @@ fn example_main() -> Result<(), Error> {
fn main() {
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}

View file

@ -6,11 +6,21 @@
//
// It also comes with a custom RTSP server/client subclass for hooking into
// the client machinery and printing some status.
#![allow(clippy::non_send_fields_in_send_ty)]
extern crate gstreamer as gst;
extern crate gstreamer_rtsp as gst_rtsp;
extern crate gstreamer_rtsp_server as gst_rtsp_server;
extern crate gstreamer_sdp as gst_sdp;
use gst_rtsp_server::prelude::*;
use glib::glib_object_impl;
use glib::glib_object_subclass;
use glib::glib_object_wrapper;
use glib::glib_wrapper;
use anyhow::Error;
use derive_more::{Display, Error};
use gst_rtsp_server::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
@ -19,23 +29,23 @@ mod examples_common;
#[display(fmt = "Could not get mount points")]
struct NoMountPoints;
#[derive(Debug, Display, Error)]
#[display(fmt = "Usage: {} LAUNCH_LINE", _0)]
struct UsageError(#[error(not(source))] String);
fn main_loop() -> Result<(), Error> {
let main_loop = glib::MainLoop::new(None, false);
let server = server::Server::default();
let mounts = mount_points::MountPoints::default();
server.set_mount_points(Some(&mounts));
let server = server::Server::new();
// Much like HTTP servers, RTSP servers have multiple endpoints that
// provide different streams. Here, we ask our server to give
// us a reference to his list of endpoints, so we can add our
// test endpoint, providing the pipeline from the cli.
let mounts = server.mount_points().ok_or(NoMountPoints)?;
let mounts = server.get_mount_points().ok_or(NoMountPoints)?;
// Next, we create our custom factory for the endpoint we want to create.
// The job of the factory is to create a new pipeline for each client that
// connects, or (if configured to do so) to reuse an existing pipeline.
let factory = media_factory::Factory::default();
let factory = media_factory::Factory::new();
// This setting specifies whether each connecting client gets the output
// of a new instance of the pipeline, or whether all connected clients share
// the output of the same pipeline.
@ -49,7 +59,7 @@ fn main_loop() -> Result<(), Error> {
// Now we add a new mount-point and tell the RTSP server to serve the content
// provided by the factory we configured above, when a client connects to
// this specific path.
mounts.add_factory("/test", factory);
mounts.add_factory("/test", &factory);
// Attach the server to our main context.
// A main context is the thing where other stuff is registering itself for its
@ -58,52 +68,71 @@ fn main_loop() -> Result<(), Error> {
// interested in them. In this example, we only do have one, so we can
// leave the context parameter empty, it will automatically select
// the default one.
let id = server.attach(None)?;
let id = server.attach(None);
println!(
"Stream ready at rtsp://127.0.0.1:{}/test",
server.bound_port()
server.get_bound_port()
);
// Start the mainloop. From this point on, the server will start to serve
// our quality content to connecting clients.
main_loop.run();
id.remove();
glib::source_remove(id);
Ok(())
}
// Our custom media factory that creates a media input manually
mod media_factory {
use gst_rtsp_server::subclass::prelude::*;
use super::*;
use glib::subclass;
use glib::subclass::prelude::*;
use glib::translate::*;
extern crate gstreamer_rtsp_server as gst_rtsp_server;
use gst_rtsp_server::subclass::prelude::*;
// In the imp submodule we include the actual implementation
mod imp {
use super::*;
// This is the private data of our factory
#[derive(Default)]
pub struct Factory {}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for Factory {
const NAME: &'static str = "RsRTSPMediaFactory";
type Type = super::Factory;
type ParentType = gst_rtsp_server::RTSPMediaFactory;
type Instance = gst::subclass::ElementInstanceStruct<Self>;
type Class = subclass::simple::ClassStruct<Self>;
// This macro provides some boilerplate
glib_object_subclass!();
// Called when a new instance is to be created. We need to return an instance
// of our struct here.
fn new() -> Self {
Self {}
}
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for Factory {
fn constructed(&self) {
self.parent_constructed();
// This macro provides some boilerplate.
glib_object_impl!();
fn constructed(&self, obj: &glib::Object) {
self.parent_constructed(obj);
let factory = obj
.downcast_ref::<gst_rtsp_server::RTSPMediaFactory>()
.unwrap();
let factory = self.obj();
// All media created by this factory are our custom media type. This would
// not require a media factory subclass and can also be called on the normal
// RTSPMediaFactory.
@ -113,28 +142,27 @@ mod media_factory {
// Implementation of gst_rtsp_server::RTSPMediaFactory virtual methods
impl RTSPMediaFactoryImpl for Factory {
fn create_element(&self, _url: &gst_rtsp::RTSPUrl) -> Option<gst::Element> {
fn create_element(
&self,
_factory: &gst_rtsp_server::RTSPMediaFactory,
_url: &gst_rtsp::RTSPUrl,
) -> Option<gst::Element> {
// Create a simple VP8 videotestsrc input
let bin = gst::Bin::default();
let src = gst::ElementFactory::make("videotestsrc")
// Configure the videotestsrc live
.property("is-live", true)
.build()
.unwrap();
let enc = gst::ElementFactory::make("vp8enc")
// Produce encoded data as fast as possible
.property("deadline", 1i64)
.build()
.unwrap();
let bin = gst::Bin::new(None);
let src = gst::ElementFactory::make("videotestsrc", None).unwrap();
let enc = gst::ElementFactory::make("vp8enc", None).unwrap();
// The names of the payloaders must be payX
let pay = gst::ElementFactory::make("rtpvp8pay")
.name("pay0")
.build()
.unwrap();
let pay = gst::ElementFactory::make("rtpvp8pay", Some("pay0")).unwrap();
bin.add_many([&src, &enc, &pay]).unwrap();
gst::Element::link_many([&src, &enc, &pay]).unwrap();
// Configure the videotestsrc live
src.set_property("is-live", &true).unwrap();
// Produce encoded data as fast as possible
enc.set_property("deadline", &1i64).unwrap();
bin.add_many(&[&src, &enc, &pay]).unwrap();
gst::Element::link_many(&[&src, &enc, &pay]).unwrap();
Some(bin.upcast())
}
@ -143,20 +171,44 @@ mod media_factory {
// This here defines the public interface of our factory and implements
// the corresponding traits so that it behaves like any other RTSPMediaFactory
glib::wrapper! {
pub struct Factory(ObjectSubclass<imp::Factory>) @extends gst_rtsp_server::RTSPMediaFactory;
glib_wrapper! {
pub struct Factory(
Object<
gst::subclass::ElementInstanceStruct<imp::Factory>,
subclass::simple::ClassStruct<imp::Factory>,
FactoryClass
>
) @extends gst_rtsp_server::RTSPMediaFactory;
match fn {
get_type => || imp::Factory::get_type().to_glib(),
}
}
impl Default for Factory {
// Factories must be Send+Sync, and ours is
unsafe impl Send for Factory {}
unsafe impl Sync for Factory {}
impl Factory {
// Creates a new instance of our factory
fn default() -> Factory {
glib::Object::new()
pub fn new() -> Factory {
glib::Object::new(Self::static_type(), &[])
.expect("Failed to create factory")
.downcast()
.expect("Created factory is of wrong type")
}
}
}
// Our custom media subclass that adds a custom attribute to the SDP returned by DESCRIBE
mod media {
use super::*;
use glib::subclass;
use glib::subclass::prelude::*;
use glib::translate::*;
extern crate gstreamer_rtsp_server as gst_rtsp_server;
use gst_rtsp_server::subclass::prelude::*;
// In the imp submodule we include the actual implementation
@ -164,30 +216,42 @@ mod media {
use super::*;
// This is the private data of our media
#[derive(Default)]
pub struct Media {}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for Media {
const NAME: &'static str = "RsRTSPMedia";
type Type = super::Media;
type ParentType = gst_rtsp_server::RTSPMedia;
type Instance = gst::subclass::ElementInstanceStruct<Self>;
type Class = subclass::simple::ClassStruct<Self>;
// This macro provides some boilerplate
glib_object_subclass!();
// Called when a new instance is to be created. We need to return an instance
// of our struct here.
fn new() -> Self {
Self {}
}
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for Media {}
impl ObjectImpl for Media {
// This macro provides some boilerplate.
glib_object_impl!();
}
// Implementation of gst_rtsp_server::RTSPMedia virtual methods
impl RTSPMediaImpl for Media {
fn setup_sdp(
&self,
media: &gst_rtsp_server::RTSPMedia,
sdp: &mut gst_sdp::SDPMessageRef,
info: &gst_rtsp_server::subclass::SDPInfo,
) -> Result<(), gst::LoggableError> {
self.parent_setup_sdp(sdp, info)?;
self.parent_setup_sdp(media, sdp, info)?;
sdp.add_attribute("my-custom-attribute", Some("has-a-value"));
@ -198,77 +262,137 @@ mod media {
// This here defines the public interface of our factory and implements
// the corresponding traits so that it behaves like any other RTSPMedia
glib::wrapper! {
pub struct Media(ObjectSubclass<imp::Media>) @extends gst_rtsp_server::RTSPMedia;
glib_wrapper! {
pub struct Media(
Object<
gst::subclass::ElementInstanceStruct<imp::Media>,
subclass::simple::ClassStruct<imp::Media>,
MediaClass
>
) @extends gst_rtsp_server::RTSPMedia;
match fn {
get_type => || imp::Media::get_type().to_glib(),
}
}
// Medias must be Send+Sync, and ours is
unsafe impl Send for Media {}
unsafe impl Sync for Media {}
}
// Our custom RTSP server subclass that reports when clients are connecting and uses
// our custom RTSP client subclass for each client
mod server {
use gst_rtsp_server::subclass::prelude::*;
use super::*;
use glib::subclass;
use glib::subclass::prelude::*;
use glib::translate::*;
extern crate gstreamer_rtsp_server as gst_rtsp_server;
use gst_rtsp_server::subclass::prelude::*;
// In the imp submodule we include the actual implementation
mod imp {
use super::*;
// This is the private data of our server
#[derive(Default)]
pub struct Server {}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for Server {
const NAME: &'static str = "RsRTSPServer";
type Type = super::Server;
type ParentType = gst_rtsp_server::RTSPServer;
type Instance = gst::subclass::ElementInstanceStruct<Self>;
type Class = subclass::simple::ClassStruct<Self>;
// This macro provides some boilerplate
glib_object_subclass!();
// Called when a new instance is to be created. We need to return an instance
// of our struct here.
fn new() -> Self {
Self {}
}
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for Server {}
impl ObjectImpl for Server {
// This macro provides some boilerplate.
glib_object_impl!();
}
// Implementation of gst_rtsp_server::RTSPServer virtual methods
impl RTSPServerImpl for Server {
fn create_client(&self) -> Option<gst_rtsp_server::RTSPClient> {
let server = self.obj();
let client = super::client::Client::default();
fn create_client(
&self,
server: &gst_rtsp_server::RTSPServer,
) -> Option<gst_rtsp_server::RTSPClient> {
let client = super::client::Client::new();
// Duplicated from the default implementation
client.set_session_pool(server.session_pool().as_ref());
client.set_mount_points(server.mount_points().as_ref());
client.set_auth(server.auth().as_ref());
client.set_thread_pool(server.thread_pool().as_ref());
client.set_session_pool(server.get_session_pool().as_ref());
client.set_mount_points(server.get_mount_points().as_ref());
client.set_auth(server.get_auth().as_ref());
client.set_thread_pool(server.get_thread_pool().as_ref());
Some(client.upcast())
}
fn client_connected(&self, client: &gst_rtsp_server::RTSPClient) {
self.parent_client_connected(client);
println!("Client {client:?} connected");
fn client_connected(
&self,
server: &gst_rtsp_server::RTSPServer,
client: &gst_rtsp_server::RTSPClient,
) {
self.parent_client_connected(server, client);
println!("Client {:?} connected", client);
}
}
}
// This here defines the public interface of our factory and implements
// the corresponding traits so that it behaves like any other RTSPServer
glib::wrapper! {
pub struct Server(ObjectSubclass<imp::Server>) @extends gst_rtsp_server::RTSPServer;
glib_wrapper! {
pub struct Server(
Object<
gst::subclass::ElementInstanceStruct<imp::Server>,
subclass::simple::ClassStruct<imp::Server>,
ServerClass
>
) @extends gst_rtsp_server::RTSPServer;
match fn {
get_type => || imp::Server::get_type().to_glib(),
}
}
impl Default for Server {
// Servers must be Send+Sync, and ours is
unsafe impl Send for Server {}
unsafe impl Sync for Server {}
impl Server {
// Creates a new instance of our factory
fn default() -> Server {
glib::Object::new()
pub fn new() -> Server {
glib::Object::new(Self::static_type(), &[])
.expect("Failed to create server")
.downcast()
.expect("Created server is of wrong type")
}
}
}
// Our custom RTSP client subclass.
mod client {
use super::*;
use glib::subclass;
use glib::subclass::prelude::*;
use glib::translate::*;
extern crate gstreamer_rtsp_server as gst_rtsp_server;
use gst_rtsp_server::subclass::prelude::*;
// In the imp submodule we include the actual implementation
@ -276,92 +400,69 @@ mod client {
use super::*;
// This is the private data of our server
#[derive(Default)]
pub struct Client {}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for Client {
const NAME: &'static str = "RsRTSPClient";
type Type = super::Client;
type ParentType = gst_rtsp_server::RTSPClient;
type Instance = gst::subclass::ElementInstanceStruct<Self>;
type Class = subclass::simple::ClassStruct<Self>;
// This macro provides some boilerplate
glib_object_subclass!();
// Called when a new instance is to be created. We need to return an instance
// of our struct here.
fn new() -> Self {
Self {}
}
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for Client {}
impl ObjectImpl for Client {
// This macro provides some boilerplate.
glib_object_impl!();
}
// Implementation of gst_rtsp_server::RTSPClient virtual methods
impl RTSPClientImpl for Client {
fn closed(&self) {
let client = self.obj();
self.parent_closed();
println!("Client {client:?} closed");
}
fn describe_request(&self, ctx: &gst_rtsp_server::RTSPContext) {
self.parent_describe_request(ctx);
let request_uri = ctx.uri().unwrap().request_uri();
println!("Describe request for uri: {request_uri:?}");
fn closed(&self, client: &gst_rtsp_server::RTSPClient) {
self.parent_closed(client);
println!("Client {:?} closed", client);
}
}
}
// This here defines the public interface of our factory and implements
// the corresponding traits so that it behaves like any other RTSPClient
glib::wrapper! {
pub struct Client(ObjectSubclass<imp::Client>) @extends gst_rtsp_server::RTSPClient;
glib_wrapper! {
pub struct Client(
Object<
gst::subclass::ElementInstanceStruct<imp::Client>,
subclass::simple::ClassStruct<imp::Client>,
ClientClass
>
) @extends gst_rtsp_server::RTSPClient;
match fn {
get_type => || imp::Client::get_type().to_glib(),
}
}
impl Default for Client {
// Clients must be Send+Sync, and ours is
unsafe impl Send for Client {}
unsafe impl Sync for Client {}
impl Client {
// Creates a new instance of our factory
fn default() -> Client {
glib::Object::new()
}
}
}
mod mount_points {
use gst_rtsp_server::subclass::prelude::*;
mod imp {
use super::*;
// This is the private data of our mount points
#[derive(Default)]
pub struct MountPoints {}
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for MountPoints {
const NAME: &'static str = "RsRTSPMountPoints";
type Type = super::MountPoints;
type ParentType = gst_rtsp_server::RTSPMountPoints;
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for MountPoints {}
// Implementation of gst_rtsp_server::RTSPClient virtual methods
impl RTSPMountPointsImpl for MountPoints {
fn make_path(&self, url: &gst_rtsp::RTSPUrl) -> Option<glib::GString> {
println!("Make path called for {url:?} ");
self.parent_make_path(url)
}
}
}
glib::wrapper! {
pub struct MountPoints(ObjectSubclass<imp::MountPoints>) @extends gst_rtsp_server::RTSPMountPoints;
}
impl Default for MountPoints {
// Creates a new instance of our factory
fn default() -> Self {
glib::Object::new()
pub fn new() -> Client {
glib::Object::new(Self::static_type(), &[])
.expect("Failed to create client")
.downcast()
.expect("Created client is of wrong type")
}
}
}
@ -374,6 +475,6 @@ fn example_main() -> Result<(), Error> {
fn main() {
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}

View file

@ -5,9 +5,13 @@
use std::env;
extern crate gstreamer as gst;
extern crate gstreamer_rtsp_server as gst_rtsp_server;
use gst_rtsp_server::prelude::*;
use anyhow::Error;
use derive_more::{Display, Error};
use gst_rtsp_server::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
@ -17,7 +21,7 @@ mod examples_common;
struct NoMountPoints;
#[derive(Debug, Display, Error)]
#[display(fmt = "Usage: {_0} LAUNCH_LINE")]
#[display(fmt = "Usage: {} LAUNCH_LINE", _0)]
struct UsageError(#[error(not(source))] String);
fn main_loop() -> Result<(), Error> {
@ -33,7 +37,7 @@ fn main_loop() -> Result<(), Error> {
// provide different streams. Here, we ask our server to give
// us a reference to his list of endpoints, so we can add our
// test endpoint, providing the pipeline from the cli.
let mounts = server.mount_points().ok_or(NoMountPoints)?;
let mounts = server.get_mount_points().ok_or(NoMountPoints)?;
// Next, we create a factory for the endpoint we want to create.
// The job of the factory is to create a new pipeline for each client that
@ -56,7 +60,7 @@ fn main_loop() -> Result<(), Error> {
// Now we add a new mount-point and tell the RTSP server to serve the content
// provided by the factory we configured above, when a client connects to
// this specific path.
mounts.add_factory("/test", factory);
mounts.add_factory("/test", &factory);
// Attach the server to our main context.
// A main context is the thing where other stuff is registering itself for its
@ -65,18 +69,18 @@ fn main_loop() -> Result<(), Error> {
// interested in them. In this example, we only do have one, so we can
// leave the context parameter empty, it will automatically select
// the default one.
let id = server.attach(None)?;
let id = server.attach(None);
println!(
"Stream ready at rtsp://127.0.0.1:{}/test",
server.bound_port()
server.get_bound_port()
);
// Start the mainloop. From this point on, the server will start to serve
// our quality content to connecting clients.
main_loop.run();
id.remove();
glib::source_remove(id);
Ok(())
}
@ -89,6 +93,6 @@ fn example_main() -> Result<(), Error> {
fn main() {
match examples_common::run(example_main) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}

View file

@ -5,19 +5,41 @@
//
// Our filter can only handle F32 mono and acts as a FIR filter. The filter impulse response /
// coefficients are provided via Rust API on the filter as a Vec<f32>.
#![allow(clippy::non_send_fields_in_send_ty)]
use glib::glib_object_impl;
use glib::glib_object_subclass;
use glib::glib_object_wrapper;
use glib::glib_wrapper;
extern crate gstreamer as gst;
use gst::gst_element_error;
use gst::gst_info;
use gst::gst_trace;
use gst::prelude::*;
use anyhow::Error;
use derive_more::{Display, Error};
use gst::prelude::*;
#[path = "../examples-common.rs"]
mod examples_common;
// Our custom FIR filter element is defined in this module
mod fir_filter {
use byte_slice_cast::*;
use super::*;
use glib::subclass;
use glib::subclass::prelude::*;
use glib::translate::*;
use gst::subclass::prelude::*;
extern crate gstreamer_base as gst_base;
use gst_base::subclass::prelude::*;
extern crate gstreamer_audio as gst_audio;
use byte_slice_cast::*;
use once_cell::sync::Lazy;
// The debug category we use below for our filter
@ -31,12 +53,12 @@ mod fir_filter {
// In the imp submodule we include the actual implementation
mod imp {
use std::{collections::VecDeque, sync::Mutex};
use super::*;
use std::collections::VecDeque;
use std::i32;
use std::sync::Mutex;
// This is the private data of our filter
#[derive(Default)]
pub struct FirFilter {
pub(super) coeffs: Mutex<Vec<f32>>,
history: Mutex<VecDeque<f32>>,
@ -45,104 +67,128 @@ mod fir_filter {
// This trait registers our type with the GObject object system and
// provides the entry points for creating a new instance and setting
// up the class data
#[glib::object_subclass]
impl ObjectSubclass for FirFilter {
const NAME: &'static str = "RsFirFilter";
type Type = super::FirFilter;
type ParentType = gst_base::BaseTransform;
type Instance = gst::subclass::ElementInstanceStruct<Self>;
type Class = subclass::simple::ClassStruct<Self>;
// This macro provides some boilerplate
glib_object_subclass!();
// Called when a new instance is to be created. We need to return an instance
// of our struct here.
fn new() -> Self {
Self {
coeffs: Mutex::new(Vec::new()),
history: Mutex::new(VecDeque::new()),
}
}
// Called exactly once when registering the type. Used for
// setting up metadata for all instances, e.g. the name and
// classification and the pad templates with their caps.
//
// Actual instances can create pads based on those pad templates
// with a subset of the caps given here. In case of basetransform,
// a "src" and "sink" pad template are required here and the base class
// will automatically instantiate pads for them.
//
// Our element here can only handle F32 mono audio.
fn class_init(klass: &mut subclass::simple::ClassStruct<Self>) {
// Set the element specific metadata. This information is what
// is visible from gst-inspect-1.0 and can also be programatically
// retrieved from the gst::Registry after initial registration
// without having to load the plugin in memory.
klass.set_metadata(
"FIR Filter",
"Filter/Effect/Audio",
"A FIR audio filter",
"Sebastian Dröge <sebastian@centricular.com>",
);
// Create and add pad templates for our sink and source pad. These
// are later used for actually creating the pads and beforehand
// already provide information to GStreamer about all possible
// pads that could exist for this type.
// On both of pads we can only handle F32 mono at any sample rate.
let caps = gst::Caps::new_simple(
"audio/x-raw",
&[
("format", &gst_audio::AUDIO_FORMAT_F32.to_str()),
("rate", &gst::IntRange::<i32>::new(1, i32::MAX)),
("channels", &1i32),
("layout", &"interleaved"),
],
);
// The src pad template must be named "src" for basetransform
// and specific a pad that is always there
let src_pad_template = gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
)
.unwrap();
klass.add_pad_template(src_pad_template);
// The sink pad template must be named "sink" for basetransform
// and specific a pad that is always there
let sink_pad_template = gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&caps,
)
.unwrap();
klass.add_pad_template(sink_pad_template);
// Configure basetransform so that we are always running in-place,
// don't passthrough on same caps and also never call transform_ip
// in passthrough mode (which does not matter for us here).
//
// The way how our processing is implemented, in-place transformation
// is simpler.
klass.configure(
gst_base::subclass::BaseTransformMode::AlwaysInPlace,
false,
false,
);
}
}
// Implementation of glib::Object virtual methods
impl ObjectImpl for FirFilter {}
impl GstObjectImpl for FirFilter {}
impl ObjectImpl for FirFilter {
// This macro provides some boilerplate.
glib_object_impl!();
}
// Implementation of gst::Element virtual methods
impl ElementImpl for FirFilter {
// The element specific metadata. This information is what is visible from
// gst-inspect-1.0 and can also be programmatically retrieved from the gst::Registry
// after initial registration without having to load the plugin in memory.
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
static ELEMENT_METADATA: std::sync::OnceLock<gst::subclass::ElementMetadata> =
std::sync::OnceLock::new();
Some(ELEMENT_METADATA.get_or_init(|| {
gst::subclass::ElementMetadata::new(
"FIR Filter",
"Filter/Effect/Audio",
"A FIR audio filter",
"Sebastian Dröge <sebastian@centricular.com>",
)
}))
}
fn pad_templates() -> &'static [gst::PadTemplate] {
static PAD_TEMPLATES: std::sync::OnceLock<Vec<gst::PadTemplate>> =
std::sync::OnceLock::new();
PAD_TEMPLATES.get_or_init(|| {
// Create pad templates for our sink and source pad. These are later used for
// actually creating the pads and beforehand already provide information to
// GStreamer about all possible pads that could exist for this type.
// On both of pads we can only handle F32 mono at any sample rate.
let caps = gst_audio::AudioCapsBuilder::new_interleaved()
.format(gst_audio::AUDIO_FORMAT_F32)
.channels(1)
.build();
vec![
// The src pad template must be named "src" for basetransform
// and specific a pad that is always there
gst::PadTemplate::new(
"src",
gst::PadDirection::Src,
gst::PadPresence::Always,
&caps,
)
.unwrap(),
// The sink pad template must be named "sink" for basetransform
// and specific a pad that is always there
gst::PadTemplate::new(
"sink",
gst::PadDirection::Sink,
gst::PadPresence::Always,
&caps,
)
.unwrap(),
]
})
}
}
impl ElementImpl for FirFilter {}
// Implementation of gst_base::BaseTransform virtual methods
impl BaseTransformImpl for FirFilter {
// Configure basetransform so that we are always running in-place,
// don't passthrough on same caps and also never call transform_ip
// in passthrough mode (which does not matter for us here).
//
// The way how our processing is implemented, in-place transformation
// is simpler.
const MODE: gst_base::subclass::BaseTransformMode =
gst_base::subclass::BaseTransformMode::AlwaysInPlace;
const PASSTHROUGH_ON_SAME_CAPS: bool = false;
const TRANSFORM_IP_ON_PASSTHROUGH: bool = false;
// Returns the size of one processing unit (i.e. a frame in our case) corresponding
// to the given caps. This is used for allocating a big enough output buffer and
// sanity checking the input buffer size, among other things.
fn unit_size(&self, caps: &gst::Caps) -> Option<usize> {
fn get_unit_size(
&self,
_element: &gst_base::BaseTransform,
caps: &gst::Caps,
) -> Option<usize> {
let audio_info = gst_audio::AudioInfo::from_caps(caps).ok();
audio_info.map(|info| info.bpf() as usize)
}
// Called when shutting down the element so we can release all stream-related state
// There's also start(), which is called whenever starting the element again
fn stop(&self) -> Result<(), gst::ErrorMessage> {
fn stop(&self, element: &gst_base::BaseTransform) -> Result<(), gst::ErrorMessage> {
// Drop state
self.history.lock().unwrap().clear();
gst::info!(CAT, imp = self, "Stopped");
gst_info!(CAT, obj: element, "Stopped");
Ok(())
}
@ -150,19 +196,20 @@ mod fir_filter {
// Does the actual transformation of the input buffer to the output buffer
fn transform_ip(
&self,
element: &gst_base::BaseTransform,
buf: &mut gst::BufferRef,
) -> Result<gst::FlowSuccess, gst::FlowError> {
// Get coefficients and return directly if we have none
let coeffs = self.coeffs.lock().unwrap();
if coeffs.is_empty() {
gst::trace!(CAT, imp = self, "No coefficients set -- passthrough");
gst_trace!(CAT, obj: element, "No coefficients set -- passthrough");
return Ok(gst::FlowSuccess::Ok);
}
// Try mapping the input buffer as writable
let mut data = buf.map_writable().map_err(|_| {
gst::element_imp_error!(
self,
gst_element_error!(
element,
gst::CoreError::Failed,
["Failed to map input buffer readable"]
);
@ -171,8 +218,8 @@ mod fir_filter {
// And reinterprete it as a slice of f32
let samples = data.as_mut_slice_of::<f32>().map_err(|err| {
gst::element_imp_error!(
self,
gst_element_error!(
element,
gst::CoreError::Failed,
["Failed to cast input buffer as f32 slice: {}", err]
);
@ -181,9 +228,9 @@ mod fir_filter {
let mut history = self.history.lock().unwrap();
gst::trace!(
gst_trace!(
CAT,
imp = self,
obj: element,
"Transforming {} samples with filter of length {}",
samples.len(),
coeffs.len()
@ -211,50 +258,75 @@ mod fir_filter {
// This here defines the public interface of our element and implements
// the corresponding traits so that it behaves like any other gst::Element
glib::wrapper! {
pub struct FirFilter(ObjectSubclass<imp::FirFilter>) @extends gst_base::BaseTransform, gst::Element, gst::Object;
glib_wrapper! {
pub struct FirFilter(
Object<
gst::subclass::ElementInstanceStruct<imp::FirFilter>,
subclass::simple::ClassStruct<imp::FirFilter>,
FirFilterClass
>
) @extends gst_base::BaseTransform, gst::Element, gst::Object;
match fn {
get_type => || imp::FirFilter::get_type().to_glib(),
}
}
// GStreamer elements must be Send+Sync, and ours is
unsafe impl Send for FirFilter {}
unsafe impl Sync for FirFilter {}
impl FirFilter {
// Creates a new instance of our filter with the given name
pub fn new(name: Option<&str>) -> FirFilter {
glib::Object::builder().property("name", name).build()
glib::Object::new(Self::static_type(), &[("name", &name)])
.expect("Failed to create fir filter")
.downcast()
.expect("Created fir filter is of wrong type")
}
// Sets the coefficients by getting access to the private
// struct and simply setting them
pub fn set_coeffs(&self, coeffs: Vec<f32>) {
let imp = self.imp();
let imp = imp::FirFilter::from_instance(self);
*imp.coeffs.lock().unwrap() = coeffs;
}
}
}
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
#[display(fmt = "Missing element {}", _0)]
struct MissingElement(#[error(not(source))] &'static str);
#[derive(Debug, Display, Error)]
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
struct ErrorMessage {
src: glib::GString,
error: glib::Error,
debug: Option<glib::GString>,
src: String,
error: String,
debug: Option<String>,
source: glib::Error,
}
fn create_pipeline() -> Result<gst::Pipeline, Error> {
gst::init()?;
// Create our pipeline with the custom element
let pipeline = gst::Pipeline::default();
let src = gst::ElementFactory::make("audiotestsrc")
.property_from_str("wave", "white-noise")
.build()?;
let pipeline = gst::Pipeline::new(None);
let src = gst::ElementFactory::make("audiotestsrc", None)
.map_err(|_| MissingElement("audiotestsrc"))?;
let filter = fir_filter::FirFilter::new(None);
let conv = gst::ElementFactory::make("audioconvert").build()?;
let sink = gst::ElementFactory::make("autoaudiosink").build()?;
let conv = gst::ElementFactory::make("audioconvert", None)
.map_err(|_| MissingElement("audioconvert"))?;
let sink = gst::ElementFactory::make("autoaudiosink", None)
.map_err(|_| MissingElement("autoaudiosink"))?;
pipeline.add_many([&src, filter.upcast_ref(), &conv, &sink])?;
pipeline.add_many(&[&src, filter.upcast_ref(), &conv, &sink])?;
src.link(&filter)?;
filter.link(&conv)?;
conv.link(&sink)?;
src.set_property_from_str("wave", "white-noise");
// Create a windowed sinc lowpass filter at 1/64 sample rate,
// i.e. 689Hz for 44.1kHz sample rate
let w = 2.0 * std::f32::consts::PI / 64.0;
@ -291,10 +363,10 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Playing)?;
let bus = pipeline
.bus()
.get_bus()
.expect("Pipeline without bus. Shouldn't happen!");
for msg in bus.iter_timed(gst::ClockTime::NONE) {
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
use gst::MessageView;
match msg.view() {
@ -303,11 +375,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
pipeline.set_state(gst::State::Null)?;
return Err(ErrorMessage {
src: msg
.src()
.map(|s| s.path_string())
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
error: err.error(),
debug: err.debug(),
.get_src()
.map(|s| String::from(s.get_path_string()))
.unwrap_or_else(|| String::from("None")),
error: err.get_error().to_string(),
debug: err.get_debug(),
source: err.get_error(),
}
.into());
}
@ -323,12 +396,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
fn example_main() {
match create_pipeline().and_then(main_loop) {
Ok(r) => r,
Err(e) => eprintln!("Error! {e}"),
Err(e) => eprintln!("Error! {}", e),
}
}
fn main() {
// tutorials_common::run is only required to set up the application environment on macOS
// (but not necessary in normal Cocoa applications where this is set up automatically)
// tutorials_common::run is only required to set up the application environent on macOS
// (but not necessary in normal Cocoa applications where this is set up autmatically)
examples_common::run(example_main);
}

Some files were not shown because too many files have changed in this diff Show more