mirror of https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs.git
synced 2024-12-24 19:10:29 +00:00

commit 47453f380d
Merge branch 'develop_rr_peite' into 'master'

Gst NDI plugin

9 changed files with 1524 additions and 1127 deletions

gst-plugin-ndi/.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
Cargo.lock

gst-plugin-ndi/Cargo.lock (generated, deleted, 275 lines)
@@ -1,275 +0,0 @@
[[package]]
|
||||
name = "array-init"
|
||||
version = "0.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "byte-slice-cast"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "byteorder"
|
||||
version = "1.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "glib"
|
||||
version = "0.5.0"
|
||||
source = "git+https://github.com/gtk-rs/glib#cae39ff7a72073a553f5b60321bd9389db00ac3b"
|
||||
dependencies = [
|
||||
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"glib-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gobject-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "glib-sys"
|
||||
version = "0.6.0"
|
||||
source = "git+https://github.com/gtk-rs/sys#ce1fffe51a6498ac278502bf1afb18d711ad0250"
|
||||
dependencies = [
|
||||
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gobject-sys"
|
||||
version = "0.6.0"
|
||||
source = "git+https://github.com/gtk-rs/sys#ce1fffe51a6498ac278502bf1afb18d711ad0250"
|
||||
dependencies = [
|
||||
"glib-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gst-plugin"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/sdroege/gst-plugin-rs#f2f18ebb278f66f09995148cf9790ed469f6357f"
|
||||
dependencies = [
|
||||
"byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"glib 0.5.0 (git+https://github.com/gtk-rs/glib)",
|
||||
"glib-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gobject-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gstreamer 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)",
|
||||
"gstreamer-base 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)",
|
||||
"gstreamer-base-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"gstreamer-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gst-plugin-ndi"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"byte-slice-cast 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"glib 0.5.0 (git+https://github.com/gtk-rs/glib)",
|
||||
"gst-plugin 0.1.0 (git+https://github.com/sdroege/gst-plugin-rs)",
|
||||
"gstreamer 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)",
|
||||
"gstreamer-audio 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)",
|
||||
"gstreamer-base 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)",
|
||||
"gstreamer-video 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)",
|
||||
"num-traits 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer"
|
||||
version = "0.12.0"
|
||||
source = "git+https://github.com/sdroege/gstreamer-rs#5dd17d6248b45fa2536f1864962908fb21b041e8"
|
||||
dependencies = [
|
||||
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"glib 0.5.0 (git+https://github.com/gtk-rs/glib)",
|
||||
"glib-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gobject-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gstreamer-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"muldiv 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num-rational 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer-audio"
|
||||
version = "0.12.0"
|
||||
source = "git+https://github.com/sdroege/gstreamer-rs#5dd17d6248b45fa2536f1864962908fb21b041e8"
|
||||
dependencies = [
|
||||
"array-init 0.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"glib 0.5.0 (git+https://github.com/gtk-rs/glib)",
|
||||
"glib-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gobject-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gstreamer 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)",
|
||||
"gstreamer-audio-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"gstreamer-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer-audio-sys"
|
||||
version = "0.6.0"
|
||||
source = "git+https://github.com/sdroege/gstreamer-sys#a9a4608562bcd377f116d5d4806bff439eca5168"
|
||||
dependencies = [
|
||||
"glib-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gobject-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gstreamer-base-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"gstreamer-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer-base"
|
||||
version = "0.12.0"
|
||||
source = "git+https://github.com/sdroege/gstreamer-rs#5dd17d6248b45fa2536f1864962908fb21b041e8"
|
||||
dependencies = [
|
||||
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"glib 0.5.0 (git+https://github.com/gtk-rs/glib)",
|
||||
"glib-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gobject-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gstreamer 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)",
|
||||
"gstreamer-base-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"gstreamer-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer-base-sys"
|
||||
version = "0.6.0"
|
||||
source = "git+https://github.com/sdroege/gstreamer-sys#a9a4608562bcd377f116d5d4806bff439eca5168"
|
||||
dependencies = [
|
||||
"glib-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gobject-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gstreamer-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer-sys"
|
||||
version = "0.6.0"
|
||||
source = "git+https://github.com/sdroege/gstreamer-sys#a9a4608562bcd377f116d5d4806bff439eca5168"
|
||||
dependencies = [
|
||||
"glib-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gobject-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer-video"
|
||||
version = "0.12.0"
|
||||
source = "git+https://github.com/sdroege/gstreamer-rs#5dd17d6248b45fa2536f1864962908fb21b041e8"
|
||||
dependencies = [
|
||||
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"glib 0.5.0 (git+https://github.com/gtk-rs/glib)",
|
||||
"glib-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gobject-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gstreamer 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)",
|
||||
"gstreamer-base 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)",
|
||||
"gstreamer-base-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"gstreamer-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"gstreamer-video-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gstreamer-video-sys"
|
||||
version = "0.6.0"
|
||||
source = "git+https://github.com/sdroege/gstreamer-sys#a9a4608562bcd377f116d5d4806bff439eca5168"
|
||||
dependencies = [
|
||||
"glib-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gobject-sys 0.6.0 (git+https://github.com/gtk-rs/sys)",
|
||||
"gstreamer-base-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"gstreamer-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)",
|
||||
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "muldiv"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "nodrop"
|
||||
version = "0.1.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "num-integer"
|
||||
version = "0.1.36"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"num-traits 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-rational"
|
||||
version = "0.1.42"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"num-integer 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num-traits 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-traits"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "pkg-config"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[metadata]
|
||||
"checksum array-init 0.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4d3b508d35216892b50a135fb52c9bb90f04a97b7782230805dff1a156ad5469"
|
||||
"checksum bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b3c30d3802dfb7281680d6285f2ccdaa8c2d8fee41f93805dba5c4cf50dc23cf"
|
||||
"checksum byte-slice-cast 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5a865e7bfa6c3b79216ccba767d4dc66e4f9f65f1ed4639e73faff3c4a2485d7"
|
||||
"checksum byteorder 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "73b5bdfe7ee3ad0b99c9801d58807a9dbc9e09196365b0203853b99889ab3c87"
|
||||
"checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de"
|
||||
"checksum glib 0.5.0 (git+https://github.com/gtk-rs/glib)" = "<none>"
|
||||
"checksum glib-sys 0.6.0 (git+https://github.com/gtk-rs/sys)" = "<none>"
|
||||
"checksum gobject-sys 0.6.0 (git+https://github.com/gtk-rs/sys)" = "<none>"
|
||||
"checksum gst-plugin 0.1.0 (git+https://github.com/sdroege/gst-plugin-rs)" = "<none>"
|
||||
"checksum gstreamer 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)" = "<none>"
|
||||
"checksum gstreamer-audio 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)" = "<none>"
|
||||
"checksum gstreamer-audio-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)" = "<none>"
|
||||
"checksum gstreamer-base 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)" = "<none>"
|
||||
"checksum gstreamer-base-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)" = "<none>"
|
||||
"checksum gstreamer-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)" = "<none>"
|
||||
"checksum gstreamer-video 0.12.0 (git+https://github.com/sdroege/gstreamer-rs)" = "<none>"
|
||||
"checksum gstreamer-video-sys 0.6.0 (git+https://github.com/sdroege/gstreamer-sys)" = "<none>"
|
||||
"checksum lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c8f31047daa365f19be14b47c29df4f7c3b581832407daabe6ae77397619237d"
|
||||
"checksum libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)" = "6fd41f331ac7c5b8ac259b8bf82c75c0fb2e469bbf37d2becbba9a6a2221965b"
|
||||
"checksum muldiv 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1cbef5aa2e8cd82a18cc20e26434cc9843e1ef46e55bfabe5bddb022236c5b3e"
|
||||
"checksum nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2"
|
||||
"checksum num-integer 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f8d26da319fb45674985c78f1d1caf99aa4941f785d384a2ae36d0740bc3e2fe"
|
||||
"checksum num-rational 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "ee314c74bd753fc86b4780aa9475da469155f3848473a261d2d18e35245a784e"
|
||||
"checksum num-traits 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dee092fcdf725aee04dd7da1d21debff559237d49ef1cb3e69bcb8ece44c7364"
|
||||
"checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"

gst-plugin-ndi/Cargo.toml
@@ -1,19 +1,19 @@
[package]
name = "gst-plugin-ndi"
version = "0.1.0"
authors = ["Ruben Gonzalez <rubenrua@teltek.es>"]
authors = ["Ruben Gonzalez <rubenrua@teltek.es>", "Daniel Vilar <daniel.peiteado@teltek.es>"]
repository = "https://gitlab.teltek.es/rubenrua/ndi-rs.git"
license = "LGPL"

[dependencies]
gst-plugin = { git = "https://github.com/sdroege/gst-plugin-rs" }
glib = { git = "https://github.com/gtk-rs/glib" }
gstreamer = { git = "https://github.com/sdroege/gstreamer-rs" }
gstreamer-base = { git = "https://github.com/sdroege/gstreamer-rs" }
gstreamer-video = { git = "https://github.com/sdroege/gstreamer-rs" }
gstreamer-audio = { git = "https://github.com/sdroege/gstreamer-rs" }
byte-slice-cast = "0.1" # TODO delete
num-traits = "0.2" # TODO delete
gobject-subclass = "0.2"
gst-plugin = "0.3"
glib = "0.6"
gstreamer = "0.12"
gstreamer-base = "0.12"
gstreamer-video = "0.12"
gstreamer-audio = "0.12"
lazy_static = "1.1.0"

[lib]
name = "gstndi"

gst-plugin-ndi/README.md
@@ -1,17 +1,49 @@
TODO
====
GStreamer NDI Plugin
====================

See:
*Compiled and tested with Ubuntu 16.04.5 and GStreamer 1.8.3*

https://coaxion.net/blog/2018/01/how-to-write-gstreamer-elements-in-rust-part-1-a-video-filter-for-converting-rgb-to-grayscale/
https://coaxion.net/blog/2018/02/how-to-write-gstreamer-elements-in-rust-part-2-a-raw-audio-sine-wave-source/

Before compiling the element it is necessary to install Rust, the NDI SDK and the following GStreamer packages:

```
apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \
    gstreamer1.0-plugins-base gstreamer1.0-plugins-good \
    gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly \
    gstreamer1.0-libav libgstrtspserver-1.0-dev
```
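
Rust itself and the NDI SDK are not covered by the commands above; the following is a minimal sketch of one way to set them up (the SDK directory name and library path are assumptions, adjust them to your download):

```
# Sketch only: toolchain via rustup, then copy the NDI runtime library where
# the dynamic linker can find it. Paths below are assumptions, not from this repo.
curl https://sh.rustup.rs -sSf | sh
sudo cp "NDI SDK for Linux/lib/x86_64-linux-gnu/"libndi.so* /usr/local/lib/
sudo ldconfig
```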

Test
Compile NDI element and basic pipelines
-------

```
cargo build
export GST_PLUGIN_PATH=`pwd`/target/debug
gst-inspect-1.0 ndisrc

gst-inspect-1.0 ndi
gst-inspect-1.0 ndivideosrc
gst-inspect-1.0 ndiaudiosrc

gst-launch-1.0 ndivideosrc stream-name="GC-DEV2 (OBS)" ! autovideosink
gst-launch-1.0 ndiaudiosrc stream-name="GC-DEV2 (OBS)" ! autoaudiosink

gst-launch-1.0 ndivideosrc stream-name="GC-DEV2 (OBS)" ! autovideosink ndiaudiosrc stream-name="GC-DEV2 (OBS)" ! autoaudiosink
```
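
When the audio and video branches run in the same pipeline, as in the last example, it is usually advisable to decouple them with queue elements; a hedged variant of that pipeline (not from the original README):

```
gst-launch-1.0 ndivideosrc stream-name="GC-DEV2 (OBS)" ! queue ! autovideosink \
               ndiaudiosrc stream-name="GC-DEV2 (OBS)" ! queue ! autoaudiosink
```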

Debug pipelines:
```
#Check if the timestamps are correct
gst-launch-1.0 -v ndivideosrc name=gc-ndi-src stream-name="GC-DEV2 (OBS)" ! fakesink silent=false

#Debug sink to check if jitter is correct
GST_DEBUG=*basesink*:5 gst-launch-1.0 -v ndivideosrc name=gc-ndi-src stream-name="GC-DEV2 (OBS)" ! autovideosink

#Add latency when launching the pipeline
gst-launch-1.0 -v ndivideosrc name=gc-ndi-src stream-name="GC-DEV2 (OBS)" ! autovideosink ts-offset=1000000000
```
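
The sources register their own GStreamer debug categories (the audio element uses "ndiaudiosrc"; the video element presumably mirrors it with "ndivideosrc"), so their logging can also be enabled directly, for example:

```
GST_DEBUG=ndivideosrc:5,ndiaudiosrc:5 gst-launch-1.0 -v ndivideosrc stream-name="GC-DEV2 (OBS)" ! autovideosink
```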

More info about GStreamer plugins and Rust:
----------------------------------
https://coaxion.net/blog/2018/01/how-to-write-gstreamer-elements-in-rust-part-1-a-video-filter-for-converting-rgb-to-grayscale/
https://coaxion.net/blog/2018/02/how-to-write-gstreamer-elements-in-rust-part-2-a-raw-audio-sine-wave-source/

gst-plugin-ndi/src/lib.rs
@@ -1,3 +1,5 @@
#![allow(non_camel_case_types, non_upper_case_globals, non_snake_case)]

// Copyright (C) 2017 Sebastian Dröge <sebastian@centricular.com>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
@@ -7,6 +9,8 @@
// except according to those terms.

extern crate glib;
extern crate gobject_subclass;

#[macro_use]
extern crate gst_plugin;
#[macro_use]
@@ -15,18 +19,230 @@ extern crate gstreamer_audio as gst_audio;
extern crate gstreamer_base as gst_base;
extern crate gstreamer_video as gst_video;

extern crate byte_slice_cast;
extern crate num_traits;
#[macro_use]
extern crate lazy_static;

mod ndisrc;
mod ndivideosrc;
mod ndiaudiosrc;
pub mod ndilib;

use std::{thread, time};
use std::ffi::{CStr, CString};
use ndilib::*;
use gst_plugin::base_src::*;

use std::collections::HashMap;
use std::sync::Mutex;

use gst::GstObjectExt;

// Plugin entry point that should register all elements provided by this plugin,
// and everything else that this plugin might provide (e.g. typefinders or device providers).
fn plugin_init(plugin: &gst::Plugin) -> bool {
    ndisrc::register(plugin);
    ndivideosrc::register(plugin);
    ndiaudiosrc::register(plugin);
    true
}

struct ndi_receiver_info{
    stream_name: String,
    ip: String,
    video: bool,
    audio: bool,
    ndi_instance: NdiInstance,
    id: i8,
}

struct Ndi{
    initial_timestamp: u64,
    start_pts: gst::ClockTime,
}

static mut ndi_struct: Ndi = Ndi{
    initial_timestamp: 0,
    start_pts: gst::ClockTime(Some(0)),
};

lazy_static! {
    static ref hashmap_receivers: Mutex<HashMap<i8, ndi_receiver_info>> = {
        let m = HashMap::new();
        Mutex::new(m)
    };
}
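// hashmap_receivers is shared by the ndivideosrc and ndiaudiosrc elements so
// that a single NDI receiver can feed both an audio and a video source for the
// same stream; the `video`/`audio` flags in ndi_receiver_info record which
// halves are still using it, and `id_receiver` below hands out the map keys.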

static mut id_receiver: i8 = 0;

fn connect_ndi(cat: gst::DebugCategory, element: &BaseSrc, ip: String, stream_name: String) -> i8{
    gst_debug!(cat, obj: element, "Starting NDI connection...");

    let mut receivers = hashmap_receivers.lock().unwrap();
    let mut audio = false;
    let mut video = false;

    //FIXME Search for another way to know if the source is an audio or a video source
    if element.get_name().contains("audiosrc"){
        audio = true;
    }
    else
    {
        video = true;
    }

    for val in receivers.values_mut(){
        if val.ip == ip || val.stream_name == stream_name{
            if (val.audio && val.video) || (val.audio && audio) || (val.video && video){
                continue;
            }
            else {
                if video {
                    val.video = video;
                }
                else{
                    val.audio = audio;
                }
                return val.id;
            }
        }
    }
    unsafe {
        if !NDIlib_initialize() {
            gst_element_error!(element, gst::CoreError::Negotiation, ["Cannot run NDI: NDIlib_initialize error"]);
            // return false;
            return 0;
        }

        //TODO default values
        let NDI_find_create_desc: NDIlib_find_create_t = Default::default();
        let pNDI_find = NDIlib_find_create_v2(&NDI_find_create_desc);
        //let ip_ptr = CString::new(ip.clone()).unwrap();
        if pNDI_find.is_null() {
            gst_element_error!(element, gst::CoreError::Negotiation, ["Cannot run NDI: NDIlib_find_create_v2 error"]);
            // return false;
            return 0;
        }

        let mut total_sources: u32 = 0;
        let p_sources;

        // TODO Sleep 1s to wait for all sources
        thread::sleep(time::Duration::from_millis(2000));
        p_sources = NDIlib_find_get_current_sources(pNDI_find, &mut total_sources as *mut u32);

        // We need at least one source
        if p_sources.is_null() {
            gst_element_error!(element, gst::CoreError::Negotiation, ["Error getting NDIlib_find_get_current_sources"]);
            // return false;
            return 0;
        }

        let mut no_source: isize = -1;
        for i in 0..total_sources as isize{
            if CStr::from_ptr((*p_sources.offset(i)).p_ndi_name).to_string_lossy().into_owned() == stream_name ||
            CStr::from_ptr((*p_sources.offset(i)).p_ip_address).to_string_lossy().into_owned() == ip{
                no_source = i;
                break;
            }
        }
        if no_source == -1 {
            gst_element_error!(element, gst::ResourceError::OpenRead, ["Stream not found"]);
            // return false;
            return 0;
        }

        gst_debug!(cat, obj: element, "Total sources in network {}: Connecting to NDI source with name '{}' and address '{}'", total_sources,
            CStr::from_ptr((*p_sources.offset(no_source)).p_ndi_name)
                .to_string_lossy()
                .into_owned(),
            CStr::from_ptr((*p_sources.offset(no_source)).p_ip_address)
                .to_string_lossy()
                .into_owned());

        let source = *p_sources.offset(no_source).clone();

        let source_ip = CStr::from_ptr(source.p_ip_address).to_string_lossy().into_owned();
        let source_name = CStr::from_ptr(source.p_ndi_name).to_string_lossy().into_owned();

        // We now have at least one source, so we create a receiver to look at it.
        // We tell it that we prefer YCbCr video since it is more efficient for us. If the source has an alpha channel
        // it will still be provided in BGRA
        let p_ndi_name = CString::new("Galicaster NDI Receiver").unwrap();
        let NDI_recv_create_desc = NDIlib_recv_create_v3_t {
            source_to_connect_to: source,
            p_ndi_name: p_ndi_name.as_ptr(),
            ..Default::default()
        };

        let pNDI_recv = NDIlib_recv_create_v3(&NDI_recv_create_desc);
        if pNDI_recv.is_null() {
            //println!("Cannot run NDI: NDIlib_recv_create_v3 error.");
            gst_element_error!(element, gst::CoreError::Negotiation, ["Cannot run NDI: NDIlib_recv_create_v3 error"]);
            // return false;
            return 0;
        }

        // Destroy the NDI finder. We needed to have access to the pointers to p_sources[0]
        NDIlib_find_destroy(pNDI_find);

        // We are now going to mark this source as being on program output for tally purposes (but not on preview)
        let tally_state: NDIlib_tally_t = Default::default();
        NDIlib_recv_set_tally(pNDI_recv, &tally_state);

        // Enable hardware decompression support if the hardware supports it. Please read the caveats in the documentation
        // regarding this. There are times in which it might reduce the performance although on small stream numbers
        // it almost always yields the same or better performance.
        let data = CString::new("<ndi_hwaccel enabled=\"true\"/>").unwrap();
        let enable_hw_accel = NDIlib_metadata_frame_t {
            length: data.to_bytes().len() as i32,
            timecode: 0,
            p_data: data.as_ptr(),
        };

        NDIlib_recv_send_metadata(pNDI_recv, &enable_hw_accel);

        id_receiver += 1;
        receivers.insert(id_receiver, ndi_receiver_info{stream_name: source_name.clone(), ip: source_ip.clone(), video: video, audio: audio, ndi_instance: NdiInstance{recv: pNDI_recv}, id: id_receiver});

        // let start = SystemTime::now();
        // let since_the_epoch = start.duration_since(UNIX_EPOCH)
        //     .expect("Time went backwards");
        // println!("{:?}", since_the_epoch);
        // ndi_struct.start_pts = Some(since_the_epoch.as_secs() * 1000000000 +
        //     since_the_epoch.subsec_nanos() as u64);
        gst_debug!(cat, obj: element, "Started NDI connection");
        return id_receiver;
    }
}

fn stop_ndi(cat: gst::DebugCategory, element: &BaseSrc, id: i8) -> bool{
    gst_debug!(cat, obj: element, "Closing NDI connection...");
    let mut receivers = hashmap_receivers.lock().unwrap();
    {
        let val = receivers.get_mut(&id).unwrap();
        if val.video && val.audio{
            if element.get_name().contains("audiosrc"){
                val.audio = false;
            }
            else{
                val.video = false;
            }
            return true;
        }

        let recv = &val.ndi_instance;
        let pNDI_recv = recv.recv;
        unsafe{
            NDIlib_recv_destroy(pNDI_recv);
            // ndi_struct.recv = None;
            NDIlib_destroy();
        }
    }
    receivers.remove(&id);
    gst_debug!(cat, obj: element, "Closed NDI connection");
    return true;
}

// Static plugin metadata that is directly stored in the plugin shared object and read by GStreamer
// upon loading.
// Plugin name, plugin description, plugin entry point function, version number of this plugin,

gst-plugin-ndi/src/ndiaudiosrc.rs (new file, 490 lines)
@@ -0,0 +1,490 @@
#![allow(non_camel_case_types, non_upper_case_globals, non_snake_case)]
|
||||
|
||||
use glib;
|
||||
use gst;
|
||||
use gst::prelude::*;
|
||||
use gst_audio;
|
||||
use gst_base::prelude::*;
|
||||
|
||||
use gst_plugin::base_src::*;
|
||||
use gst_plugin::element::*;
|
||||
use gobject_subclass::object::*;
|
||||
|
||||
use std::sync::Mutex;
|
||||
use std::{i32, u32};
|
||||
|
||||
use std::ptr;
|
||||
|
||||
use ndilib::*;
|
||||
use connect_ndi;
|
||||
use stop_ndi;
|
||||
use ndi_struct;
|
||||
|
||||
use hashmap_receivers;
|
||||
|
||||
// Property value storage
|
||||
#[derive(Debug, Clone)]
|
||||
struct Settings {
|
||||
stream_name: String,
|
||||
ip: String,
|
||||
id_receiver: i8,
|
||||
}
|
||||
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Settings {
|
||||
stream_name: String::from("Fixed ndi stream name"),
|
||||
ip: String::from(""),
|
||||
id_receiver: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Metadata for the properties
|
||||
static PROPERTIES: [Property; 2] = [
|
||||
Property::String(
|
||||
"stream-name",
|
||||
"Sream Name",
|
||||
"Name of the streaming device",
|
||||
None,
|
||||
PropertyMutability::ReadWrite,
|
||||
),
|
||||
Property::String(
|
||||
"ip",
|
||||
"Stream IP",
|
||||
"Stream IP",
|
||||
None,
|
||||
PropertyMutability::ReadWrite,
|
||||
),
|
||||
];
|
||||
|
||||
// Stream-specific state, i.e. audio format configuration
|
||||
// and sample offset
|
||||
struct State {
|
||||
info: Option<gst_audio::AudioInfo>,
|
||||
}
|
||||
|
||||
impl Default for State {
|
||||
fn default() -> State {
|
||||
State {
|
||||
info: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct TimestampData{
|
||||
offset: u64,
|
||||
}
|
||||
|
||||
// Struct containing all the element data
|
||||
struct NdiAudioSrc {
|
||||
cat: gst::DebugCategory,
|
||||
settings: Mutex<Settings>,
|
||||
state: Mutex<State>,
|
||||
timestamp_data: Mutex<TimestampData>,
|
||||
}
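// Each NdiAudioSrc instance keeps its debug category, the element properties
// (stream-name, ip) plus the internal receiver id, the negotiated AudioInfo,
// and a running offset used to number the buffers it produces.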
|
||||
|
||||
impl NdiAudioSrc {
|
||||
// Called when a new instance is to be created
|
||||
fn new(element: &BaseSrc) -> Box<BaseSrcImpl<BaseSrc>> {
|
||||
// Initialize live-ness and notify the base class that
|
||||
// we'd like to operate in Time format
|
||||
element.set_live(true);
|
||||
element.set_format(gst::Format::Time);
|
||||
|
||||
Box::new(Self {
|
||||
cat: gst::DebugCategory::new(
|
||||
"ndiaudiosrc",
|
||||
gst::DebugColorFlags::empty(),
|
||||
"NewTek NDI Audio Source",
|
||||
),
|
||||
settings: Mutex::new(Default::default()),
|
||||
state: Mutex::new(Default::default()),
|
||||
timestamp_data: Mutex::new(TimestampData{
|
||||
offset: 0,
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
// Called exactly once when registering the type. Used for
|
||||
// setting up metadata for all instances, e.g. the name and
|
||||
// classification and the pad templates with their caps.
|
||||
//
|
||||
// Actual instances can create pads based on those pad templates
|
||||
// with a subset of the caps given here. In case of basesrc,
|
||||
// a "src" and "sink" pad template are required here and the base class
|
||||
// will automatically instantiate pads for them.
|
||||
//
|
||||
// Our element here can output f32 and f64
|
||||
fn class_init(klass: &mut BaseSrcClass) {
|
||||
klass.set_metadata(
|
||||
"NewTek NDI Audio Source",
|
||||
"Source",
|
||||
"NewTek NDI audio source",
|
||||
"Ruben Gonzalez <rubenrua@teltek.es>, Daniel Vilar <daniel.peiteado@teltek.es>",
|
||||
);
|
||||
|
||||
// On the src pad, we can produce F32/F64 with any sample rate
|
||||
// and any number of channels
|
||||
let caps = gst::Caps::new_simple(
|
||||
"audio/x-raw",
|
||||
&[
|
||||
(
|
||||
"format",
|
||||
&gst::List::new(&[
|
||||
//TODO add all formats?
|
||||
&gst_audio::AUDIO_FORMAT_F32.to_string(),
|
||||
&gst_audio::AUDIO_FORMAT_F64.to_string(),
|
||||
&gst_audio::AUDIO_FORMAT_S16.to_string(),
|
||||
]),
|
||||
),
|
||||
("rate", &gst::IntRange::<i32>::new(1, i32::MAX)),
|
||||
("channels", &gst::IntRange::<i32>::new(1, i32::MAX)),
|
||||
("layout", &"interleaved"),
|
||||
],
|
||||
);
|
||||
// The src pad template must be named "src" for basesrc
|
||||
// and specifies a pad that is always there
|
||||
let src_pad_template = gst::PadTemplate::new(
|
||||
"src",
|
||||
gst::PadDirection::Src,
|
||||
gst::PadPresence::Always,
|
||||
&caps,
|
||||
//&gst::Caps::new_any(),
|
||||
);
|
||||
klass.add_pad_template(src_pad_template);
|
||||
|
||||
// Install all our properties
|
||||
klass.install_properties(&PROPERTIES);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Virtual methods of GObject itself
|
||||
impl ObjectImpl<BaseSrc> for NdiAudioSrc {
|
||||
// Called whenever a value of a property is changed. It can be called
|
||||
// at any time from any thread.
|
||||
fn set_property(&self, obj: &glib::Object, id: u32, value: &glib::Value) {
|
||||
let prop = &PROPERTIES[id as usize];
|
||||
let element = obj.clone().downcast::<BaseSrc>().unwrap();
|
||||
|
||||
match *prop {
|
||||
Property::String("stream-name", ..) => {
|
||||
let mut settings = self.settings.lock().unwrap();
|
||||
let stream_name = value.get().unwrap();
|
||||
gst_debug!(
|
||||
self.cat,
|
||||
obj: &element,
|
||||
"Changing stream-name from {} to {}",
|
||||
settings.stream_name,
|
||||
stream_name
|
||||
);
|
||||
settings.stream_name = stream_name;
|
||||
drop(settings);
|
||||
|
||||
let _ =
|
||||
element.post_message(&gst::Message::new_latency().src(Some(&element)).build());
|
||||
},
|
||||
Property::String("ip", ..) => {
|
||||
let mut settings = self.settings.lock().unwrap();
|
||||
let ip = value.get().unwrap();
|
||||
gst_debug!(
|
||||
self.cat,
|
||||
obj: &element,
|
||||
"Changing ip from {} to {}",
|
||||
settings.ip,
|
||||
ip
|
||||
);
|
||||
settings.ip = ip;
|
||||
drop(settings);
|
||||
|
||||
let _ =
|
||||
element.post_message(&gst::Message::new_latency().src(Some(&element)).build());
|
||||
}
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
|
||||
// Called whenever a value of a property is read. It can be called
|
||||
// at any time from any thread.
|
||||
fn get_property(&self, _obj: &glib::Object, id: u32) -> Result<glib::Value, ()> {
|
||||
let prop = &PROPERTIES[id as usize];
|
||||
|
||||
match *prop {
|
||||
Property::String("stream-name", ..) => {
|
||||
let settings = self.settings.lock().unwrap();
|
||||
//TODO to_value presumably only works with numbers
|
||||
Ok(settings.stream_name.to_value())
|
||||
},
|
||||
Property::String("ip", ..) => {
|
||||
let settings = self.settings.lock().unwrap();
|
||||
//TODO to_value presumably only works with numbers
|
||||
Ok(settings.ip.to_value())
|
||||
}
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Virtual methods of gst::Element. We override none
|
||||
impl ElementImpl<BaseSrc> for NdiAudioSrc {
|
||||
fn change_state(&self, element: &BaseSrc, transition: gst::StateChange) -> gst::StateChangeReturn {
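// On PausedToPlaying this grabs a first audio frame and records its timestamp
// in the shared ndi_struct.initial_timestamp; create() later subtracts that
// value so audio and video buffers share a common zero point for their PTS.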
|
||||
if transition == gst::StateChange::PausedToPlaying{
|
||||
let receivers = hashmap_receivers.lock().unwrap();
|
||||
let settings = self.settings.lock().unwrap();
|
||||
|
||||
let receiver = receivers.get(&settings.id_receiver).unwrap();
|
||||
let recv = &receiver.ndi_instance;
|
||||
let pNDI_recv = recv.recv;
|
||||
|
||||
let audio_frame: NDIlib_audio_frame_v2_t = Default::default();
|
||||
|
||||
let mut frame_type: NDIlib_frame_type_e = NDIlib_frame_type_e::NDIlib_frame_type_none;
|
||||
unsafe{
|
||||
while frame_type != NDIlib_frame_type_e::NDIlib_frame_type_audio{
|
||||
frame_type = NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000);
|
||||
}
|
||||
|
||||
if ndi_struct.initial_timestamp <= audio_frame.timestamp as u64 || ndi_struct.initial_timestamp == 0{
|
||||
ndi_struct.initial_timestamp = audio_frame.timestamp as u64;
|
||||
}
|
||||
}
|
||||
}
|
||||
element.parent_change_state(transition)
|
||||
}
|
||||
}
|
||||
|
||||
// Virtual methods of gst_base::BaseSrc
|
||||
impl BaseSrcImpl<BaseSrc> for NdiAudioSrc {
|
||||
// Called whenever the input/output caps are changing, i.e. in the very beginning before data
|
||||
// flow happens and whenever the situation in the pipeline is changing. All buffers after this
|
||||
// call have the caps given here.
|
||||
//
|
||||
// We simply remember the resulting AudioInfo from the caps to be able to use this for knowing
|
||||
// the sample rate, etc. when creating buffers
|
||||
fn set_caps(&self, element: &BaseSrc, caps: &gst::CapsRef) -> bool {
|
||||
|
||||
let info = match gst_audio::AudioInfo::from_caps(caps) {
|
||||
None => return false,
|
||||
Some(info) => info,
|
||||
};
|
||||
|
||||
gst_debug!(self.cat, obj: element, "Configuring for caps {}", caps);
|
||||
|
||||
// TODO This might fail if we don't create the structure from scratch, but if we do we can't set recv to None
|
||||
let mut state = self.state.lock().unwrap();
|
||||
state.info = Some(info);
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
// Called when starting, so we can initialize all stream-related state to its defaults
|
||||
fn start(&self, element: &BaseSrc) -> bool {
|
||||
// Reset state
|
||||
*self.state.lock().unwrap() = Default::default();
|
||||
|
||||
let mut settings = self.settings.lock().unwrap();
|
||||
settings.id_receiver = connect_ndi(self.cat, element, settings.ip.clone(), settings.stream_name.clone());
|
||||
if settings.id_receiver == 0{
|
||||
return false;
|
||||
}
|
||||
else{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// Called when shutting down the element so we can release all stream-related state
|
||||
fn stop(&self, element: &BaseSrc) -> bool {
|
||||
// Reset state
|
||||
*self.state.lock().unwrap() = Default::default();
|
||||
|
||||
let settings = self.settings.lock().unwrap();
|
||||
stop_ndi(self.cat, element, settings.id_receiver.clone());
|
||||
// Commented because when adding ndi destroy stopped in this line
|
||||
//*self.state.lock().unwrap() = Default::default();
|
||||
true
|
||||
}
|
||||
|
||||
fn query(&self, element: &BaseSrc, query: &mut gst::QueryRef) -> bool {
|
||||
use gst::QueryView;
|
||||
|
||||
match query.view_mut() {
|
||||
// We only work in Push mode. In Pull mode, create() could be called with
|
||||
// arbitrary offsets and we would have to produce for that specific offset
|
||||
QueryView::Scheduling(ref mut q) => {
|
||||
q.set(gst::SchedulingFlags::SEQUENTIAL, 1, -1, 0);
|
||||
q.add_scheduling_modes(&[gst::PadMode::Push]);
|
||||
return true;
|
||||
}
|
||||
// In Live mode we will have a latency equal to the number of samples in each buffer.
|
||||
// We can't output samples before they were produced, and the last sample of a buffer
|
||||
// is produced that much after the beginning, leading to this latency calculation
|
||||
// QueryView::Latency(ref mut q) => {
|
||||
// let settings = &*self.settings.lock().unwrap();
|
||||
// let state = self.state.lock().unwrap();
|
||||
//
|
||||
// if let Some(ref _info) = state.info {
|
||||
// // let latency = gst::SECOND
|
||||
// // .mul_div_floor(1024 as u64, _info.rate() as u64)
|
||||
// // .unwrap();
|
||||
// let latency = gst::SECOND.mul_div_floor(3 as u64, 2 as u64).unwrap();
|
||||
// // let latency = gst::SECOND
|
||||
// // .mul_div_floor(1 as u64, 30 as u64)
|
||||
// // .unwrap();
|
||||
// // gst_debug!(self.cat, obj: element, "Returning latency {}", latency);
|
||||
// let max = latency * 1843200;
|
||||
// // println!("{:?}", latency);
|
||||
// // println!("{:?}",max);
|
||||
// q.set(true, latency, max);
|
||||
// return true;
|
||||
// } else {
|
||||
// return false;
|
||||
// }
|
||||
// }
|
||||
_ => (),
|
||||
}
|
||||
BaseSrcBase::parent_query(element, query)
|
||||
}
|
||||
|
||||
fn fixate(&self, element: &BaseSrc, caps: gst::Caps) -> gst::Caps {
|
||||
//We need to set the correct caps resolution and framerate
|
||||
let receivers = hashmap_receivers.lock().unwrap();
|
||||
let settings = self.settings.lock().unwrap();
|
||||
|
||||
let receiver = receivers.get(&settings.id_receiver).unwrap();
|
||||
|
||||
let recv = &receiver.ndi_instance;
|
||||
let pNDI_recv = recv.recv;
|
||||
|
||||
let audio_frame: NDIlib_audio_frame_v2_t = Default::default();
|
||||
|
||||
let mut frame_type: NDIlib_frame_type_e = NDIlib_frame_type_e::NDIlib_frame_type_none;
|
||||
while frame_type != NDIlib_frame_type_e::NDIlib_frame_type_audio{
|
||||
unsafe{
|
||||
frame_type = NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000);
|
||||
}
|
||||
}
|
||||
let mut caps = gst::Caps::truncate(caps);
|
||||
{
|
||||
let caps = caps.make_mut();
|
||||
let s = caps.get_mut_structure(0).unwrap();
|
||||
//s.fixate_field_nearest_int("rate", audio_frame.sample_rate);
|
||||
s.fixate_field_nearest_int("rate", audio_frame.sample_rate / audio_frame.no_channels);
|
||||
s.fixate_field_nearest_int("channels", audio_frame.no_channels);
|
||||
}
|
||||
|
||||
// Let BaseSrc fixate anything else for us. We could've alternatively have
|
||||
// called Caps::fixate() here
|
||||
element.parent_fixate(caps)
|
||||
// }
|
||||
}
|
||||
|
||||
//Creates the audio buffers
|
||||
fn create(
|
||||
&self,
|
||||
element: &BaseSrc,
|
||||
_offset: u64,
|
||||
_length: u32,
|
||||
) -> Result<gst::Buffer, gst::FlowReturn> {
|
||||
// Keep a local copy of the values of all our properties at this very moment. This
|
||||
// ensures that the mutex is never locked for long and the application wouldn't
|
||||
// have to block until this function returns when getting/setting property values
|
||||
let _settings = &*self.settings.lock().unwrap();
|
||||
|
||||
let mut timestamp_data = self.timestamp_data.lock().unwrap();
|
||||
// Get a locked reference to our state, i.e. the input and output AudioInfo
|
||||
let state = self.state.lock().unwrap();
|
||||
let _info = match state.info {
|
||||
None => {
|
||||
gst_element_error!(element, gst::CoreError::Negotiation, ["Have no caps yet"]);
|
||||
return Err(gst::FlowReturn::NotNegotiated);
|
||||
}
|
||||
Some(ref info) => info.clone(),
|
||||
};
|
||||
let receivers = hashmap_receivers.lock().unwrap();
|
||||
|
||||
let recv = &receivers.get(&_settings.id_receiver).unwrap().ndi_instance;
|
||||
let pNDI_recv = recv.recv;
|
||||
|
||||
let pts: u64;
|
||||
let audio_frame: NDIlib_audio_frame_v2_t = Default::default();
|
||||
|
||||
unsafe{
|
||||
let time = ndi_struct.initial_timestamp;
|
||||
|
||||
let mut skip_frame = true;
|
||||
while skip_frame {
|
||||
let frame_type = NDIlib_recv_capture_v2(pNDI_recv, ptr::null(), &audio_frame, ptr::null(), 1000,);
|
||||
if frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none || frame_type == NDIlib_frame_type_e::NDIlib_frame_type_error {
|
||||
gst_element_error!(element, gst::ResourceError::Read, ["NDI frame type none received, assuming that the source closed the stream...."]);
|
||||
return Err(gst::FlowReturn::CustomError);
|
||||
}
|
||||
if time >= (audio_frame.timestamp as u64){
|
||||
gst_debug!(self.cat, obj: element, "Frame timestamp ({:?}) is lower than the one received in the first frame from NDI ({:?}), so skipping...", (audio_frame.timestamp as u64), time);
|
||||
}
|
||||
else{
|
||||
skip_frame = false;
|
||||
}
|
||||
}
|
||||
|
||||
pts = audio_frame.timestamp as u64 - time;
|
||||
|
||||
let buff_size = ((audio_frame.channel_stride_in_bytes)) as usize;
|
||||
let mut buffer = gst::Buffer::with_size(buff_size).unwrap();
|
||||
{
|
||||
let vec = Vec::from_raw_parts(audio_frame.p_data as *mut u8, buff_size, buff_size);
|
||||
let pts: gst::ClockTime = (pts * 100).into();
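// NDI timestamps are expressed in 100 ns units, so multiplying by 100 turns
// the difference computed above into the nanoseconds gst::ClockTime expects.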
|
||||
|
||||
let duration: gst::ClockTime = (((audio_frame.no_samples as f64 / audio_frame.sample_rate as f64) * 1000000000.0) as u64).into();
|
||||
let buffer = buffer.get_mut().unwrap();
|
||||
|
||||
if ndi_struct.start_pts == gst::ClockTime(Some(0)){
|
||||
ndi_struct.start_pts = element.get_clock().unwrap().get_time() - element.get_base_time();
|
||||
}
|
||||
|
||||
buffer.set_pts(pts + ndi_struct.start_pts);
|
||||
buffer.set_duration(duration);
|
||||
buffer.set_offset(timestamp_data.offset);
|
||||
buffer.set_offset_end(timestamp_data.offset + 1);
|
||||
timestamp_data.offset = timestamp_data.offset + 1;
|
||||
buffer.copy_from_slice(0, &vec).unwrap();
|
||||
}
|
||||
|
||||
gst_debug!(self.cat, obj: element, "Produced buffer {:?}", buffer);
|
||||
|
||||
Ok(buffer)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This zero-sized struct is containing the static metadata of our element. It is only necessary to
|
||||
// be able to implement traits on it, but e.g. a plugin that registers multiple elements with the
|
||||
// same code would use this struct to store information about the concrete element. An example of
|
||||
// this would be a plugin that wraps around a library that has multiple decoders with the same API,
|
||||
// but wants (as it should) a separate element registered for each decoder.
|
||||
struct NdiAudioSrcStatic;
|
||||
|
||||
// The basic trait for registering the type: This returns a name for the type and registers the
|
||||
// instance and class initializations functions with the type system, thus hooking everything
|
||||
// together.
|
||||
impl ImplTypeStatic<BaseSrc> for NdiAudioSrcStatic {
|
||||
fn get_name(&self) -> &str {
|
||||
"NdiAudioSrc"
|
||||
}
|
||||
|
||||
fn new(&self, element: &BaseSrc) -> Box<BaseSrcImpl<BaseSrc>> {
|
||||
NdiAudioSrc::new(element)
|
||||
}
|
||||
|
||||
fn class_init(&self, klass: &mut BaseSrcClass) {
|
||||
NdiAudioSrc::class_init(klass);
|
||||
}
|
||||
}
|
||||
|
||||
// Registers the type for our element, and then registers in GStreamer under
|
||||
// the name NdiAudioSrc for being able to instantiate it via e.g.
|
||||
// gst::ElementFactory::make().
|
||||
pub fn register(plugin: &gst::Plugin) {
|
||||
let type_ = register_type(NdiAudioSrcStatic);
|
||||
gst::Element::register(plugin, "ndiaudiosrc", 0, type_);
|
||||
}
|

gst-plugin-ndi/src/ndilib.rs (new file, 257 lines)
@@ -0,0 +1,257 @@
#![allow(non_camel_case_types, non_upper_case_globals, non_snake_case)]
|
||||
|
||||
use std::ptr;
|
||||
|
||||
#[link(name = "ndi")]
|
||||
extern "C" {
|
||||
pub fn NDIlib_initialize() -> bool;
|
||||
pub fn NDIlib_find_create_v2(
|
||||
p_create_settings: *const NDIlib_find_create_t,
|
||||
) -> NDIlib_find_instance_t;
|
||||
pub fn NDIlib_find_get_current_sources(
|
||||
p_instance: NDIlib_find_instance_t,
|
||||
p_no_sources: *mut u32,
|
||||
) -> *const NDIlib_source_t;
|
||||
pub fn NDIlib_recv_create_v3(
|
||||
p_create_settings: *const NDIlib_recv_create_v3_t,
|
||||
) -> NDIlib_recv_instance_t;
|
||||
pub fn NDIlib_find_destroy(p_instance: NDIlib_find_instance_t);
|
||||
pub fn NDIlib_recv_destroy(p_instance: NDIlib_recv_instance_t);
|
||||
pub fn NDIlib_destroy();
|
||||
pub fn NDIlib_recv_set_tally(
|
||||
p_instance: NDIlib_recv_instance_t,
|
||||
p_tally: *const NDIlib_tally_t,
|
||||
) -> bool;
|
||||
pub fn NDIlib_recv_send_metadata(
|
||||
p_instance: NDIlib_recv_instance_t,
|
||||
p_metadata: *const NDIlib_metadata_frame_t,
|
||||
) -> bool;
|
||||
pub fn NDIlib_recv_capture_v2(
|
||||
p_instance: NDIlib_recv_instance_t,
|
||||
p_video_data: *const NDIlib_video_frame_v2_t,
|
||||
p_audio_data: *const NDIlib_audio_frame_v2_t,
|
||||
p_metadata: *const NDIlib_metadata_frame_t,
|
||||
timeout_in_ms: u32,
|
||||
) -> NDIlib_frame_type_e;
|
||||
}
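// Hand-written FFI bindings covering the subset of the NDI SDK the plugin
// needs: initialization, source discovery (find), receiver create/destroy,
// tally and metadata, and frame capture.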
|
||||
|
||||
pub type NDIlib_find_instance_t = *mut ::std::os::raw::c_void;
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct NDIlib_find_create_t {
|
||||
pub show_local_sources: bool,
|
||||
pub p_groups: *const ::std::os::raw::c_char,
|
||||
pub p_extra_ips: *const ::std::os::raw::c_char,
|
||||
}
|
||||
|
||||
impl Default for NDIlib_find_create_t {
|
||||
fn default() -> Self {
|
||||
NDIlib_find_create_t {
|
||||
show_local_sources: true,
|
||||
p_groups: ptr::null(),
|
||||
p_extra_ips: ptr::null(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct NDIlib_source_t {
|
||||
pub p_ndi_name: *const ::std::os::raw::c_char,
|
||||
pub p_ip_address: *const ::std::os::raw::c_char,
|
||||
}
|
||||
|
||||
impl Default for NDIlib_source_t {
|
||||
fn default() -> Self {
|
||||
NDIlib_source_t {
|
||||
p_ndi_name: ptr::null(),
|
||||
p_ip_address: ptr::null(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[repr(i32)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum NDIlib_frame_type_e {
|
||||
NDIlib_frame_type_none = 0,
|
||||
NDIlib_frame_type_video = 1,
|
||||
NDIlib_frame_type_audio = 2,
|
||||
NDIlib_frame_type_metadata = 3,
|
||||
NDIlib_frame_type_error = 4,
|
||||
NDIlib_frame_type_status_change = 100,
|
||||
}
|
||||
|
||||
#[repr(i32)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum NDIlib_recv_bandwidth_e {
|
||||
NDIlib_recv_bandwidth_metadata_only = -10,
|
||||
NDIlib_recv_bandwidth_audio_only = 10,
|
||||
NDIlib_recv_bandwidth_lowest = 0,
|
||||
NDIlib_recv_bandwidth_highest = 100,
|
||||
}
|
||||
|
||||
#[repr(u32)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum NDIlib_recv_color_format_e {
|
||||
NDIlib_recv_color_format_BGRX_BGRA = 0,
|
||||
NDIlib_recv_color_format_UYVY_BGRA = 1,
|
||||
NDIlib_recv_color_format_RGBX_RGBA = 2,
|
||||
NDIlib_recv_color_format_UYVY_RGBA = 3,
|
||||
NDIlib_recv_color_format_fastest = 100,
|
||||
}
|
||||
|
||||
#[repr(u32)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum NDIlib_FourCC_type_e {
|
||||
NDIlib_FourCC_type_UYVY = 1498831189,
|
||||
NDIlib_FourCC_type_BGRA = 1095911234,
|
||||
NDIlib_FourCC_type_BGRX = 1481787202,
|
||||
NDIlib_FourCC_type_RGBA = 1094862674,
|
||||
NDIlib_FourCC_type_RGBX = 1480738642,
|
||||
NDIlib_FourCC_type_UYVA = 1096178005,
|
||||
}
|
||||
|
||||
#[repr(u32)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum NDIlib_frame_format_type_e {
|
||||
NDIlib_frame_format_type_progressive = 1,
|
||||
NDIlib_frame_format_type_interleaved = 0,
|
||||
NDIlib_frame_format_type_field_0 = 2,
|
||||
NDIlib_frame_format_type_field_1 = 3,
|
||||
}
|
||||
|
||||
pub const NDIlib_send_timecode_synthesize: i64 = ::std::i64::MAX;
|
||||
pub const NDIlib_send_timecode_empty: i64 = 0;
|
||||
pub const NDIlib_recv_timestamp_undefined: i64 = ::std::i64::MAX;
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct NDIlib_recv_create_v3_t {
|
||||
pub source_to_connect_to: NDIlib_source_t,
|
||||
pub color_format: NDIlib_recv_color_format_e,
|
||||
pub bandwidth: NDIlib_recv_bandwidth_e,
|
||||
pub allow_video_fields: bool,
|
||||
pub p_ndi_name: *const ::std::os::raw::c_char,
|
||||
}
|
||||
|
||||
impl Default for NDIlib_recv_create_v3_t {
|
||||
fn default() -> Self {
|
||||
NDIlib_recv_create_v3_t {
|
||||
source_to_connect_to: Default::default(),
|
||||
allow_video_fields: true,
|
||||
bandwidth: NDIlib_recv_bandwidth_e::NDIlib_recv_bandwidth_highest,
|
||||
color_format: NDIlib_recv_color_format_e::NDIlib_recv_color_format_UYVY_BGRA,
|
||||
p_ndi_name: ptr::null(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub type NDIlib_recv_instance_t = *mut ::std::os::raw::c_void;
|
||||
|
||||
//Rust wrapper around *mut ::std::os::raw::c_void
|
||||
pub struct NdiInstance {
|
||||
pub recv: NDIlib_recv_instance_t,
|
||||
// pub audio: bool,
|
||||
}
|
||||
|
||||
unsafe impl ::std::marker::Send for NdiInstance {}
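// Send is asserted manually because NdiInstance only wraps a raw receiver
// pointer and has to live inside the global hashmap_receivers Mutex, which is
// shared across streaming threads.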
|
||||
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct NDIlib_tally_t {
|
||||
pub on_program: bool,
|
||||
pub on_preview: bool,
|
||||
}
|
||||
|
||||
impl Default for NDIlib_tally_t {
|
||||
fn default() -> Self {
|
||||
NDIlib_tally_t {
|
||||
on_program: false,
|
||||
on_preview: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct NDIlib_metadata_frame_t {
|
||||
pub length: ::std::os::raw::c_int,
|
||||
pub timecode: i64,
|
||||
pub p_data: *const ::std::os::raw::c_char,
|
||||
}
|
||||
|
||||
impl Default for NDIlib_metadata_frame_t {
|
||||
fn default() -> Self {
|
||||
NDIlib_metadata_frame_t {
|
||||
length: 0,
|
||||
timecode: 0, //NDIlib_send_timecode_synthesize,
|
||||
p_data: ptr::null(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct NDIlib_video_frame_v2_t {
|
||||
pub xres: ::std::os::raw::c_int,
|
||||
pub yres: ::std::os::raw::c_int,
|
||||
pub FourCC: NDIlib_FourCC_type_e,
|
||||
pub frame_rate_N: ::std::os::raw::c_int,
|
||||
pub frame_rate_D: ::std::os::raw::c_int,
|
||||
pub picture_aspect_ratio: ::std::os::raw::c_float,
|
||||
pub frame_format_type: NDIlib_frame_format_type_e,
|
||||
pub timecode: i64,
|
||||
pub p_data: *const ::std::os::raw::c_char,
|
||||
pub line_stride_in_bytes: ::std::os::raw::c_int,
|
||||
pub p_metadata: *const ::std::os::raw::c_char,
|
||||
pub timestamp: i64,
|
||||
}
|
||||
|
||||
impl Default for NDIlib_video_frame_v2_t {
|
||||
fn default() -> Self {
|
||||
NDIlib_video_frame_v2_t {
|
||||
xres: 0,
|
||||
yres: 0,
|
||||
FourCC: NDIlib_FourCC_type_e::NDIlib_FourCC_type_UYVY,
|
||||
frame_rate_N: 30000,
|
||||
frame_rate_D: 1001,
|
||||
picture_aspect_ratio: 0.0,
|
||||
frame_format_type: NDIlib_frame_format_type_e::NDIlib_frame_format_type_progressive,
|
||||
timecode: NDIlib_send_timecode_synthesize,
|
||||
p_data: ptr::null(),
|
||||
line_stride_in_bytes: 0,
|
||||
p_metadata: ptr::null(),
|
||||
timestamp: NDIlib_send_timecode_empty,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct NDIlib_audio_frame_v2_t {
|
||||
pub sample_rate: ::std::os::raw::c_int,
|
||||
pub no_channels: ::std::os::raw::c_int,
|
||||
pub no_samples: ::std::os::raw::c_int,
|
||||
pub timecode: i64,
|
||||
pub p_data: *const ::std::os::raw::c_float,
|
||||
pub channel_stride_in_bytes: ::std::os::raw::c_int,
|
||||
pub p_metadata: *const ::std::os::raw::c_char,
|
||||
pub timestamp: i64,
|
||||
}
|
||||
|
||||
impl Default for NDIlib_audio_frame_v2_t {
|
||||
fn default() -> Self {
|
||||
NDIlib_audio_frame_v2_t {
|
||||
sample_rate: 48000,
|
||||
no_channels: 2,
|
||||
no_samples: 0,
|
||||
timecode: NDIlib_send_timecode_synthesize,
|
||||
p_data: ptr::null(),
|
||||
channel_stride_in_bytes: 0,
|
||||
p_metadata: ptr::null(),
|
||||
timestamp: NDIlib_send_timecode_empty,
|
||||
}
|
||||
}
|
||||
}
|

gst-plugin-ndi/src/ndisrc.rs (deleted, 832 lines)
@@ -1,832 +0,0 @@
// Copyright (C) 2018 Sebastian Dröge <sebastian@centricular.com>
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use glib;
|
||||
use gst;
|
||||
use gst::prelude::*;
|
||||
use gst_audio;
|
||||
use gst_base::prelude::*;
|
||||
|
||||
use byte_slice_cast::*;
|
||||
|
||||
use gst_plugin::base_src::*;
|
||||
use gst_plugin::element::*;
|
||||
use gst_plugin::object::*;
|
||||
use gst_plugin::properties::*;
|
||||
|
||||
use std::ops::Rem;
|
||||
use std::sync::Mutex;
|
||||
use std::{i32, u32};
|
||||
|
||||
use num_traits::cast::NumCast;
|
||||
use num_traits::float::Float;
|
||||
|
||||
// Default values of properties
|
||||
const DEFAULT_SAMPLES_PER_BUFFER: u32 = 1024;
|
||||
const DEFAULT_FREQ: u32 = 440;
|
||||
const DEFAULT_VOLUME: f64 = 0.8;
|
||||
const DEFAULT_MUTE: bool = false;
|
||||
const DEFAULT_IS_LIVE: bool = false;
|
||||
|
||||
// Property value storage
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
struct Settings {
|
||||
samples_per_buffer: u32,
|
||||
freq: u32,
|
||||
volume: f64,
|
||||
mute: bool,
|
||||
is_live: bool,
|
||||
}
|
||||
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Settings {
|
||||
samples_per_buffer: DEFAULT_SAMPLES_PER_BUFFER,
|
||||
freq: DEFAULT_FREQ,
|
||||
volume: DEFAULT_VOLUME,
|
||||
mute: DEFAULT_MUTE,
|
||||
is_live: DEFAULT_IS_LIVE,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Metadata for the properties
|
||||
static PROPERTIES: [Property; 5] = [
|
||||
Property::UInt(
|
||||
"samples-per-buffer",
|
||||
"Samples Per Buffer",
|
||||
"Number of samples per output buffer",
|
||||
(1, u32::MAX),
|
||||
DEFAULT_SAMPLES_PER_BUFFER,
|
||||
PropertyMutability::ReadWrite,
|
||||
),
|
||||
Property::UInt(
|
||||
"freq",
|
||||
"Frequency",
|
||||
"Frequency",
|
||||
(1, u32::MAX),
|
||||
DEFAULT_FREQ,
|
||||
PropertyMutability::ReadWrite,
|
||||
),
|
||||
Property::Double(
|
||||
"volume",
|
||||
"Volume",
|
||||
"Output volume",
|
||||
(0.0, 10.0),
|
||||
DEFAULT_VOLUME,
|
||||
PropertyMutability::ReadWrite,
|
||||
),
|
||||
Property::Boolean(
|
||||
"mute",
|
||||
"Mute",
|
||||
"Mute",
|
||||
DEFAULT_MUTE,
|
||||
PropertyMutability::ReadWrite,
|
||||
),
|
||||
Property::Boolean(
|
||||
"is-live",
|
||||
"Is Live",
|
||||
"(Pseudo) live output",
|
||||
DEFAULT_IS_LIVE,
|
||||
PropertyMutability::ReadWrite,
|
||||
),
|
||||
];
|
||||
|
||||
// Stream-specific state, i.e. audio format configuration
// and sample offset
struct State {
    info: Option<gst_audio::AudioInfo>,
    sample_offset: u64,
    sample_stop: Option<u64>,
    accumulator: f64,
}

impl Default for State {
    fn default() -> State {
        State {
            info: None,
            sample_offset: 0,
            sample_stop: None,
            accumulator: 0.0,
        }
    }
}

struct ClockWait {
    clock_id: Option<gst::ClockId>,
    flushing: bool,
}

// Struct containing all the element data
struct NdiSrc {
    cat: gst::DebugCategory,
    settings: Mutex<Settings>,
    state: Mutex<State>,
    clock_wait: Mutex<ClockWait>,
}

impl NdiSrc {
    // Called when a new instance is to be created
    fn new(element: &BaseSrc) -> Box<BaseSrcImpl<BaseSrc>> {
        // Initialize liveness and notify the base class that
        // we'd like to operate in Time format
        element.set_live(DEFAULT_IS_LIVE);
        element.set_format(gst::Format::Time);

        Box::new(Self {
            cat: gst::DebugCategory::new(
                "ndisrc",
                gst::DebugColorFlags::empty(),
                "NewTek NDI Source",
            ),
            settings: Mutex::new(Default::default()),
            state: Mutex::new(Default::default()),
            clock_wait: Mutex::new(ClockWait {
                clock_id: None,
                flushing: true,
            }),
        })
    }

    // Called exactly once when registering the type. Used for
    // setting up metadata for all instances, e.g. the name and
    // classification and the pad templates with their caps.
    //
    // Actual instances can create pads based on those pad templates
    // with a subset of the caps given here. In case of basesrc,
    // a "src" and "sink" pad template are required here and the base class
    // will automatically instantiate pads for them.
    //
    // Our element here can output f32 and f64
    fn class_init(klass: &mut BaseSrcClass) {
        klass.set_metadata(
            "NewTek NDI Source",
            "Source",
            "NewTek NDI video/audio source",
            "Ruben Gonzalez <rubenrua@teltek.es>",
        );

        // On the src pad, we can produce F32/F64 with any sample rate
        // and any number of channels
        let caps = gst::Caps::new_simple(
            "audio/x-raw",
            &[
                (
                    "format",
                    &gst::List::new(&[
                        &gst_audio::AUDIO_FORMAT_F32.to_string(),
                        &gst_audio::AUDIO_FORMAT_F64.to_string(),
                    ]),
                ),
                ("layout", &"interleaved"),
                ("rate", &gst::IntRange::<i32>::new(1, i32::MAX)),
                ("channels", &gst::IntRange::<i32>::new(1, i32::MAX)),
            ],
        );
        // The src pad template must be named "src" for basesrc
        // and specifies a pad that is always there
        let src_pad_template = gst::PadTemplate::new(
            "src",
            gst::PadDirection::Src,
            gst::PadPresence::Always,
            &caps,
        );
        klass.add_pad_template(src_pad_template);

        // Install all our properties
        klass.install_properties(&PROPERTIES);
    }

    fn process<F: Float + FromByteSlice>(
        data: &mut [u8],
        accumulator_ref: &mut f64,
        freq: u32,
        rate: u32,
        channels: u32,
        vol: f64,
    ) {
        use std::f64::consts::PI;

        // Reinterpret our byte-slice as a slice containing elements of the type
        // we're interested in. GStreamer requires for raw audio that the alignment
        // of memory is correct, so this will never ever fail unless there is an
        // actual bug elsewhere.
        let data = data.as_mut_slice_of::<F>().unwrap();

        // Convert all our parameters to the target type for calculations
        let vol: F = NumCast::from(vol).unwrap();
        let freq = freq as f64;
        let rate = rate as f64;
        let two_pi = 2.0 * PI;

        // We're carrying an accumulator with up to 2pi around instead of working
        // on the sample offset. High sample offsets cause too much inaccuracy when
        // converted to floating point numbers and then iterated over in 1-steps
        let mut accumulator = *accumulator_ref;
        let step = two_pi * freq / rate;

        for chunk in data.chunks_mut(channels as usize) {
            let value = vol * F::sin(NumCast::from(accumulator).unwrap());
            for sample in chunk {
                *sample = value;
            }

            accumulator += step;
            if accumulator >= two_pi {
                accumulator -= two_pi;
            }
        }

        *accumulator_ref = accumulator;
    }
}
// Virtual methods of GObject itself
impl ObjectImpl<BaseSrc> for NdiSrc {
    // Called whenever a value of a property is changed. It can be called
    // at any time from any thread.
    fn set_property(&self, obj: &glib::Object, id: u32, value: &glib::Value) {
        let prop = &PROPERTIES[id as usize];
        let element = obj.clone().downcast::<BaseSrc>().unwrap();

        match *prop {
            Property::UInt("samples-per-buffer", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let samples_per_buffer = value.get().unwrap();
                gst_info!(
                    self.cat,
                    obj: &element,
                    "Changing samples-per-buffer from {} to {}",
                    settings.samples_per_buffer,
                    samples_per_buffer
                );
                settings.samples_per_buffer = samples_per_buffer;
                drop(settings);

                let _ =
                    element.post_message(&gst::Message::new_latency().src(Some(&element)).build());
            }
            Property::UInt("freq", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let freq = value.get().unwrap();
                gst_info!(
                    self.cat,
                    obj: &element,
                    "Changing freq from {} to {}",
                    settings.freq,
                    freq
                );
                settings.freq = freq;
            }
            Property::Double("volume", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let volume = value.get().unwrap();
                gst_info!(
                    self.cat,
                    obj: &element,
                    "Changing volume from {} to {}",
                    settings.volume,
                    volume
                );
                settings.volume = volume;
            }
            Property::Boolean("mute", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let mute = value.get().unwrap();
                gst_info!(
                    self.cat,
                    obj: &element,
                    "Changing mute from {} to {}",
                    settings.mute,
                    mute
                );
                settings.mute = mute;
            }
            Property::Boolean("is-live", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let is_live = value.get().unwrap();
                gst_info!(
                    self.cat,
                    obj: &element,
                    "Changing is-live from {} to {}",
                    settings.is_live,
                    is_live
                );
                settings.is_live = is_live;
            }
            _ => unimplemented!(),
        }
    }

    // Called whenever a value of a property is read. It can be called
    // at any time from any thread.
    fn get_property(&self, _obj: &glib::Object, id: u32) -> Result<glib::Value, ()> {
        let prop = &PROPERTIES[id as usize];

        match *prop {
            Property::UInt("samples-per-buffer", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.samples_per_buffer.to_value())
            }
            Property::UInt("freq", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.freq.to_value())
            }
            Property::Double("volume", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.volume.to_value())
            }
            Property::Boolean("mute", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.mute.to_value())
            }
            Property::Boolean("is-live", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.is_live.to_value())
            }
            _ => unimplemented!(),
        }
    }
}

// Virtual methods of gst::Element. We override none
impl ElementImpl<BaseSrc> for NdiSrc {
    fn change_state(
        &self,
        element: &BaseSrc,
        transition: gst::StateChange,
    ) -> gst::StateChangeReturn {
        // Configure liveness once here just before starting the source
        match transition {
            gst::StateChange::ReadyToPaused => {
                element.set_live(self.settings.lock().unwrap().is_live);
            }
            _ => (),
        }

        element.parent_change_state(transition)
    }
}
// Virtual methods of gst_base::BaseSrc
impl BaseSrcImpl<BaseSrc> for NdiSrc {
    // Called whenever the input/output caps are changing, i.e. in the very beginning before data
    // flow happens and whenever the situation in the pipeline is changing. All buffers after this
    // call have the caps given here.
    //
    // We simply remember the resulting AudioInfo from the caps to be able to use this for knowing
    // the sample rate, etc. when creating buffers
    fn set_caps(&self, element: &BaseSrc, caps: &gst::CapsRef) -> bool {
        use std::f64::consts::PI;

        let info = match gst_audio::AudioInfo::from_caps(caps) {
            None => return false,
            Some(info) => info,
        };

        gst_debug!(self.cat, obj: element, "Configuring for caps {}", caps);

        element.set_blocksize(info.bpf() * (*self.settings.lock().unwrap()).samples_per_buffer);

        let settings = *self.settings.lock().unwrap();
        let mut state = self.state.lock().unwrap();

        // If we have no caps yet, any old sample_offset and sample_stop will be
        // in nanoseconds
        let old_rate = match state.info {
            Some(ref info) => info.rate() as u64,
            None => gst::SECOND_VAL,
        };

        // Update sample offset and accumulator based on the previous values and the
        // sample rate change, if any
        let old_sample_offset = state.sample_offset;
        let sample_offset = old_sample_offset
            .mul_div_floor(info.rate() as u64, old_rate)
            .unwrap();

        let old_sample_stop = state.sample_stop;
        let sample_stop =
            old_sample_stop.map(|v| v.mul_div_floor(info.rate() as u64, old_rate).unwrap());

        let accumulator =
            (sample_offset as f64).rem(2.0 * PI * (settings.freq as f64) / (info.rate() as f64));

        *state = State {
            info: Some(info),
            sample_offset: sample_offset,
            sample_stop: sample_stop,
            accumulator: accumulator,
        };

        drop(state);

        let _ = element.post_message(&gst::Message::new_latency().src(Some(element)).build());

        true
    }

    // Called when starting, so we can initialize all stream-related state to its defaults
    fn start(&self, element: &BaseSrc) -> bool {
        // Reset state
        *self.state.lock().unwrap() = Default::default();
        self.unlock_stop(element);

        gst_info!(self.cat, obj: element, "Started");

        true
    }

    // Called when shutting down the element so we can release all stream-related state
    fn stop(&self, element: &BaseSrc) -> bool {
        // Reset state
        *self.state.lock().unwrap() = Default::default();
        self.unlock(element);

        gst_info!(self.cat, obj: element, "Stopped");

        true
    }

    fn query(&self, element: &BaseSrc, query: &mut gst::QueryRef) -> bool {
        use gst::QueryView;

        match query.view_mut() {
            // We only work in Push mode. In Pull mode, create() could be called with
            // arbitrary offsets and we would have to produce for that specific offset
            QueryView::Scheduling(ref mut q) => {
                q.set(gst::SchedulingFlags::SEQUENTIAL, 1, -1, 0);
                q.add_scheduling_modes(&[gst::PadMode::Push]);
                return true;
            }
            // In Live mode we will have a latency equal to the number of samples in each buffer.
            // We can't output samples before they were produced, and the last sample of a buffer
            // is produced that much after the beginning, leading to this latency calculation
            QueryView::Latency(ref mut q) => {
                let settings = *self.settings.lock().unwrap();
                let state = self.state.lock().unwrap();

                if let Some(ref info) = state.info {
                    let latency = gst::SECOND
                        .mul_div_floor(settings.samples_per_buffer as u64, info.rate() as u64)
                        .unwrap();
                    gst_debug!(self.cat, obj: element, "Returning latency {}", latency);
                    q.set(settings.is_live, latency, gst::CLOCK_TIME_NONE);
                    return true;
                } else {
                    return false;
                }
            }
            _ => (),
        }
        BaseSrcBase::parent_query(element, query)
    }

    // Creates the audio buffers
    fn create(
        &self,
        element: &BaseSrc,
        _offset: u64,
        _length: u32,
    ) -> Result<gst::Buffer, gst::FlowReturn> {
        // Keep a local copy of the values of all our properties at this very moment. This
        // ensures that the mutex is never locked for long and the application wouldn't
        // have to block until this function returns when getting/setting property values
        let settings = *self.settings.lock().unwrap();

        // Get a locked reference to our state, i.e. the input and output AudioInfo
        let mut state = self.state.lock().unwrap();
        let info = match state.info {
            None => {
                gst_element_error!(element, gst::CoreError::Negotiation, ["Have no caps yet"]);
                return Err(gst::FlowReturn::NotNegotiated);
            }
            Some(ref info) => info.clone(),
        };

        // If a stop position is set (from a seek), only produce samples up to that
        // point but at most samples_per_buffer samples per buffer
        let n_samples = if let Some(sample_stop) = state.sample_stop {
            if sample_stop <= state.sample_offset {
                gst_log!(self.cat, obj: element, "At EOS");
                return Err(gst::FlowReturn::Eos);
            }

            sample_stop - state.sample_offset
        } else {
            settings.samples_per_buffer as u64
        };

        // Allocate a new buffer of the required size, update the metadata with the
        // current timestamp and duration and then fill it according to the current
        // caps
        let mut buffer =
            gst::Buffer::with_size((n_samples as usize) * (info.bpf() as usize)).unwrap();
        {
            let buffer = buffer.get_mut().unwrap();

            // Calculate the current timestamp (PTS) and the next one,
            // and calculate the duration from the difference instead of
            // simply the number of samples to prevent rounding errors
            let pts = state
                .sample_offset
                .mul_div_floor(gst::SECOND_VAL, info.rate() as u64)
                .unwrap()
                .into();
            let next_pts: gst::ClockTime = (state.sample_offset + n_samples)
                .mul_div_floor(gst::SECOND_VAL, info.rate() as u64)
                .unwrap()
                .into();
            buffer.set_pts(pts);
            buffer.set_duration(next_pts - pts);

            // Map the buffer writable and create the actual samples
            let mut map = buffer.map_writable().unwrap();
            let data = map.as_mut_slice();

            if info.format() == gst_audio::AUDIO_FORMAT_F32 {
                Self::process::<f32>(
                    data,
                    &mut state.accumulator,
                    settings.freq,
                    info.rate(),
                    info.channels(),
                    settings.volume,
                );
            } else {
                Self::process::<f64>(
                    data,
                    &mut state.accumulator,
                    settings.freq,
                    info.rate(),
                    info.channels(),
                    settings.volume,
                );
            }
        }
        state.sample_offset += n_samples;
        drop(state);

        // If we're live, we are waiting until the time of the last sample in our buffer has
        // arrived. This is the very reason why we have to report that much latency.
        // A real live-source would of course only allow us to have the data available after
        // that latency, e.g. when capturing from a microphone, and no waiting from our side
        // would be necessary.
        //
        // Waiting happens based on the pipeline clock, which means that a real live source
        // with its own clock would require various translations between the two clocks.
        // This is out of scope for the tutorial though.
        if element.is_live() {
            let clock = match element.get_clock() {
                None => return Ok(buffer),
                Some(clock) => clock,
            };

            let segment = element
                .get_segment()
                .downcast::<gst::format::Time>()
                .unwrap();
            let base_time = element.get_base_time();
            let running_time = segment.to_running_time(buffer.get_pts() + buffer.get_duration());

            // The last sample's clock time is the base time of the element plus the
            // running time of the last sample
            let wait_until = running_time + base_time;
            if wait_until.is_none() {
                return Ok(buffer);
            }

            // Store the clock ID in our struct unless we're flushing anyway.
            // This allows us to asynchronously cancel the waiting from unlock()
            // so that we immediately stop waiting on e.g. shutdown.
            let mut clock_wait = self.clock_wait.lock().unwrap();
            if clock_wait.flushing {
                gst_debug!(self.cat, obj: element, "Flushing");
                return Err(gst::FlowReturn::Flushing);
            }

            let id = clock.new_single_shot_id(wait_until).unwrap();
            clock_wait.clock_id = Some(id.clone());
            drop(clock_wait);

            gst_log!(
                self.cat,
                obj: element,
                "Waiting until {}, now {}",
                wait_until,
                clock.get_time()
            );
            let (res, jitter) = id.wait();
            gst_log!(
                self.cat,
                obj: element,
                "Waited res {:?} jitter {}",
                res,
                jitter
            );
            self.clock_wait.lock().unwrap().clock_id.take();

            // If the clock ID was unscheduled, unlock() was called
            // and we should return Flushing immediately.
            if res == gst::ClockReturn::Unscheduled {
                gst_debug!(self.cat, obj: element, "Flushing");
                return Err(gst::FlowReturn::Flushing);
            }
        }

        gst_debug!(self.cat, obj: element, "Produced buffer {:?}", buffer);

        Ok(buffer)
    }

    fn fixate(&self, element: &BaseSrc, caps: gst::Caps) -> gst::Caps {
        // Fixate the caps. BaseSrc will do some fixation for us, but
        // as we allow any rate between 1 and MAX it would fixate to 1. 1Hz
        // is generally not a useful sample rate.
        //
        // We fixate to the closest integer value to 48kHz that is possible
        // here, and for good measure also decide that the closest value to 1
        // channel is good.
        let mut caps = gst::Caps::truncate(caps);
        {
            let caps = caps.make_mut();
            let s = caps.get_mut_structure(0).unwrap();
            s.fixate_field_nearest_int("rate", 48_000);
            s.fixate_field_nearest_int("channels", 1);
        }

        // Let BaseSrc fixate anything else for us. We could alternatively have
        // called Caps::fixate() here
        element.parent_fixate(caps)
    }

    fn is_seekable(&self, _element: &BaseSrc) -> bool {
        true
    }

    fn do_seek(&self, element: &BaseSrc, segment: &mut gst::Segment) -> bool {
        // Handle seeking here. For Time and Default (sample offset) seeks we can
        // do something and have to update our sample offset and accumulator accordingly.
        //
        // Also we should remember the stop time (so we can stop at that point), and if
        // reverse playback is requested. These values will all be used during buffer creation
        // and for calculating the timestamps, etc.

        if segment.get_rate() < 0.0 {
            gst_error!(self.cat, obj: element, "Reverse playback not supported");
            return false;
        }

        let settings = *self.settings.lock().unwrap();
        let mut state = self.state.lock().unwrap();

        // We store sample_offset and sample_stop in nanoseconds if we
        // don't know any sample rate yet. It will be converted correctly
        // once a sample rate is known.
        let rate = match state.info {
            None => gst::SECOND_VAL,
            Some(ref info) => info.rate() as u64,
        };

        if let Some(segment) = segment.downcast_ref::<gst::format::Time>() {
            use std::f64::consts::PI;

            let sample_offset = segment
                .get_start()
                .unwrap()
                .mul_div_floor(rate, gst::SECOND_VAL)
                .unwrap();

            let sample_stop = segment
                .get_stop()
                .map(|v| v.mul_div_floor(rate, gst::SECOND_VAL).unwrap());

            let accumulator =
                (sample_offset as f64).rem(2.0 * PI * (settings.freq as f64) / (rate as f64));

            gst_debug!(
                self.cat,
                obj: element,
                "Seeked to {}-{:?} (accum: {}) for segment {:?}",
                sample_offset,
                sample_stop,
                accumulator,
                segment
            );

            *state = State {
                info: state.info.clone(),
                sample_offset: sample_offset,
                sample_stop: sample_stop,
                accumulator: accumulator,
            };

            true
        } else if let Some(segment) = segment.downcast_ref::<gst::format::Default>() {
            use std::f64::consts::PI;

            if state.info.is_none() {
                gst_error!(
                    self.cat,
                    obj: element,
                    "Can only seek in Default format if sample rate is known"
                );
                return false;
            }

            let sample_offset = segment.get_start().unwrap();
            let sample_stop = segment.get_stop().0;

            let accumulator =
                (sample_offset as f64).rem(2.0 * PI * (settings.freq as f64) / (rate as f64));

            gst_debug!(
                self.cat,
                obj: element,
                "Seeked to {}-{:?} (accum: {}) for segment {:?}",
                sample_offset,
                sample_stop,
                accumulator,
                segment
            );

            *state = State {
                info: state.info.clone(),
                sample_offset: sample_offset,
                sample_stop: sample_stop,
                accumulator: accumulator,
            };

            true
        } else {
            gst_error!(
                self.cat,
                obj: element,
                "Can't seek in format {:?}",
                segment.get_format()
            );

            false
        }
    }

    fn unlock(&self, element: &BaseSrc) -> bool {
        // This should unblock the create() function ASAP, so we
        // just unschedule the clock ID here, if any.
        gst_debug!(self.cat, obj: element, "Unlocking");
        let mut clock_wait = self.clock_wait.lock().unwrap();
        if let Some(clock_id) = clock_wait.clock_id.take() {
            clock_id.unschedule();
        }
        clock_wait.flushing = true;

        true
    }

    fn unlock_stop(&self, element: &BaseSrc) -> bool {
        // This signals that unlocking is done, so we can reset
        // all values again.
        gst_debug!(self.cat, obj: element, "Unlock stop");
        let mut clock_wait = self.clock_wait.lock().unwrap();
        clock_wait.flushing = false;

        true
    }
}

// This zero-sized struct contains the static metadata of our element. It is only necessary to
// be able to implement traits on it, but e.g. a plugin that registers multiple elements with the
// same code would use this struct to store information about the concrete element. An example of
// this would be a plugin that wraps around a library that has multiple decoders with the same API,
// but wants (as it should) a separate element registered for each decoder.
struct NdiSrcStatic;

// The basic trait for registering the type: This returns a name for the type and registers the
// instance and class initialization functions with the type system, thus hooking everything
// together.
impl ImplTypeStatic<BaseSrc> for NdiSrcStatic {
    fn get_name(&self) -> &str {
        "NdiSrc"
    }

    fn new(&self, element: &BaseSrc) -> Box<BaseSrcImpl<BaseSrc>> {
        NdiSrc::new(element)
    }

    fn class_init(&self, klass: &mut BaseSrcClass) {
        NdiSrc::class_init(klass);
    }
}

// Registers the type for our element, and then registers in GStreamer under
// the name "ndisrc" for being able to instantiate it via e.g.
// gst::ElementFactory::make().
pub fn register(plugin: &gst::Plugin) {
    let type_ = register_type(NdiSrcStatic);
    gst::Element::register(plugin, "ndisrc", 0, type_);
}
508
gst-plugin-ndi/src/ndivideosrc.rs
Normal file

@ -0,0 +1,508 @@
#![allow(non_camel_case_types, non_upper_case_globals, non_snake_case)]

use glib;
use gst;
use gst::prelude::*;
use gst_video;
use gst_base::prelude::*;
use gst::Fraction;

use gst_plugin::base_src::*;
use gst_plugin::element::*;
use gobject_subclass::object::*;

use std::sync::Mutex;
use std::{i32, u32};

use std::ptr;

use ndilib::*;
use connect_ndi;
use stop_ndi;
use ndi_struct;

use hashmap_receivers;

// Property value storage
#[derive(Debug, Clone)]
struct Settings {
    stream_name: String,
    ip: String,
    id_receiver: i8,
    latency: u64,
}

impl Default for Settings {
    fn default() -> Self {
        Settings {
            stream_name: String::from("Fixed ndi stream name"),
            ip: String::from(""),
            id_receiver: 0,
            latency: 0,
        }
    }
}
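
// Note on the settings above, as they are used in the rest of this file:
// `stream_name` and `ip` select which NDI source to connect to, `id_receiver`
// is the key returned by connect_ndi() and used to look the shared receiver up
// in `hashmap_receivers`, and `latency` is stored but only referenced from the
// commented-out latency query further down.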
// Metadata for the properties
static PROPERTIES: [Property; 2] = [
    Property::String(
        "stream-name",
        "Stream Name",
        "Name of the streaming device",
        None,
        PropertyMutability::ReadWrite,
    ),
    Property::String(
        "ip",
        "Stream IP",
        "Stream IP",
        None,
        PropertyMutability::ReadWrite,
    ),
];

// Stream-specific state, i.e. video format configuration
struct State {
    info: Option<gst_video::VideoInfo>,
}

impl Default for State {
    fn default() -> State {
        State {
            info: None,
        }
    }
}

struct TimestampData {
    offset: u64,
}

// Struct containing all the element data
struct NdiVideoSrc {
    cat: gst::DebugCategory,
    settings: Mutex<Settings>,
    state: Mutex<State>,
    timestamp_data: Mutex<TimestampData>,
}

impl NdiVideoSrc {
    // Called when a new instance is to be created
    fn new(element: &BaseSrc) -> Box<BaseSrcImpl<BaseSrc>> {
        // Initialize liveness and notify the base class that
        // we'd like to operate in Time format
        element.set_live(true);
        element.set_format(gst::Format::Time);

        Box::new(Self {
            cat: gst::DebugCategory::new(
                "ndivideosrc",
                gst::DebugColorFlags::empty(),
                "NewTek NDI Video Source",
            ),
            settings: Mutex::new(Default::default()),
            state: Mutex::new(Default::default()),
            timestamp_data: Mutex::new(TimestampData {
                offset: 0,
            }),
        })
    }

    // Called exactly once when registering the type. Used for
    // setting up metadata for all instances, e.g. the name and
    // classification and the pad templates with their caps.
    //
    // Actual instances can create pads based on those pad templates
    // with a subset of the caps given here. In case of basesrc,
    // a "src" and "sink" pad template are required here and the base class
    // will automatically instantiate pads for them.
    //
    // Our element here can only output UYVY raw video for now
    fn class_init(klass: &mut BaseSrcClass) {
        klass.set_metadata(
            "NewTek NDI Video Source",
            "Source",
            "NewTek NDI video source",
            "Ruben Gonzalez <rubenrua@teltek.es>, Daniel Vilar <daniel.peiteado@teltek.es>",
        );

        // On the src pad, we can produce UYVY video at any resolution
        // and framerate
        let caps = gst::Caps::new_simple(
            "video/x-raw",
            &[
                (
                    "format",
                    &gst::List::new(&[
                        //TODO add all formats?
                        &gst_video::VideoFormat::Uyvy.to_string(),
                        //&gst_video::VideoFormat::Rgb.to_string(),
                        //&gst_video::VideoFormat::Gray8.to_string(),
                    ]),
                ),
                ("width", &gst::IntRange::<i32>::new(0, i32::MAX)),
                ("height", &gst::IntRange::<i32>::new(0, i32::MAX)),
                (
                    "framerate",
                    &gst::FractionRange::new(
                        gst::Fraction::new(0, 1),
                        gst::Fraction::new(i32::MAX, 1),
                    ),
                ),
            ],
        );
        // The src pad template must be named "src" for basesrc
        // and specifies a pad that is always there
        let src_pad_template = gst::PadTemplate::new(
            "src",
            gst::PadDirection::Src,
            gst::PadPresence::Always,
            &caps,
            //&gst::Caps::new_any(),
        );
        klass.add_pad_template(src_pad_template);

        // Install all our properties
        klass.install_properties(&PROPERTIES);
    }
}

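// Note on the caps above: only UYVY is advertised for now (see the TODO),
// since create() copies the received NDI frame data out unconverted and UYVY
// is what the receiver is expected to deliver here. Width, height and
// framerate are left as open ranges and are narrowed to the actual stream
// values in fixate() below.
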
// Virtual methods of GObject itself
impl ObjectImpl<BaseSrc> for NdiVideoSrc {
    // Called whenever a value of a property is changed. It can be called
    // at any time from any thread.
    fn set_property(&self, obj: &glib::Object, id: u32, value: &glib::Value) {
        let prop = &PROPERTIES[id as usize];
        let element = obj.clone().downcast::<BaseSrc>().unwrap();

        match *prop {
            Property::String("stream-name", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let stream_name = value.get().unwrap();
                gst_debug!(
                    self.cat,
                    obj: &element,
                    "Changing stream-name from {} to {}",
                    settings.stream_name,
                    stream_name
                );
                settings.stream_name = stream_name;
                drop(settings);

                // let _ =
                //     element.post_message(&gst::Message::new_latency().src(Some(&element)).build());
            },
            Property::String("ip", ..) => {
                let mut settings = self.settings.lock().unwrap();
                let ip = value.get().unwrap();
                gst_debug!(
                    self.cat,
                    obj: &element,
                    "Changing ip from {} to {}",
                    settings.ip,
                    ip
                );
                settings.ip = ip;
                drop(settings);

                // let _ =
                //     element.post_message(&gst::Message::new_latency().src(Some(&element)).build());
            }
            _ => unimplemented!(),
        }
    }

    // Called whenever a value of a property is read. It can be called
    // at any time from any thread.
    fn get_property(&self, _obj: &glib::Object, id: u32) -> Result<glib::Value, ()> {
        let prop = &PROPERTIES[id as usize];

        match *prop {
            Property::String("stream-name", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.stream_name.to_value())
            },
            Property::String("ip", ..) => {
                let settings = self.settings.lock().unwrap();
                Ok(settings.ip.to_value())
            }
            _ => unimplemented!(),
        }
    }
}

// Virtual methods of gst::Element. We only override change_state
impl ElementImpl<BaseSrc> for NdiVideoSrc {
    fn change_state(&self, element: &BaseSrc, transition: gst::StateChange) -> gst::StateChangeReturn {
        if transition == gst::StateChange::PausedToPlaying {
            let receivers = hashmap_receivers.lock().unwrap();
            let settings = self.settings.lock().unwrap();

            let receiver = receivers.get(&settings.id_receiver).unwrap();
            let recv = &receiver.ndi_instance;
            let pNDI_recv = recv.recv;

            let video_frame: NDIlib_video_frame_v2_t = Default::default();

            let mut frame_type: NDIlib_frame_type_e = NDIlib_frame_type_e::NDIlib_frame_type_none;
            unsafe {
                while frame_type != NDIlib_frame_type_e::NDIlib_frame_type_video {
                    frame_type = NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000);
                }

                if ndi_struct.initial_timestamp <= video_frame.timestamp as u64 || ndi_struct.initial_timestamp == 0 {
                    ndi_struct.initial_timestamp = video_frame.timestamp as u64;
                }
            }
        }
        element.parent_change_state(transition)
    }
}

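// Note on change_state above: the PausedToPlaying branch blocks until a first
// video frame arrives and latches its NDI timestamp into
// ndi_struct.initial_timestamp, which create() below uses as the zero point
// for buffer PTS; frames carrying an older timestamp are then skipped.
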
// Virtual methods of gst_base::BaseSrc
impl BaseSrcImpl<BaseSrc> for NdiVideoSrc {
    // Called whenever the input/output caps are changing, i.e. in the very beginning before data
    // flow happens and whenever the situation in the pipeline is changing. All buffers after this
    // call have the caps given here.
    //
    // We simply remember the resulting VideoInfo from the caps to be able to use this for knowing
    // the width, height, framerate, etc. when creating buffers
    fn set_caps(&self, element: &BaseSrc, caps: &gst::CapsRef) -> bool {
        let info = match gst_video::VideoInfo::from_caps(caps) {
            None => return false,
            Some(info) => info,
        };
        gst_debug!(self.cat, obj: element, "Configuring for caps {}", caps);

        // TODO This may fail if we don't create the structure from scratch, but if we do we can't set recv to None
        let mut state = self.state.lock().unwrap();
        state.info = Some(info);
        let _ = element.post_message(&gst::Message::new_latency().src(Some(element)).build());
        true
    }

    // Called when starting, so we can initialize all stream-related state to its defaults
    fn start(&self, element: &BaseSrc) -> bool {
        // Reset state
        *self.state.lock().unwrap() = Default::default();
        let mut settings = self.settings.lock().unwrap();
        settings.id_receiver = connect_ndi(self.cat, element, settings.ip.clone(), settings.stream_name.clone());

        if settings.id_receiver == 0 {
            return false;
        }
        else {
            // let _ = element.post_message(&gst::Message::new_latency().src(Some(element)).build());
            return true;
        }
    }

    // Called when shutting down the element so we can release all stream-related state
    fn stop(&self, element: &BaseSrc) -> bool {
        // Reset state
        *self.state.lock().unwrap() = Default::default();

        let settings = self.settings.lock().unwrap();
        stop_ndi(self.cat, element, settings.id_receiver.clone());
        // Commented because when adding ndi destroy stopped in this line
        //*self.state.lock().unwrap() = Default::default();
        true
    }

    fn query(&self, element: &BaseSrc, query: &mut gst::QueryRef) -> bool {
        use gst::QueryView;
        match query.view_mut() {
            // We only work in Push mode. In Pull mode, create() could be called with
            // arbitrary offsets and we would have to produce for that specific offset
            QueryView::Scheduling(ref mut q) => {
                q.set(gst::SchedulingFlags::SEQUENTIAL, 1, -1, 0);
                q.add_scheduling_modes(&[gst::PadMode::Push]);
                return true;
            }
            // In Live mode we will have a latency equal to the number of samples in each buffer.
            // We can't output samples before they were produced, and the last sample of a buffer
            // is produced that much after the beginning, leading to this latency calculation
            // QueryView::Latency(ref mut q) => {
            //     let settings = self.settings.lock().unwrap();
            //     let state = self.state.lock().unwrap();
            //     println!("Dentro de query");
            //
            //     if let Some(ref _info) = state.info {
            //         // let latency = gst::SECOND
            //         //     .mul_div_floor(settings.samples_per_buffer as u64, info.rate() as u64)
            //         //     .unwrap();
            //         let latency = gst::SECOND.mul_div_floor(3 as u64, 2 as u64).unwrap();
            //         let mut latency = gst::SECOND.mul_div_floor(settings.latency, 1000).unwrap();
            //         // if settings.latency > 2000 {
            //         //     println!("{:?}", element.get_name());
            //         //     latency = gst::SECOND * 0;
            //         // }
            //         let latency = gst::SECOND * 0;
            //         // .mul_div_floor(1 as u64, 30 as u64)
            //         // .unwrap();
            //         // gst_debug!(self.cat, obj: element, "Returning latency {}", latency);
            //         let max = gst::SECOND * 120 * 1843200;
            //         // println!("{:?}", latency2);
            //         println!("{:?}", latency);
            //         println!("{:?}", (settings.latency / 1000));
            //         // println!("{:?}", max);
            //         q.set(true, latency, max);
            //         return true;
            //     } else {
            //         return false;
            //     }
            // }
            _ => (),
        }
        BaseSrcBase::parent_query(element, query)
    }

    fn fixate(&self, element: &BaseSrc, caps: gst::Caps) -> gst::Caps {
        // We need to set the correct caps resolution and framerate
        let receivers = hashmap_receivers.lock().unwrap();
        let settings = self.settings.lock().unwrap();

        let receiver = receivers.get(&settings.id_receiver).unwrap();
        let recv = &receiver.ndi_instance;
        let pNDI_recv = recv.recv;

        let video_frame: NDIlib_video_frame_v2_t = Default::default();

        let mut frame_type: NDIlib_frame_type_e = NDIlib_frame_type_e::NDIlib_frame_type_none;
        while frame_type != NDIlib_frame_type_e::NDIlib_frame_type_video {
            unsafe {
                frame_type = NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000);
            }
        }

        let mut caps = gst::Caps::truncate(caps);
        {
            let caps = caps.make_mut();
            let s = caps.get_mut_structure(0).unwrap();
            s.fixate_field_nearest_int("width", video_frame.xres);
            s.fixate_field_nearest_int("height", video_frame.yres);
            s.fixate_field_nearest_fraction("framerate", Fraction::new(video_frame.frame_rate_N, video_frame.frame_rate_D));
        }

        // Let BaseSrc fixate anything else for us. We could alternatively have
        // called Caps::fixate() here

        let _ = element.post_message(&gst::Message::new_latency().src(Some(element)).build());
        element.parent_fixate(caps)
    }

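    // Note on fixate above: it deliberately blocks on NDIlib_recv_capture_v2
    // until a real video frame arrives, so the caps can be pinned to the
    // stream's actual xres/yres and frame_rate_N/frame_rate_D instead of
    // guessing. For example (values illustrative), a 1920x1080 stream at
    // 30000/1001 fps fixates width=1920, height=1080, framerate=30000/1001.
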
    //Creates the video buffers
    fn create(
        &self,
        element: &BaseSrc,
        _offset: u64,
        _length: u32,
    ) -> Result<gst::Buffer, gst::FlowReturn> {
        // Keep a local copy of the values of all our properties at this very moment. This
        // ensures that the mutex is never locked for long and the application wouldn't
        // have to block until this function returns when getting/setting property values
        let _settings = &*self.settings.lock().unwrap();

        let mut timestamp_data = self.timestamp_data.lock().unwrap();
        // Get a locked reference to our state, i.e. the negotiated VideoInfo
        let state = self.state.lock().unwrap();
        let _info = match state.info {
            None => {
                gst_element_error!(element, gst::CoreError::Negotiation, ["Have no caps yet"]);
                return Err(gst::FlowReturn::NotNegotiated);
            }
            Some(ref info) => info.clone(),
        };
        // unsafe{
        let receivers = hashmap_receivers.lock().unwrap();

        let recv = &receivers.get(&_settings.id_receiver).unwrap().ndi_instance;
        let pNDI_recv = recv.recv;

        let pts: u64;
        let video_frame: NDIlib_video_frame_v2_t = Default::default();

        unsafe {
            let time = ndi_struct.initial_timestamp;

            let mut skip_frame = true;
            while skip_frame {
                let frame_type = NDIlib_recv_capture_v2(pNDI_recv, &video_frame, ptr::null(), ptr::null(), 1000);
                if frame_type == NDIlib_frame_type_e::NDIlib_frame_type_none || frame_type == NDIlib_frame_type_e::NDIlib_frame_type_error {
                    gst_element_error!(element, gst::ResourceError::Read, ["NDI frame type none received, assuming that the source closed the stream...."]);
                    return Err(gst::FlowReturn::CustomError);
                }
                if time >= (video_frame.timestamp as u64) {
                    gst_debug!(self.cat, obj: element, "Frame timestamp ({:?}) is lower than received in the first frame from NDI ({:?}), so skipping...", (video_frame.timestamp as u64), time);
                }
                else {
                    skip_frame = false;
                }
            }

            pts = video_frame.timestamp as u64 - time;

            let buff_size = (video_frame.yres * video_frame.line_stride_in_bytes) as usize;
            let mut buffer = gst::Buffer::with_size(buff_size).unwrap();
            {
                let vec = Vec::from_raw_parts(video_frame.p_data as *mut u8, buff_size, buff_size);
                let pts: gst::ClockTime = (pts * 100).into();

                let duration: gst::ClockTime = (((video_frame.frame_rate_D as f64 / video_frame.frame_rate_N as f64) * 1000000000.0) as u64).into();
                let buffer = buffer.get_mut().unwrap();

                if ndi_struct.start_pts == gst::ClockTime(Some(0)) {
                    ndi_struct.start_pts = element.get_clock().unwrap().get_time() - element.get_base_time();
                }

                buffer.set_pts(pts + ndi_struct.start_pts);
                buffer.set_duration(duration);
                buffer.set_offset(timestamp_data.offset);
                buffer.set_offset_end(timestamp_data.offset + 1);
                timestamp_data.offset = timestamp_data.offset + 1;
                buffer.copy_from_slice(0, &vec).unwrap();
            }

            gst_debug!(self.cat, obj: element, "Produced buffer {:?}", buffer);

            Ok(buffer)
        }
    }
}

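// Note on the timestamp math in create() above: NDI timestamps are expressed
// in 100 ns units, so the delta against initial_timestamp is multiplied by 100
// to obtain nanoseconds before start_pts is added. As a worked example (values
// illustrative), a frame arriving 2 s after the first one has a delta of
// 20_000_000 ticks, i.e. a PTS of 2_000_000_000 ns plus start_pts.
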
// This zero-sized struct contains the static metadata of our element. It is only necessary to
// be able to implement traits on it, but e.g. a plugin that registers multiple elements with the
// same code would use this struct to store information about the concrete element. An example of
// this would be a plugin that wraps around a library that has multiple decoders with the same API,
// but wants (as it should) a separate element registered for each decoder.
struct NdiVideoSrcStatic;

// The basic trait for registering the type: This returns a name for the type and registers the
// instance and class initialization functions with the type system, thus hooking everything
// together.
impl ImplTypeStatic<BaseSrc> for NdiVideoSrcStatic {
    fn get_name(&self) -> &str {
        "NdiVideoSrc"
    }

    fn new(&self, element: &BaseSrc) -> Box<BaseSrcImpl<BaseSrc>> {
        NdiVideoSrc::new(element)
    }

    fn class_init(&self, klass: &mut BaseSrcClass) {
        NdiVideoSrc::class_init(klass);
    }
}

// Registers the type for our element, and then registers it in GStreamer under
// the name "ndivideosrc" for being able to instantiate it via e.g.
// gst::ElementFactory::make().
pub fn register(plugin: &gst::Plugin) {
    let type_ = register_type(NdiVideoSrcStatic);
    gst::Element::register(plugin, "ndivideosrc", 0, type_);
}
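
// Illustrative usage sketch (the source name is hypothetical): once the plugin
// is installed, the element can be tried from the command line with something
// like
//
//     gst-launch-1.0 ndivideosrc stream-name="MY-PC (Source 1)" ! videoconvert ! autovideosink
//
// where stream-name is whatever the NDI sender announces on the network.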